git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/branches/data-access/species-products-discovery/3.0@144393 82a268e6-3cf1-43bd-a215-b396298e98cf
parent 9e57374771
commit b613a26119

@@ -6,8 +6,6 @@
 	<local-persistence location='target' />
 
 	<exclude>/gcube/service/resultset/*</exclude>
-	<exclude handlers='request-accounting'>/gcube/service/*</exclude>
-
 
 
 </application>

@@ -17,7 +17,6 @@
 	<artifactId>${artifactId}</artifactId>
 	<version>${version}</version>
 	</MavenCoordinates>
-	<Type>library</Type>
 	<Files>
 	<File>${build.finalName}.war</File>
 	</Files>

org/gcube/data/spd/executor/jobs/SerializableSpeciesJob.java (new file):
@@ -0,0 +1,80 @@
+package org.gcube.data.spd.executor.jobs;
+
+import java.io.Serializable;
+import java.util.Calendar;
+
+import org.gcube.data.spd.model.service.types.JobStatus;
+
+public class SerializableSpeciesJob extends SpeciesJob implements Serializable {
+
+	private static final long serialVersionUID = 1L;
+
+	private JobStatus status;
+	private String id;
+	private int completedEntries;
+	private Calendar startDate;
+	private Calendar endDate;
+
+	public SerializableSpeciesJob(JobStatus status, String id,
+			int completedEntries, Calendar startDate, Calendar endDate) {
+		super();
+		this.status = status!=JobStatus.COMPLETED?JobStatus.FAILED:JobStatus.COMPLETED;
+		this.id = id;
+		this.completedEntries = completedEntries;
+		this.startDate = startDate;
+		this.endDate = endDate;
+	}
+
+	@Override
+	public void execute() {}
+
+	@Override
+	public boolean isResubmitPermitted() {
+		return false;
+	}
+
+	@Override
+	public JobStatus getStatus() {
+		return status;
+	}
+
+	@Override
+	public void setStatus(JobStatus status) {
+		this.status = status;
+	}
+
+	@Override
+	public String getId() {
+		return id;
+	}
+
+	@Override
+	public boolean validateInput(String input) {
+		return false;
+	}
+
+	@Override
+	public int getCompletedEntries() {
+		return completedEntries;
+	}
+
+	@Override
+	public Calendar getStartDate() {
+		return startDate;
+	}
+
+	@Override
+	public Calendar getEndDate() {
+		return endDate;
+	}
+
+}

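SerializableSpeciesJob is a pure snapshot: execute() is a no-op and the constructor collapses any status other than COMPLETED to FAILED, so a job that was serialized while still RUNNING is reported as FAILED after a restart. A minimal usage sketch (the id and counter values below are invented for illustration):

import java.util.Calendar;

import org.gcube.data.spd.model.service.types.JobStatus;

public class SnapshotDemo {
    public static void main(String[] args) {
        SerializableSpeciesJob snapshot = new SerializableSpeciesJob(
                JobStatus.RUNNING,      // collapses to FAILED in the constructor
                "job-42",               // hypothetical job id
                10,                     // entries completed so far
                Calendar.getInstance(), // start date
                null);                  // job never finished, so no end date
        System.out.println(snapshot.getStatus()); // prints FAILED
    }
}
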
org/gcube/data/spd/executor/jobs/SpeciesJob.java:
@@ -1,24 +1,72 @@
 package org.gcube.data.spd.executor.jobs;
 
-import java.io.Serializable;
 import java.util.Calendar;
 
+import org.gcube.accounting.datamodel.UsageRecord.OperationResult;
+import org.gcube.accounting.datamodel.usagerecords.JobUsageRecord;
+import org.gcube.accounting.persistence.AccountingPersistence;
+import org.gcube.accounting.persistence.AccountingPersistenceFactory;
+import org.gcube.common.authorization.library.provider.AuthorizationProvider;
+import org.gcube.common.scope.api.ScopeProvider;
 import org.gcube.data.spd.model.service.types.JobStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-public interface SpeciesJob extends Serializable, Runnable {
+public abstract class SpeciesJob implements Runnable {
 
-	public JobStatus getStatus() ;
+	private static Logger log = LoggerFactory.getLogger(SpeciesJob.class);
 
-	public void setStatus(JobStatus status) ;
+	public abstract JobStatus getStatus() ;
 
-	public String getId();
+	public abstract void setStatus(JobStatus status) ;
 
-	public boolean validateInput(String input);
+	public abstract String getId();
 
-	public int getCompletedEntries();
+	public abstract boolean validateInput(String input);
 
-	public Calendar getStartDate();
+	public abstract int getCompletedEntries();
 
-	public Calendar getEndDate();
+	public abstract Calendar getStartDate();
 
-}
+	public abstract Calendar getEndDate();
+
+	public abstract void execute();
+
+	public abstract boolean isResubmitPermitted();
+
+	public final void run(){
+		if (getStatus()!=JobStatus.PENDING && !isResubmitPermitted()){
+			log.warn("the job with id {} cannot be resubmitted",getId());
+			throw new IllegalStateException("this job cannot be resubmitted");
+		}
+		try{
+			execute();
+		}catch(Exception e){
+			log.error("unexpected exception in job, setting status to FAILED",e);
+			this.setStatus(JobStatus.FAILED);
+		}
+		generateAccounting();
+	}
+
+	private final void generateAccounting(){
+		AccountingPersistence persistence = AccountingPersistenceFactory.getPersistence();
+		JobUsageRecord jobUsageRecord = new JobUsageRecord();
+		try{
+			jobUsageRecord.setConsumerId(AuthorizationProvider.instance.get().getClient().getId());
+			jobUsageRecord.setScope(ScopeProvider.instance.get());
+			jobUsageRecord.setJobName(this.getClass().getSimpleName());
+			jobUsageRecord.setOperationResult(getStatus()==JobStatus.COMPLETED?OperationResult.SUCCESS:OperationResult.FAILED);
+			jobUsageRecord.setJobId(this.getId());
+			jobUsageRecord.setJobStartTime(this.getStartDate());
+			jobUsageRecord.setJobEndTime(this.getEndDate());
+
+			persistence.account(jobUsageRecord);
+			log.info("Job {} accounted successfully",getId());
+		}catch(Exception ex){
+			log.warn("invalid record passed to accounting ",ex);
+		}
+	}
+
+}

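SpeciesJob now applies the template method pattern: the final run() rejects resubmission of non-PENDING jobs, delegates the real work to execute(), downgrades the status to FAILED on any exception, and reports a JobUsageRecord either way. A minimal sketch of a conforming subclass, assuming only the abstract class above (NoOpJob and its values are invented for illustration):

import java.util.Calendar;

import org.gcube.data.spd.model.service.types.JobStatus;

public class NoOpJob extends SpeciesJob {

    private JobStatus status = JobStatus.PENDING;
    private final Calendar start = Calendar.getInstance();

    @Override public void execute() { status = JobStatus.COMPLETED; } // the actual work
    @Override public boolean isResubmitPermitted() { return false; }
    @Override public JobStatus getStatus() { return status; }
    @Override public void setStatus(JobStatus status) { this.status = status; }
    @Override public String getId() { return "noop-1"; }
    @Override public boolean validateInput(String input) { return true; }
    @Override public int getCompletedEntries() { return 0; }
    @Override public Calendar getStartDate() { return start; }
    @Override public Calendar getEndDate() { return Calendar.getInstance(); }
}

// new NoOpJob().run() executes the job once and then accounts it;
// a second run() throws IllegalStateException because the status is
// no longer PENDING and isResubmitPermitted() is false.
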
org/gcube/data/spd/executor/jobs/URLJob.java:
@@ -1,9 +1,9 @@
 package org.gcube.data.spd.executor.jobs;
 
-public interface URLJob extends SpeciesJob {
+public abstract class URLJob extends SpeciesJob {
 
-	public String getResultURL() ;
+	public abstract String getResultURL() ;
 
-	public String getErrorURL() ;
+	public abstract String getErrorURL() ;
 
 }

CSVCreator.java:
@@ -8,12 +8,6 @@ import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
 
 public class CSVCreator extends CSVJob{
 
-	/**
-	 * 
-	 */
-	private static final long serialVersionUID = 1L;
-
-
 	private static transient OccurrenceCSVConverter converter;
 
 	public CSVCreator(Map<String, AbstractPlugin> plugins) {
@@ -31,4 +25,5 @@ public class CSVCreator extends CSVJob{
 	public List<String> getHeader() {
 		return OccurrenceCSVConverter.HEADER;
 	}
+
 }

CSVCreatorForOMJob.java:
@@ -7,12 +7,6 @@ import org.gcube.data.spd.model.products.OccurrencePoint;
 import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
 
 public class CSVCreatorForOMJob extends CSVJob{
 
-	/**
-	 * 
-	 */
-	private static final long serialVersionUID = 1L;
-
-
 	private static transient OccurrenceCSVConverterOpenModeller converter;
 

CSVJob.java:
@@ -30,14 +30,7 @@ import org.gcube.data.streams.Stream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-abstract class CSVJob implements URLJob{
-
-
-	/**
-	 * 
-	 */
-	private static final long serialVersionUID = 1L;
-
+abstract class CSVJob extends URLJob{
 
 	private static Logger logger = LoggerFactory.getLogger(CSVJob.class);
 
@@ -62,9 +55,8 @@ abstract class CSVJob implements URLJob{
 		this.plugins = plugins;
 	}
 
-
 	@Override
-	public void run() {
+	public void execute() {
 		File csvFile = null;
 		try{
 			this.startDate = Calendar.getInstance();
@@ -161,7 +153,6 @@ abstract class CSVJob implements URLJob{
 
 	@Override
 	public String getErrorURL() {
-		// TODO Auto-generated method stub
 		return null;
 	}
 
@@ -191,4 +182,10 @@ abstract class CSVJob implements URLJob{
 		return startDate;
 	}
 
+	@Override
+	public boolean isResubmitPermitted() {
+		return false;
+	}
+
+
 }

DarwinCoreJob.java:
@@ -37,12 +37,7 @@ import org.gcube.data.streams.generators.Generator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class DarwinCoreJob implements URLJob{
-
-	/**
-	 * 
-	 */
-	private static final long serialVersionUID = 1L;
+public class DarwinCoreJob extends URLJob{
 
 	private static Logger logger = LoggerFactory.getLogger(DarwinCoreJob.class);
 
@@ -67,10 +62,14 @@ public class DarwinCoreJob implements URLJob{
 		this.status = JobStatus.PENDING;
 		this.plugins = plugins;
 	}
 
 	@Override
-	public void run() {
+	public boolean isResubmitPermitted() {
+		return false;
+	}
+
+	@Override
+	public void execute() {
 		File darwincoreFile =null;
 		File errorFile = null;
 		try{

DWCAJobByChildren.java:
@@ -35,12 +35,7 @@ import org.gcube.data.spd.utils.Utils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class DWCAJobByChildren implements URLJob{
-
-	/**
-	 * 
-	 */
-	private static final long serialVersionUID = 1L;
+public class DWCAJobByChildren extends URLJob{
 
 	private static Logger logger = LoggerFactory.getLogger(DWCAJobByChildren.class);
 
@@ -73,7 +68,7 @@ public class DWCAJobByChildren implements URLJob{
 	}
 
 	private AbstractPlugin pluginToUse = null;
 
 	private AbstractPlugin getPlugin(String key) throws Exception{
 		if (pluginToUse==null){
 			String pluginName = Util.getProviderFromKey(key);
@@ -89,7 +84,12 @@ public class DWCAJobByChildren implements URLJob{
 	}
 
 	@Override
-	public void run() {
+	public boolean isResubmitPermitted() {
+		return true;
+	}
+
+	@Override
+	public void execute() {
 		File errorFile = null;
 		File dwcaFile = null;
 		try{

DWCAJobByIds.java:
@@ -36,12 +36,7 @@ import org.gcube.data.streams.Stream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class DWCAJobByIds implements URLJob{
-
-	/**
-	 * 
-	 */
-	private static final long serialVersionUID = 1L;
+public class DWCAJobByIds extends URLJob{
 
 	private static Logger logger = LoggerFactory.getLogger(DWCAJobByChildren.class);
 
@@ -66,9 +61,17 @@ public class DWCAJobByIds implements URLJob{
 		this.status = JobStatus.PENDING;
 	}
 
 
 	@Override
-	public void run() {
+	public boolean isResubmitPermitted() {
+		return false;
+	}
+
+
+	@Override
+	public void execute() {
 		File errorsFile= null;
 		File dwcaFile = null;
 		try{

LayerCreatorJob.java:
@@ -24,12 +24,8 @@ import org.slf4j.LoggerFactory;
 
 import com.thoughtworks.xstream.XStream;
 
-public class LayerCreatorJob implements URLJob{
-
-	/**
-	 * 
-	 */
-	private static final long serialVersionUID = -6560318170190865925L;
+public class LayerCreatorJob extends URLJob{
 
 	private static Logger logger = LoggerFactory.getLogger(LayerCreatorJob.class);
 
@@ -55,6 +51,11 @@ public class LayerCreatorJob implements URLJob{
 		this.plugins = plugins;
 		this.metadata = (MetadataDetails) new XStream().fromXML(metadataDetails);
 	}
 
+	@Override
+	public boolean isResubmitPermitted() {
+		return false;
+	}
+
 	@Override
 	public JobStatus getStatus() {
@@ -107,7 +108,7 @@ public class LayerCreatorJob implements URLJob{
 	}
 
 	@Override
-	public void run() {
+	public void execute() {
 		try{
 			this.startDate = Calendar.getInstance();
 			this.status = JobStatus.RUNNING;

org/gcube/data/spd/manager/AppInitializer.java:
@@ -1,7 +1,20 @@
 package org.gcube.data.spd.manager;
 
-import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.util.HashMap;
+import java.util.Map.Entry;
+
+import net.sf.ehcache.CacheManager;
+
 import org.gcube.common.scope.api.ScopeProvider;
+import org.gcube.data.spd.executor.jobs.SerializableSpeciesJob;
+import org.gcube.data.spd.executor.jobs.SpeciesJob;
 import org.gcube.data.spd.plugin.PluginManager;
 import org.gcube.data.spd.utils.ExecutorsContainer;
 import org.gcube.smartgears.ApplicationManager;
@@ -12,27 +25,96 @@ import org.slf4j.LoggerFactory;
 
 public class AppInitializer implements ApplicationManager {
 
-	private static final Logger log = LoggerFactory.getLogger(AppInitializer.class);
+	private static final Logger logger = LoggerFactory.getLogger(AppInitializer.class);
+
+	private static final String jobMapFileNamePrefix = "jobs";
+
+	private HashMap<String, SpeciesJob> jobMap;
 
 	private PluginManager pluginManager;
 
 	private ApplicationContext ctx = ContextProvider.get();
 
 	@Override
 	public void onInit() {
-		log.info("security token is "+SecurityTokenProvider.instance.get());
+		logger.info("[TEST] init called for SPD in scope {} ", ScopeProvider.instance.get());
+		jobMap= new HashMap<String, SpeciesJob>();
 		pluginManager = new PluginManager(ctx);
+		loadJobMap();
 	}
 
 	@Override
 	public void onShutdown() {
+		storeJobMap();
 		pluginManager.shutdown();
 		pluginManager = null;
 		ExecutorsContainer.stopAll();
-		log.info("App Initializer shut down on "+ScopeProvider.instance.get());
+		CacheManager.getInstance().shutdown();
+		logger.info("[TEST] App Initializer shut down on "+ScopeProvider.instance.get());
 	}
 
 	public PluginManager getPluginManager() {
 		return pluginManager;
 	}
 
+	public HashMap<String, SpeciesJob> getJobMap() {
+		return jobMap;
+	}
+
+	private void storeJobMap(){
+		String scopeNamePrefix= ScopeProvider.instance.get().replaceAll("/", ".");
+		String jobMapFileName = jobMapFileNamePrefix+scopeNamePrefix;
+		logger.trace("[TEST] storing job map file {}",jobMapFileName);
+		HashMap<String, SerializableSpeciesJob> spdJobMap = new HashMap<String, SerializableSpeciesJob>();
+		for (Entry<String, SpeciesJob> entry : jobMap.entrySet() ){
+			logger.trace("[TEST] stored job with id {}",entry.getKey());
+			SpeciesJob spdJob = entry.getValue();
+			if (spdJob instanceof SerializableSpeciesJob)
+				spdJobMap.put(entry.getKey(),(SerializableSpeciesJob)spdJob);
+			else
+				spdJobMap.put(entry.getKey(), new SerializableSpeciesJob(spdJob.getStatus(), spdJob.getId(),
+						spdJob.getCompletedEntries(), spdJob.getStartDate(), spdJob.getEndDate()));
+		}
+
+		File file = null;
+		try {
+			file = ctx.persistence().file(jobMapFileName);
+			if (file.exists()) file.delete();
+			file.createNewFile();
+
+			try(ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(file))){
+				oos.writeObject(spdJobMap);
+			}
+		} catch (Exception e) {
+			logger.error("error writing jobMap of type "+jobMap.getClass().getName()+" on disk",e);
+			if (file !=null && file.exists()) file.delete();
+		}
+	}
+
+	@SuppressWarnings("unchecked")
+	private void loadJobMap(){
+		String scopeNamePrefix= ScopeProvider.instance.get().replaceAll("/", ".");
+		String jobMapFileName = jobMapFileNamePrefix+scopeNamePrefix;
+		logger.trace("[TEST] loading job Map from file {} ",jobMapFileName);
+		File file = ctx.persistence().file(jobMapFileName);
+		if (file.exists()){
+			file.delete();
+		}
+		try {
+			file.createNewFile();
+		} catch (IOException e1) {
+			logger.error("cannot create file {}",file.getAbsolutePath(),e1);
+			jobMap= new HashMap<String, SpeciesJob>();
+			return;
+		}
+		try (ObjectInput ois = new ObjectInputStream(new FileInputStream(file))){
+			jobMap = (HashMap<String, SpeciesJob>) ois.readObject();
+		} catch (Exception e) {
+			logger.warn("[TEST] the file {} doesn't exist, creating an empty map",file.getAbsolutePath());
+			jobMap= new HashMap<String, SpeciesJob>();
+		}
+		logger.trace("[TEST] loaded map size is {} ",jobMap.size());
+	}
+
 }

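The job map is persisted with plain Java serialization under a per-scope file name: the prefix "jobs" plus the current scope with '/' replaced by '.'. A self-contained sketch of the same store/load round trip, using a plain String map in place of SerializableSpeciesJob and a local path in place of ctx.persistence():

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.HashMap;

public class MapPersistenceSketch {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) throws Exception {
        String scope = "/gcube/devsec"; // example scope
        File file = new File("jobs" + scope.replaceAll("/", "."));

        HashMap<String, String> map = new HashMap<>();
        map.put("job-1", "COMPLETED");

        // store: the whole map is written with a single writeObject call
        try (ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(file))) {
            oos.writeObject(map);
        }

        // load: read it back, falling back to an empty map on any failure
        HashMap<String, String> loaded;
        try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(file))) {
            loaded = (HashMap<String, String>) ois.readObject();
        } catch (Exception e) {
            loaded = new HashMap<>();
        }
        System.out.println(loaded.size()); // prints 1
    }
}
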
Search.java:
@@ -66,6 +66,7 @@ public class Search<T extends ResultElement> {
 
 	@SuppressWarnings("unchecked")
 	public void search(Map<String, Searchable<T>> searchableMapping, Query parsedQuery, Condition ... properties) throws UnsupportedCapabilityException, UnsupportedPluginException, Exception {
 
 		ClosableWriter<T> outputWriter = new Writer<T>(wrapper);
 		//preparing the query (and checking semantic)
 		List<Worker<?, ?>> workers = new ArrayList<Worker<?, ?>>();

org/gcube/data/spd/resources/Executor.java:
@@ -1,14 +1,6 @@
 package org.gcube.data.spd.resources;
 
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.ObjectInput;
-import java.io.ObjectInputStream;
-import java.io.ObjectOutputStream;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map.Entry;
 
@@ -55,9 +47,7 @@ public class Executor {
 
 	private static Logger logger = LoggerFactory.getLogger(Executor.class);
 
-	public static HashMap<String, SpeciesJob> jobMap= new HashMap<String, SpeciesJob>();
-
-	private static final String jobMapFileName = "jobs.ser";
-
 	AppInitializer initializer = (AppInitializer)ApplicationManagerProvider.get(AppInitializer.class);
 
@@ -66,7 +56,6 @@ public class Executor {
 	@GET
 	@Path("result/{jobKey}")
 	public String getResultLink(@PathParam("jobKey") String jobKey) throws InvalidIdentifierException {
 
 		String node;
 		String jobId;
-
@@ -79,11 +68,14 @@ public class Executor {
 		}
 
 		if (node.equals(cxt.container().profile(HostingNode.class).id())){
-			if (!jobMap.containsKey(jobId)) throw new InvalidIdentifierException(jobId);
-			return ((URLJob)jobMap.get(jobId)).getResultURL();
+			if (!initializer.getJobMap().containsKey(jobId)){
+				logger.error("id not valid {} ",jobId);
+				throw new InvalidIdentifierException(jobId);
+			}
+			return ((URLJob)initializer.getJobMap().get(jobId)).getResultURL();
 		}else {
-			//TODO
-			return null; // remoteJobCall(node).getResultLink(jobKey);
+			logger.error("node not valid {} ",node);
+			throw new InvalidIdentifierException();
 		}
 	}
 
@@ -103,18 +95,21 @@ public class Executor {
 		}
 
 		if (node.equals(cxt.container().profile(HostingNode.class).id())){
-			if (!jobMap.containsKey(jobId)) throw new InvalidIdentifierException();
-			return ((URLJob)jobMap.get(jobId)).getErrorURL();
+			if (!initializer.getJobMap().containsKey(jobId)){
+				logger.error("id not valid {} ",jobId);
+				throw new InvalidIdentifierException();
+			}
+			return ((URLJob)initializer.getJobMap().get(jobId)).getErrorURL();
 		}else{
-			//TODO
-			return null; // remoteJobCall(node).getErrorLink(jobKey);
+			logger.error("node not valid {} ",node);
+			throw new InvalidIdentifierException();
 		}
 	}
 
 	@GET
 	@Path("status/{jobKey}")
 	public CompleteJobStatus getStatus(@PathParam("jobKey") String jobKey) throws InvalidIdentifierException {
+		logger.trace("[TEST] job status called with id {}", jobKey);
 		String node;
 		String jobId;
 
@@ -122,18 +117,18 @@ public class Executor {
 			node = extractNode(jobKey);
 			jobId = extractId(jobKey);
 		}catch (IdNotValidException e) {
-			logger.error("id not valid "+jobKey,e);
+			logger.error("id not valid {} ",jobKey,e);
 			throw new InvalidIdentifierException(jobKey);
 		}
 
 		if (node.equals(cxt.container().profile(HostingNode.class).id())){
 
-			if (!jobMap.containsKey(jobId)){
-				logger.trace("id not found, throwing IDNotValidExceoption");
+			if (!initializer.getJobMap().containsKey(jobId)){
+				logger.warn("id not found, throwing IdNotValidException");
 				throw new InvalidIdentifierException(jobId);
 			}
 
-			SpeciesJob job = jobMap.get(jobId);
+			SpeciesJob job = initializer.getJobMap().get(jobId);
 
 			CompleteJobStatus status = new CompleteJobStatus();
 
@@ -155,60 +150,17 @@ public class Executor {
 
 			return status;
 		}else{
-			//TODO
-			return null ; //remoteJobCall(node).getStatus(jobKey);
+			logger.error("node not valid {} ",node);
+			throw new InvalidIdentifierException();
 		}
 
 	}
 
-	public static void storeJobMap(ApplicationContext context){
-		logger.trace("calling store job Map");
-		ObjectOutputStream oos = null;
-		File file = null;
-		try {
-			file = context.persistence().file(jobMapFileName);
-			//if (file.exists()) file.delete();
-			//file.createNewFile();
-			oos = new ObjectOutputStream(new FileOutputStream(file));
-			oos.writeObject(jobMap);
-
-		} catch (Exception e) {
-			logger.error("error writing jobMapof type "+jobMap.getClass().getName()+" on disk",e);
-			if (file !=null && file.exists()) file.delete();
-		}finally{
-			if (oos!=null)
-				try {
-					oos.close();
-				} catch (IOException e) {
-					logger.warn("error closing stream",e);
-				}
-		}
-	}
-
-	@SuppressWarnings("unchecked")
-	public static void loadJobMap(ApplicationContext context){
-		logger.trace("calling load job Map");
-		ObjectInput ois;
-		try {
-			ois = new ObjectInputStream(new FileInputStream(context.persistence().file(jobMapFileName)));
-			jobMap = (HashMap<String, SpeciesJob>) ois.readObject();
-			for (Entry<String, SpeciesJob> entry : jobMap.entrySet())
-				if (entry.getValue().getStatus().equals(JobStatus.RUNNING))
-					entry.getValue().setStatus(JobStatus.FAILED);
-			ois.close();
-		} catch (Exception e) {
-			logger.trace("the file doesn't exist, creating an empty map");
-			jobMap = new HashMap<String, SpeciesJob>();
-		}
-	}
-
 	@DELETE
 	@Path("{jobKey}")
 	public void removeJob(@PathParam("jobKey") String jobId) throws InvalidIdentifierException {
-		if (!jobMap.containsKey(jobId)) throw new InvalidIdentifierException(jobId);
-		jobMap.remove(jobId);
+		if (!initializer.getJobMap().containsKey(jobId)) throw new InvalidIdentifierException(jobId);
+		initializer.getJobMap().remove(jobId);
 	}
 
@@ -249,15 +201,15 @@ public class Executor {
 
 		if (job ==null || !job.validateInput(request.getInput()))
 			throw new InvalidJobException();
 
 		String jobId = executeJob(job);
+		logger.trace("[TEST] job submitted with id {}", jobId);
 		return new SubmitJobResponse(job.getId(), jobId, cxt.profile(GCoreEndpoint.class).id());
 	}
 
 	private String executeJob(SpeciesJob job){
-		jobMap.put(job.getId(), job);
+		initializer.getJobMap().put(job.getId(), job);
 		ExecutorsContainer.execJob(job);
 		return createKey(job.getId());
 	}

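The lookup endpoints above now fail fast: an unknown job id or a non-local node logs an error and throws InvalidIdentifierException instead of returning null to the caller. A condensed sketch of that contract (JobRegistry and find are invented for illustration; only the exception shape mirrors the service):

import java.util.HashMap;
import java.util.Map;

public class JobRegistrySketch {

    static class InvalidIdentifierException extends Exception {
        InvalidIdentifierException(String id) { super(id); }
    }

    private final Map<String, String> jobs = new HashMap<>();

    // was: return null on a miss, which pushed the failure onto the caller
    String find(String jobId) throws InvalidIdentifierException {
        String job = jobs.get(jobId);
        if (job == null) throw new InvalidIdentifierException(jobId);
        return job;
    }
}
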
org/gcube/data/spd/utils/ExecutorsContainer.java:
@@ -3,15 +3,17 @@ package org.gcube.data.spd.utils;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+
 public class ExecutorsContainer {
 
 	private static final int MAX_SEARCH_THREAD_POOL= 100;
 
 	private static final int MAX_JOB_POOL= 10;
 
-	private static ExecutorService searchThreadPool = Executors.newFixedThreadPool(MAX_SEARCH_THREAD_POOL);;
+	private static ExecutorService searchThreadPool = Executors.newFixedThreadPool(MAX_SEARCH_THREAD_POOL, new ThreadFactoryBuilder().setNameFormat("spd-search-thread-%d").build());
 
-	private static ExecutorService jobThreadPool = Executors.newFixedThreadPool(MAX_JOB_POOL);
+	private static ExecutorService jobThreadPool = Executors.newFixedThreadPool(MAX_JOB_POOL,new ThreadFactoryBuilder().setNameFormat("spd-job-thread-%d").build());
 
 
 	public static void execSearch(Runnable runnable){
