species-products-discovery/src/main/java/org/gcube/data/spd/resources/Executor.java

package org.gcube.data.spd.resources;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map.Entry;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.MediaType;
import org.gcube.common.resources.gcore.HostingNode;
import org.gcube.data.spd.executor.jobs.SpeciesJob;
import org.gcube.data.spd.executor.jobs.URLJob;
import org.gcube.data.spd.executor.jobs.csv.CSVCreator;
import org.gcube.data.spd.executor.jobs.csv.CSVCreatorForOMJob;
import org.gcube.data.spd.executor.jobs.darwincore.DarwinCoreJob;
import org.gcube.data.spd.executor.jobs.dwca.DWCAJobByChildren;
import org.gcube.data.spd.executor.jobs.dwca.DWCAJobByIds;
import org.gcube.data.spd.executor.jobs.layer.LayerCreatorJob;
import org.gcube.data.spd.manager.AppInitializer;
import org.gcube.data.spd.model.exceptions.IdNotValidException;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.gcube.data.spd.model.service.exceptions.InvalidIdentifierException;
import org.gcube.data.spd.model.service.exceptions.InvalidJobException;
import org.gcube.data.spd.model.service.types.CompleteJobStatus;
import org.gcube.data.spd.model.service.types.JobStatus;
import org.gcube.data.spd.model.service.types.NodeStatus;
import org.gcube.data.spd.model.service.types.SubmitJob;
import org.gcube.data.spd.model.util.SerializableList;
import org.gcube.data.spd.plugin.PluginManager;
import org.gcube.data.spd.utils.DynamicList;
import org.gcube.data.spd.utils.DynamicMap;
import org.gcube.data.spd.utils.ExecutorsContainer;
import org.gcube.smartgears.ApplicationManagerProvider;
import org.gcube.smartgears.ContextProvider;
import org.gcube.smartgears.context.application.ApplicationContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
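
/**
 * REST resource exposing the asynchronous job facilities of the Species Products Discovery service:
 * job submission, status inspection, result/error link retrieval and job removal.
 * Jobs are addressed through a composite key of the form "nodeId||jobId".
 */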
@Path("job")
public class Executor {

    private static Logger logger = LoggerFactory.getLogger(Executor.class);

    public static HashMap<String, SpeciesJob> jobMap = new HashMap<String, SpeciesJob>();

    private static final String jobMapFileName = "jobs.ser";

    AppInitializer initializer = (AppInitializer) ApplicationManagerProvider.get(AppInitializer.class);

    ApplicationContext cxt = ContextProvider.get();
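
    /**
     * Returns the URL of the result produced by the job identified by the given composite key.
     * The key encodes the owning node and the local job id; calls for jobs owned by a different
     * node are not handled yet (remote dispatch is still a TODO) and currently return null.
     */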
    @GET
    @Path("result/{jobKey}")
    public String getResultLink(@PathParam("jobKey") String jobKey) throws InvalidIdentifierException {
        String node;
        String jobId;
        try {
            node = extractNode(jobKey);
            jobId = extractId(jobKey);
        } catch (IdNotValidException e) {
            logger.error("id not valid " + jobKey, e);
            throw new InvalidIdentifierException(jobKey);
        }
        if (node.equals(cxt.container().profile(HostingNode.class).id())) {
            if (!jobMap.containsKey(jobId)) throw new InvalidIdentifierException(jobId);
            return ((URLJob) jobMap.get(jobId)).getResultURL();
        } else {
            //TODO
            return null; // remoteJobCall(node).getResultLink(jobKey);
        }
    }
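
    /**
     * Returns the URL of the error report produced by the job identified by the given composite key.
     * As for the result link, remote dispatch to the owning node is not implemented yet.
     */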
    @GET
    @Path("error/{jobKey}")
    public String getErrorLink(@PathParam("jobKey") String jobKey) throws InvalidIdentifierException {
        String node;
        String jobId;
        try {
            node = extractNode(jobKey);
            jobId = extractId(jobKey);
        } catch (IdNotValidException e) {
            logger.error("id not valid " + jobKey, e);
            throw new InvalidIdentifierException(jobKey);
        }
        if (node.equals(cxt.container().profile(HostingNode.class).id())) {
            if (!jobMap.containsKey(jobId)) throw new InvalidIdentifierException(jobId);
            return ((URLJob) jobMap.get(jobId)).getErrorURL();
        } else {
            //TODO
            return null; // remoteJobCall(node).getErrorLink(jobKey);
        }
    }
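
    /**
     * Returns the complete status of a job (state, start/end date, completed entries).
     * For DWCA-by-children jobs the status of each per-taxon sub job is reported as well.
     */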
    @GET
    @Path("status/{jobKey}")
    public CompleteJobStatus getStatus(@PathParam("jobKey") String jobKey) throws InvalidIdentifierException {
        String node;
        String jobId;
        try {
            node = extractNode(jobKey);
            jobId = extractId(jobKey);
        } catch (IdNotValidException e) {
            logger.error("id not valid " + jobKey, e);
            throw new InvalidIdentifierException(jobKey);
        }
        if (node.equals(cxt.container().profile(HostingNode.class).id())) {
            if (!jobMap.containsKey(jobId)) {
                logger.trace("id not found, throwing InvalidIdentifierException");
                throw new InvalidIdentifierException(jobId);
            }
            SpeciesJob job = jobMap.get(jobId);
            CompleteJobStatus status = new CompleteJobStatus();
            if (job instanceof DWCAJobByChildren) {
                DWCAJobByChildren dwcaJob = (DWCAJobByChildren) job;
                List<NodeStatus> childrenStatus = new ArrayList<NodeStatus>();
                for (Entry<TaxonomyItem, JobStatus> entry : dwcaJob.getMapSubJobs().entrySet()) {
                    NodeStatus childStatus = new NodeStatus(entry.getKey().getScientificName(), entry.getValue());
                    childrenStatus.add(childStatus);
                }
                status.setSubNodes(childrenStatus);
            }
            status.setStatus(job.getStatus());
            status.setStartDate(job.getStartDate());
            status.setEndDate(job.getEndDate());
            status.setCompletedEntries(job.getCompletedEntries());
            return status;
        } else {
            //TODO
            return null; // remoteJobCall(node).getStatus(jobKey);
        }
    }
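
    /**
     * Serializes the in-memory job map to the container persistence area, so that job
     * information survives a restart of the service.
     */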
    public static void storeJobMap(ApplicationContext context) {
        logger.trace("calling store job Map");
        ObjectOutputStream oos = null;
        File file = null;
        try {
            file = context.persistence().file(jobMapFileName);
            //if (file.exists()) file.delete();
            //file.createNewFile();
            oos = new ObjectOutputStream(new FileOutputStream(file));
            oos.writeObject(jobMap);
        } catch (Exception e) {
            logger.error("error writing jobMap of type " + jobMap.getClass().getName() + " on disk", e);
            if (file != null && file.exists()) file.delete();
        } finally {
            if (oos != null)
                try {
                    oos.close();
                } catch (IOException e) {
                    logger.warn("error closing stream", e);
                }
        }
    }
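
    /**
     * Restores the job map previously saved by {@link #storeJobMap(ApplicationContext)}.
     * Jobs that were still RUNNING when the map was stored cannot be resumed and are marked FAILED.
     * If the map cannot be read, an empty one is created.
     */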
    @SuppressWarnings("unchecked")
    public static void loadJobMap(ApplicationContext context) {
        logger.trace("calling load job Map");
        try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(context.persistence().file(jobMapFileName)))) {
            jobMap = (HashMap<String, SpeciesJob>) ois.readObject();
            for (Entry<String, SpeciesJob> entry : jobMap.entrySet())
                if (entry.getValue().getStatus().equals(JobStatus.RUNNING))
                    entry.getValue().setStatus(JobStatus.FAILED);
        } catch (Exception e) {
            logger.trace("could not load the job map (the file may not exist), creating an empty one");
            jobMap = new HashMap<String, SpeciesJob>();
        }
    }
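
    /**
     * Adds a batch of input identifiers to the stream feeding the job identified by the given key.
     * An empty list closes the job's input stream; returns false if an identifier could not be added.
     */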
    @PUT
    @Path("input/{jobKey}")
    @Consumes(MediaType.APPLICATION_XML)
    public boolean submitJob(@PathParam("jobKey") String jobKey, SerializableList<String> input) throws InvalidIdentifierException {
        //String node;
        String jobId;
        try {
            //node = extractNode(jobKey);
            jobId = extractId(jobKey);
        } catch (IdNotValidException e) {
            logger.error("id not valid " + jobKey, e);
            throw new InvalidIdentifierException(jobKey);
        }
        logger.trace("job Id extracted is {} ", jobId);
        if (input.getValuesList().isEmpty()) {
            logger.info("closing input stream");
            DynamicMap.remove(jobId);
        } else {
            DynamicList list = DynamicMap.get(jobId);
            for (String id : input.getValuesList()) {
                logger.trace("elaborating input id {}", id);
                if (!list.add(id)) return false;
            }
        }
        return true;
    }
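
    /**
     * Removes the job with the given identifier from the local job map.
     */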
    @DELETE
    @Path("{jobKey}")
    public void removeJob(@PathParam("jobKey") String jobId) throws InvalidIdentifierException {
        if (!jobMap.containsKey(jobId)) throw new InvalidIdentifierException(jobId);
        jobMap.remove(jobId);
    }
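
    /**
     * Creates a job of the requested type, validates its input and schedules it for execution.
     * Returns the composite key ("nodeId||jobId") that clients use to track the job.
     */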
    @POST
    @Path("execute")
    @Consumes(MediaType.APPLICATION_XML)
    public String submitJob(SubmitJob request) throws InvalidJobException {
        PluginManager pluginManager = initializer.getPluginManager();
        SpeciesJob job = null;
        switch (request.getJob()) {
            case DWCAByChildren:
                job = new DWCAJobByChildren(request.getInput(), pluginManager.plugins());
                break;
            case DWCAById:
                job = new DWCAJobByIds(pluginManager.plugins());
                DynamicMap.put(job.getId());
                break;
            case CSV:
                job = new CSVCreator(pluginManager.plugins());
                DynamicMap.put(job.getId());
                break;
            case CSVForOM:
                job = new CSVCreatorForOMJob(pluginManager.plugins());
                DynamicMap.put(job.getId());
                break;
            case DarwinCore:
                job = new DarwinCoreJob(pluginManager.plugins());
                DynamicMap.put(job.getId());
                break;
            case LayerCreator:
                job = new LayerCreatorJob(request.getInput(), pluginManager.plugins());
                DynamicMap.put(job.getId());
                break;
            default:
                throw new InvalidJobException();
        }
        if (job == null || !job.validateInput(request.getInput()))
            throw new InvalidJobException();
        return executeJob(job);
    }
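
    // registers the job locally and hands it over to the shared executor, returning its composite key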
    private String executeJob(SpeciesJob job) {
        jobMap.put(job.getId(), job);
        ExecutorsContainer.execJob(job);
        return createKey(job.getId());
    }
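
    // a job key has the form "nodeId||jobId"; the helpers below split and rebuild it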
    private static String extractNode(String key) throws IdNotValidException {
        String[] splitted = key.split("\\|\\|");
        if (splitted.length == 2)
            return splitted[0];
        else throw new IdNotValidException();
    }

    private static String extractId(String key) throws IdNotValidException {
        String[] splitted = key.split("\\|\\|");
        if (splitted.length == 2)
            return splitted[1];
        else throw new IdNotValidException();
    }

    private String createKey(String id) {
        String node = cxt.container().profile(HostingNode.class).id();
        return node + "||" + id;
    }
    /*
    private Executor remoteJobCall(String riId) throws InvalidIdentifierException {
        SimpleQuery query = queryFor(GCoreEndpoint.class);
        query.addCondition("$resource/ID/text() eq '" + riId + "'");
        DiscoveryClient<GCoreEndpoint> client = clientFor(GCoreEndpoint.class);
        List<GCoreEndpoint> addresses = client.submit(query);
        if (addresses.size() > 0) {
            GCoreEndpoint endpoint = addresses.get(0);
            URI address = endpoint.profile().endpointMap().get("gcube/data/speciesproductsdiscovery/executor").uri();
            try {
                Executor executorPT = executor().at(address).build();
                return executorPT;
            } catch (Exception e) {
                logger.trace("remote service error");
                throw new InvalidIdentifierException();
            }
        } else {
            logger.trace("remote job not found");
            throw new InvalidIdentifierException();
        }
    }
    */
}