updated Analytics Resolver

fixing CkanCatalogueConfigurationsReader

git-svn-id: http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/data-transfer/uri-resolver@174816 82a268e6-3cf1-43bd-a215-b396298e98cf
Francesco Mangiacrapa 2018-12-12 14:38:44 +00:00
parent ca997f7ce7
commit 80f090086d
7 changed files with 118 additions and 142 deletions

View File

@@ -2,6 +2,7 @@ package org.gcube.datatransfer.resolver;
import javax.ws.rs.Path;
import org.gcube.data.analysis.dminvocation.model.DataMinerInvocation;
import org.gcube.datatransfer.resolver.init.UriResolverSmartGearManagerInit;
import org.gcube.datatransfer.resolver.requesthandler.TokenSetter;
import org.gcube.datatransfer.resolver.services.CatalogueResolver;
@@ -15,6 +16,7 @@ public class UriResolver extends ResourceConfig {
public UriResolver() {
// Register all resources present under the package.
packages(CatalogueResolver.class.getPackage().getName(), TokenSetter.class.getPackage().getName());
packages(DataMinerInvocation.class.getPackage().getName());
}
}

View File

@@ -69,8 +69,11 @@ public class CkanCatalogueConfigurationsReader {
try{
URI toURL = new URI(privatePortletURL);
String publicURL = privatePortletURL.startsWith("https://")?"https://"+toURL.getHost():"http://"+toURL.getHost();
String realiveURLToPublicCtlg = getRelativeURLToCatalogue();
links.setPublicPortletURL(publicURL+"/"+realiveURLToPublicCtlg);
//It returns the string "catalogue"
String prefixToPublicCtlg = getRelativeURLToCatalogue();
//Replacing for example "ckan-bb" with "catalogue-bb"
String publicCatalogueName = extractCatalogueName(privatePortletURL, prefixToPublicCtlg);
links.setPublicPortletURL(publicURL+"/"+publicCatalogueName);
}catch(Exception e){
logger.warn("Erron on generating public catalogue URL from private URL: "+privatePortletURL, e);
}
@@ -84,43 +87,31 @@ public class CkanCatalogueConfigurationsReader {
}
return links;
}
/**
* Extracts the public catalogue name from a private CKAN catalogue URL.
*
* @param privateCKANCatalogueURL the private CKAN catalogue URL
* @param replaceCKANWith the prefix that replaces "ckan" in the host name (e.g. "catalogue")
* @return the public catalogue host label (e.g. "catalogue-bb")
*/
public static String extractCatalogueName(String privateCKANCatalogueURL, String replaceCKANWith){
/*
String privatePortletURL = getPortletUrlForScopeFromIS();
links.setPrivatePortletURL(privatePortletURL);
//Building public URL from private portlet URL
try{
URI toURL = new URI(privatePortletURL);
String publicURL = privatePortletURL.startsWith("https://")?"https://"+toURL.getHost():"http://"+toURL.getHost();
String realiveURLToPublicCtlg = getRelativeURLToCatalogue();
links.setPublicPortletURL(publicURL+"/"+realiveURLToPublicCtlg);
}catch(Exception e){
logger.warn("Erron on generating public catalogue URL from private URL: "+privatePortletURL, e);
}
//Getting the CKAN Portet URL for current scope
try{
String ckanPortletURL = CatalogueServiceEndpointReader.getCatalogueUrl();
links.setCkanURL(ckanPortletURL);
}catch(Exception e){
logger.warn("Erron on getting CKAN Porlet URL for scope: "+ScopeProvider.instance.get(), e);
}
return links;
*/
privateCKANCatalogueURL = privateCKANCatalogueURL.replaceFirst("https://ckan", replaceCKANWith);
privateCKANCatalogueURL = privateCKANCatalogueURL.replaceFirst("http://ckan", replaceCKANWith);
return privateCKANCatalogueURL.substring(0,privateCKANCatalogueURL.indexOf("."));
}
/**
* Retrieve a ckan dataset given its id. The CkanClient is used, without api key. The result is null also when the dataset is private.
* @param datasetIdorName
* @param catalogueURL
* @return
* @throws Exception
*
* @param datasetIdorName the dataset id or name
* @param catalogueURL the catalogue URL
* @return the dataset
* @throws Exception the exception
*/
public static CkanDataset getDataset(String datasetIdorName, String catalogueURL) throws Exception{
logger.info("Performing request GET CKAN dataset with id: " + datasetIdorName);

View File

@@ -5,15 +5,10 @@ package org.gcube.datatransfer.resolver.services;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.net.URL;
import java.nio.file.Files;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
@@ -25,105 +20,67 @@ import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.xml.bind.JAXBException;
import org.apache.commons.io.IOUtils;
import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.common.scope.impl.ScopeBean;
import org.gcube.common.scope.impl.ScopeBean.Type;
import org.gcube.common.storagehub.client.dsl.FileContainer;
import org.gcube.common.storagehub.client.dsl.StorageHubClient;
import org.gcube.data.analysis.dminvocation.ActionType;
import org.gcube.data.analysis.dminvocation.DataMinerInvocationManager;
import org.gcube.data.analysis.dminvocation.model.DataMinerInputParams;
import org.gcube.data.analysis.dminvocation.model.DataMinerInvocation;
import org.gcube.data.analysis.dminvocation.model.DataMinerParam;
import org.gcube.data.analysis.dminvocation.model.DataMinerParameters;
import org.gcube.datatransfer.resolver.dataminer.DataMinerRequest;
import org.gcube.datatransfer.resolver.requesthandler.TokenSetter;
import org.gcube.datatransfer.resolver.services.error.ExceptionManager;
import org.gcube.datatransfer.resolver.util.Util;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
/**
* The Class DataMinerResolver.
* The Class AnalyticsCreateResolver.
*
* @author Francesco Mangiacrapa francesco.mangiacrapa@isti.cnr.it
* Nov 28, 2018
* @author Francesco Mangiacrapa at ISTI-CNR (francesco.mangiacrapa@isti.cnr.it)
* Dec 12, 2018
*/
@Path("/analytics")
public class AnalyticsCreateResolver {
/**
*
*/
private static final String DATAMINER_INVOCATION_MODEL = "dim";
/**
*
*/
private static final String UTF_8 = "UTF-8";
private static Logger logger = LoggerFactory.getLogger(AnalyticsCreateResolver.class);
private static String helpURI = "https://wiki.gcube-system.org/gcube/URI_Resolver#Analitycs_Resolver";
private static SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd_HH:mm:ss.SSS");
/**
* Post catalogue.
*
* @param req the req
* @param jsonRequest the json request
* @param body the body
* @return the response
*/
@POST
@Path("/create")
@Consumes(MediaType.APPLICATION_JSON)
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
public Response postCatalogue(@Context HttpServletRequest req, DataMinerRequest jsonRequest) {
public Response postCatalogue(@Context HttpServletRequest req, String body) {
logger.info(this.getClass().getSimpleName()+" POST starts...");
logger.info("body is: "+body);
logger.info("The body contains the request: "+jsonRequest.toString());
//SIMULATING SMART GEAR BEHAVIOUR - VALIDATING TOKEN
/*String contextToken = req.getParameter("gcube-token");
if(contextToken==null || contextToken.isEmpty()){
contextToken = req.getHeader("gcube-token");
logger.info("Read context token from HTTP header...");
}
if(contextToken==null || contextToken.isEmpty()){
logger.error("Context Token not passed");
ExceptionManager.throwUnauthorizedException(req, "You are not authorized. You must pass a gcube-token of VRE", this.getClass(), helpURI);
}
String scope = "";
DataMinerInvocation jsonRequest = null;
try {
AuthorizationEntry entry = authorizationService().get(contextToken);
scope = entry.getContext();
jsonRequest = DataMinerInvocationManager.getInstance().unmarshaling(IOUtils.toInputStream(body), org.gcube.data.analysis.dminvocation.MediaType.ApplicationJSON, true);
}
catch (Exception e1) {
logger.error("Unresolved token: "+contextToken, e1);
ExceptionManager.throwUnauthorizedException(req, "Your token "+contextToken+" seems not valid, it is unscoped. Have you passed a valid token of VRE?", this.getClass(), helpURI);
catch (IOException | JAXBException | SAXException e1) {
logger.error("The body is not a valid DataMinerInvocation JSON request",e1);
ExceptionManager.throwBadRequestException(req, "Bad 'dataminer-invocation' JSON request: \n"+e1.getCause().getMessage(), this.getClass(), helpURI);
}
String appToken = req.getServletContext().getInitParameter(TokenSetter.ROOT_APP_TOKEN);
if(contextToken.compareTo(appToken)==0){
logger.error("Token not passed, SecurityTokenProvider contains the root app token: "+appToken.substring(0,10)+"...");
ExceptionManager.throwUnauthorizedException(req, "You are not authorized. You must pass a token of VRE", this.getClass(), helpURI);
}
//TOKEN AND SCOPE SHOULD BE VALID SETTINGS THEM
SecurityTokenProvider.instance.set(contextToken);
ScopeProvider.instance.set(scope);
//END SIMULATING SMART GEAR BEHAVIOUR - VALIDATING TOKEN
*/
logger.debug("The body contains the request: "+jsonRequest.toString());
String contextToken = SecurityTokenProvider.instance.get();
String scope = ScopeProvider.instance.get();
logger.info("SecurityTokenProvider contextToken: "+contextToken);
logger.info("ScopeProvider scope: "+scope);
// logger.info("SecurityTokenProvider contextToken: "+contextToken);
logger.info("ScopeProvider has scope: "+scope);
String appToken = req.getServletContext().getInitParameter(TokenSetter.ROOT_APP_TOKEN);
@@ -150,55 +107,53 @@ public class AnalyticsCreateResolver {
if(scopeBean.is(Type.VRE)){
String vreName = scopeBean.name();
try {
// try {
String dataminerResolverURL = String.format("%s/%s", Util.getServerURL(req), "dataminer/get");
String dataminerResolverURL = String.format("%s/%s", Util.getServerURL(req), "analytics/get");
//Creating DM invocation file
DataMinerInvocation dmInvocation = new DataMinerInvocation();
dmInvocation.setOperatorId(operatorID);
dmInvocation.setActionType(ActionType.RUN);
if(jsonRequest.getActionType()==null)
jsonRequest.setActionType(ActionType.RUN);
DataMinerInputParams inputParams = new DataMinerInputParams();
List<DataMinerParam> listParam = new ArrayList<DataMinerParam>();
Map<String, String> parameters = jsonRequest.getParameters();
for (String param : parameters.keySet()) {
listParam.add(new DataMinerParam(param, parameters.get(param)));
}
inputParams.setListParam(listParam);
dmInvocation.setParameters(new DataMinerParameters(inputParams, null));
File tempInvocationFile = null;
try {
ByteArrayOutputStream xmlByteArray = DataMinerInvocationManager.marshaling(dmInvocation, org.gcube.data.analysis.dminvocation.MediaType.ApplicationXML);
ByteArrayOutputStream xmlByteArray = DataMinerInvocationManager.getInstance().marshaling(jsonRequest, org.gcube.data.analysis.dminvocation.MediaType.ApplicationXML, true);
String uniqueName = createDMInvocationFileName(jsonRequest.getOperatorId());
tempInvocationFile = createTempFile(uniqueName, ".xml", xmlByteArray.toByteArray());
//CREATE THE FILE ON STORAGE HUB
StorageHubClient shc = new StorageHubClient();
logger.info("Created StorageHubClient Instance, uploading file: "+tempInvocationFile.getName());
FileContainer fileContainer = shc.getWSRoot().uploadFile(new FileInputStream(tempInvocationFile), tempInvocationFile.getName(), "DataMinerInvocation Request created by "+this.getClass().getSimpleName());
logger.info("UPLOADED FILE at: "+fileContainer.getPublicLink());
URL thePublicLink = fileContainer.getPublicLink();
publicLinkToDMInvFile = thePublicLink!=null?thePublicLink.toString():null;
}
catch (IOException | JAXBException e) {
logger.error("Error on creating you request with "+dmInvocation+"", e);
ExceptionManager.throwBadRequestException(req, "Error on creating you request with "+dmInvocation, this.getClass(), helpURI);
catch (Exception e) {
logger.error("Error on creating 'dataminer-invocation:", e);
ExceptionManager.throwBadRequestException(req, "Error on creating your 'dataminer-invocation' request with "+jsonRequest+". \nPlease contact the support", this.getClass(), helpURI);
}finally{
//DELETING THE TEMP FILE
if(tempInvocationFile!=null && tempInvocationFile.exists())
tempInvocationFile.delete();
// if(tempInvocationFile!=null && tempInvocationFile.exists())
// tempInvocationFile.delete();
}
if(publicLinkToDMInvFile==null){
logger.error("Error on creating the public link to file");
ExceptionManager.throwBadRequestException(req, "Error on getting link to your 'dataminer-invocation' request. Plese contact the support "+jsonRequest, this.getClass(), helpURI);
}
String dataMinerURL = String.format("%s/%s?%s=%s", dataminerResolverURL, vreName, DATAMINER_INVOCATION_MODEL, URLEncoder.encode(publicLinkToDMInvFile, UTF_8));
String dataMinerURL = String.format("%s/%s?%s=%s", dataminerResolverURL, vreName, DATAMINER_INVOCATION_MODEL, publicLinkToDMInvFile);
logger.info("Returning Analytics URL: "+dataMinerURL);
return Response.ok(dataMinerURL).header("Location", dataMinerURL).build();
}
catch (UnsupportedEncodingException e) {
logger.error("Encoding error for "+publicLinkToDMInvFile+"", e);
ExceptionManager.throwBadRequestException(req, "Error on encoding the public link "+publicLinkToDMInvFile, this.getClass(), helpURI);
}
return null;
// }
// catch (UnsupportedEncodingException e) {
// logger.error("Encoding error for "+publicLinkToDMInvFile+"", e);
// ExceptionManager.throwBadRequestException(req, "Error on encoding the public link "+publicLinkToDMInvFile, this.getClass(), helpURI);
// }
// return null;
}else{
logger.error("The input scope "+scope+" is not a VRE");
@@ -225,7 +180,7 @@ public class AnalyticsCreateResolver {
File file = path.toFile();
// writing sample data
Files.write(path, data);
logger.info("Created the temfile: "+file.getAbsolutePath());
logger.info("Created the Temp File: "+file.getAbsolutePath());
return file;
}
@@ -242,8 +197,8 @@ public class AnalyticsCreateResolver {
if(index>0 && index<operatorId.length()){
fileName+="-"+operatorId.substring(index+1,operatorId.length());
}
String currentTimestamp = dateFormat.format(new Date());
fileName+="-"+currentTimestamp;
//String currentTimestamp = dateFormat.format(new Date());
fileName+="-"+System.currentTimeMillis();
return fileName;
}
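For reference, a hedged sketch of how a caller could build and serialize the dataminer-invocation body that /analytics/create now expects. It only uses model classes and DataMinerInvocationManager calls that appear in the diff above; the operator id and parameter values are placeholders, and JSON marshaling support is an assumption (the endpoint itself unmarshals the body from JSON).

import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
import java.util.List;

import org.gcube.data.analysis.dminvocation.ActionType;
import org.gcube.data.analysis.dminvocation.DataMinerInvocationManager;
import org.gcube.data.analysis.dminvocation.model.DataMinerInputParams;
import org.gcube.data.analysis.dminvocation.model.DataMinerInvocation;
import org.gcube.data.analysis.dminvocation.model.DataMinerParam;
import org.gcube.data.analysis.dminvocation.model.DataMinerParameters;

public class DataMinerInvocationBodySketch {

    public static void main(String[] args) throws Exception {
        // Placeholder operator id and parameter values, for illustration only.
        DataMinerInvocation invocation = new DataMinerInvocation();
        invocation.setOperatorId("my.operator.id");
        invocation.setActionType(ActionType.RUN);

        List<DataMinerParam> listParam = new ArrayList<DataMinerParam>();
        listParam.add(new DataMinerParam("param1", "value1"));
        DataMinerInputParams inputParams = new DataMinerInputParams();
        inputParams.setListParam(listParam);
        invocation.setParameters(new DataMinerParameters(inputParams, null));

        // The resolver marshals to XML before uploading to the StorageHub; ApplicationJSON is
        // assumed to be supported as well, since the endpoint unmarshals the body from JSON.
        ByteArrayOutputStream json = DataMinerInvocationManager.getInstance()
                .marshaling(invocation, org.gcube.data.analysis.dminvocation.MediaType.ApplicationJSON, true);
        System.out.println(json.toString("UTF-8"));
    }
}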

View File

@@ -13,6 +13,7 @@ import javax.ws.rs.PathParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import org.gcube.datatransfer.resolver.applicationprofile.ApplicationProfileReader;
import org.gcube.datatransfer.resolver.caches.LoadingVREsScopeCache;
import org.gcube.datatransfer.resolver.services.error.ExceptionManager;
import org.slf4j.Logger;
@@ -28,22 +29,23 @@ import org.slf4j.LoggerFactory;
@Path("/analytics")
public class AnalyticsGetResolver {
/**
*
*/
private static final String UTF_8 = "UTF-8";
private static Logger logger = LoggerFactory.getLogger(AnalyticsGetResolver.class);
private static String helpURI = "https://wiki.gcube-system.org/gcube/URI_Resolver";
private static final String ORG_GCUBE_PORTLETS_USER_DATAMINERMANAGER_SERVER_DATA_MINER_MANAGER_SERVICE_IMPL =
"org.gcube.portlets.user.dataminermanager.server.DataMinerManagerServiceImpl";
private static final String APPLICATION_PROFILE = "ApplicationProfile";
private static String helpURI = "https://wiki.gcube-system.org/gcube/URI_Resolver#Analitycs_Resolver";
private static final String ANALYTICS_EXECUTOR_PORTLET_NAME = "Analytics Executor";
/**
* Gets the data miner.
*
* @param req the req
* @param provider the provider
* @param path the path
* @param remainPath the remain path
* @param vreName the vre name
* @return the data miner
*/
@GET
@@ -59,13 +61,20 @@ public class AnalyticsGetResolver {
try{
String fullScope = LoadingVREsScopeCache.getCache().get(vreName);
ApplicationProfileReader reader = null;
try{
reader = new ApplicationProfileReader(fullScope, APPLICATION_PROFILE, ORG_GCUBE_PORTLETS_USER_DATAMINERMANAGER_SERVER_DATA_MINER_MANAGER_SERVICE_IMPL, false);
}catch(Exception e){
logger.error("Error on reading the "+APPLICATION_PROFILE+" with APPID: "+ORG_GCUBE_PORTLETS_USER_DATAMINERMANAGER_SERVER_DATA_MINER_MANAGER_SERVICE_IMPL, e);
ExceptionManager.throwInternalErrorException(req, "Error on reading the Application Profile for the "+ANALYTICS_EXECUTOR_PORTLET_NAME+". Please contact the support", this.getClass(), helpURI);
}
//READ THE DATAMINER PORTLET URL FROM THE APPLICATION PROFILE IN THE SCOPE fullScope
String dataminerEndPoint = "https://pre.d4science.org/group/prevre/dataminer-manager";
String dataminerEndPoint = reader.getApplicationProfile().getUrl();
String queryString = "";
if(req.getQueryString()!=null && !req.getQueryString().isEmpty()){
queryString+="&"+req.getQueryString();
queryString+=req.getQueryString();
}
String dataMinerResolveURL = String.format("%s?%s", dataminerEndPoint, queryString);
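Putting the two resolvers together, a hedged client-side sketch (not part of the commit): POST the dataminer-invocation JSON to /analytics/create and read back the /analytics/get URL that AnalyticsGetResolver resolves against the portlet endpoint taken from the ApplicationProfile. The base URL, token, and body below are placeholders, and passing the token as the gcube-token query parameter follows the SmartGears behaviour sketched in the commented-out code above.

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;

import org.apache.commons.io.IOUtils;

public class AnalyticsCreateClientSketch {

    public static void main(String[] args) throws Exception {
        // Placeholders: the resolver base URL and the gcube-token depend on the deployment/VRE.
        String resolverBaseURL = "https://uri-resolver-host";
        String gcubeToken = "YOUR-VRE-TOKEN";
        String invocationJSON = "{ ... }"; // dataminer-invocation JSON, e.g. built as in the previous sketch

        URL url = new URL(resolverBaseURL + "/analytics/create?gcube-token=" + gcubeToken);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setDoOutput(true);
        conn.setRequestProperty("Content-Type", "text/plain"); // the endpoint now consumes text/plain
        try (OutputStream out = conn.getOutputStream()) {
            out.write(invocationJSON.getBytes("UTF-8"));
        }

        // On success the response body (and the Location header) carry the
        // /analytics/get/{vreName}?dim=<public-link> URL served by AnalyticsGetResolver.
        String analyticsGetURL = IOUtils.toString(conn.getInputStream(), "UTF-8");
        System.out.println("Analytics URL: " + analyticsGetURL);
    }
}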

View File

@@ -78,7 +78,6 @@ public class StorageHubResolver {
}
try{
StreamDescriptor descriptor = shc.open(id).asFile().downloadSpecificVersion(version);
return Response
.ok(descriptor.getStream())

View File

@@ -3,6 +3,6 @@
<group>DataTransfer</group>
<version>1.0.0-SNAPSHOT</version>
<description>URIResolver RESTful</description>
<include>/analytics/create*</include>
<include>/knime/create*</include>
<include>/analytics/create/*</include>
<include>/knime/create/*</include>
</application>

View File

@@ -18,6 +18,7 @@ import java.util.Map;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpStatus;
import org.gcube.datatransfer.resolver.applicationprofile.ApplicationProfileReader;
import org.gcube.datatransfer.resolver.util.HTTPCallsUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -409,4 +410,23 @@ public class TestResolvers {
System.out.println("Filename is: "+fileName);
return fileName;
}
/**
*
*/
private static final String ORG_GCUBE_PORTLETS_USER_DATAMINERMANAGER_SERVER_DATA_MINER_MANAGER_SERVICE_IMPL =
"org.gcube.portlets.user.dataminermanager.server.DataMinerManagerServiceImpl";
/**
*
*/
private static final String APPLICATION_PROFILE = "ApplicationProfile";
/**
*
* @param args
*/
public static void main(String[] args) {
ApplicationProfileReader reader = new ApplicationProfileReader("/gcube/preprod/preVRE", APPLICATION_PROFILE, ORG_GCUBE_PORTLETS_USER_DATAMINERMANAGER_SERVER_DATA_MINER_MANAGER_SERVICE_IMPL, false);
System.out.println(reader);
}
}