This commit is contained in:
parent 6f338b4caf
commit 9014e32c4e
@@ -2,6 +2,9 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm

# Changelog for org.gcube.data-publishing.gFeed.DataMinerAlgorithmsCrawler

## [v1.0.5-SNAPSHOT] - 2020-12-15

- Fixes [#22344](https://support.d4science.org/issues/22344#change-128440) : publish DM algorithms as Methods

## [v1.0.4] - 2020-12-15

- Dependency management
- Naming Convention
@@ -8,7 +8,7 @@
</parent>
<artifactId>DataMinerAlgorithmsCrawler</artifactId>
<name>DataMinerAlgorithmsCrawler</name>
<version>1.0.4</version>
<version>1.0.5-SNAPSHOT</version>
<description>Plugin for gCat-Feeder for DataMiner Algorithms publishing</description>

<!-- <properties> -->
@@ -56,6 +56,18 @@
<artifactId>jersey-media-json-jackson</artifactId>
</dependency>

<dependency>
<groupId>org.gcube.data-catalogue</groupId>
<artifactId>gcat-client</artifactId>
<version>[2.0.0,3.0.0)</version>
</dependency>

<dependency>
<groupId>org.gcube.portlets.user</groupId>
<artifactId>uri-resolver-manager</artifactId>
<version>[1.5.0-SNAPSHOT,2.0.0)</version>
</dependency>

<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
@@ -11,7 +11,8 @@ public class Constants {

// ENVIRONMENT EXPECTED PARAMETERS
public static final String GUI_BASE_URL=ENVIRONMENT_PROPERTIES_BASE+"GUI_BASE_URL";
public static final String GATEWAY_NAME=ENVIRONMENT_PROPERTIES_BASE+"GATEWAY_NAME";
public static final String DEFAULT_AUTHOR=ENVIRONMENT_PROPERTIES_BASE+"DEFAULT_AUTHOR";
public static final String DEFAULT_MAINTAINER=ENVIRONMENT_PROPERTIES_BASE+"DEFAULT_MAINTAINER";
@@ -1,14 +1,17 @@
package org.gcube.data.publishing.gCatFeeder.collectors.dm;

import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.chrono.IsoChronology;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.format.ResolverStyle;
import java.time.temporal.ChronoField;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.gcube.common.resources.gcore.ServiceEndpoint;

import org.gcube.data.analysis.dataminermanagercl.server.DataMinerService;
import org.gcube.data.analysis.dataminermanagercl.server.dmservice.SClient;
import org.gcube.data.analysis.dataminermanagercl.shared.process.Operator;
@@ -23,16 +26,49 @@ import org.gcube.data.publishing.gCatfeeder.collectors.DataCollector;
import org.gcube.data.publishing.gCatfeeder.collectors.model.faults.CollectorFault;

import lombok.extern.slf4j.Slf4j;
import org.gcube.portlets.user.uriresolvermanager.exception.IllegalArgumentException;
import org.gcube.portlets.user.uriresolvermanager.exception.UriResolverMapException;

@Slf4j
public class DMAlgorithmsInfoCollector implements DataCollector<InternalAlgorithmDescriptor> {

private static final Pattern p = Pattern.compile("\\{Published by (.*)\\((.*(\\.)?.*\\)).*\\}$");
/*
 * i.e. "Basic statistic max min average {Published by Giancarlo Panichi (giancarlo.panichi) on 2018/07/20 10:24 GMT}"
 */
private static final Pattern descriptionPattern = Pattern.compile("\\{Published by (.*)\\((.*(\\.)?.*\\)) on (.*)\\}$");

static final DateTimeFormatter versionDateParser= DateTimeFormatter.ofPattern("uuuu/MM/dd HH:mm 'GMT'[Z]")
.withResolverStyle(ResolverStyle.STRICT)
.withLocale(Locale.getDefault())
.withZone(ZoneId.systemDefault());

/* static final DateTimeFormatter versionDateParser=new DateTimeFormatterBuilder()
.parseCaseInsensitive().parseLenient()
.appendValue(ChronoField.YEAR,4)
.appendLiteral('/')
.appendValue(ChronoField.MONTH_OF_YEAR,2)
.appendLiteral('/')
.appendValue(ChronoField.DAY_OF_MONTH,2)
.appendLiteral(' ')
.appendValue(ChronoField.HOUR_OF_DAY,2)
.appendLiteral(':')
.appendValue(ChronoField.MINUTE_OF_HOUR,2)
.appendLiteral(' ')
.appendOffset("+HHMM", "GMT")
.toFormatter().withChronology(IsoChronology.INSTANCE)
.withResolverStyle(ResolverStyle.SMART); */

private Map<String,String> env=null;

public void setEnvironmentConfiguration(EnvironmentConfiguration envConfig) {
if(envConfig!=null) {
log.debug("Current Environment Configuration is : "+envConfig.getCurrentConfiguration());
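A minimal, self-contained sketch (not part of the commit) of how the `versionDateParser` declared above behaves: the "version" string extracted from an algorithm description, e.g. "2018/07/20 10:24 GMT", is expected to parse into a `LocalDateTime`, while any other version string triggers the fallback to `LocalDateTime.now()` used further down in this class. The "1.1.0" label below is purely illustrative.

```java
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.ResolverStyle;

public class VersionDateSketch {
    // Same pattern string as in DMAlgorithmsInfoCollector (locale/zone settings omitted here)
    static final DateTimeFormatter versionDateParser =
            DateTimeFormatter.ofPattern("uuuu/MM/dd HH:mm 'GMT'[Z]")
                    .withResolverStyle(ResolverStyle.STRICT);

    public static void main(String[] args) {
        // Parses the date embedded in a DataMiner algorithm description
        System.out.println(LocalDateTime.parse("2018/07/20 10:24 GMT", versionDateParser)); // 2018-07-20T10:24
        try {
            // A plain version label is not a date and fails to parse
            LocalDateTime.parse("1.1.0", versionDateParser);
        } catch (Exception e) {
            System.out.println("not a date, falling back to LocalDateTime.now()");
        }
    }
}
```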
@@ -74,7 +110,7 @@ public class DMAlgorithmsInfoCollector implements DataCollector<InternalAlgorith
String categoryID=cat.getId();
String categoryName=cat.getName();

desc.setCategoryBriefDescription(categoryBriefDescription);
desc.setCategoryID(categoryID);
desc.setCategoryName(categoryName);
@@ -83,7 +119,7 @@ public class DMAlgorithmsInfoCollector implements DataCollector<InternalAlgorith

// Operator info

String opBriefDescription=op.getBriefDescription();
String opDescription=op.getDescription();
String opID=op.getId();
@@ -99,7 +135,20 @@ public class DMAlgorithmsInfoCollector implements DataCollector<InternalAlgorith
desc.setAuthor(parseUser(getAuthor(opDescription)));
desc.setMaintainer(parseUser(getAuthor(opDescription)));

desc.setVersion(parseDescriptionForDate(opDescription));

// Try to use version as creation time
LocalDateTime toSetCreationDate=null;
try{
toSetCreationDate=LocalDateTime.parse(desc.getVersion(),versionDateParser);
}catch(Throwable t){
log.debug("Version {} is not a date. Using Now as Creation date..",desc.getVersion());
toSetCreationDate = LocalDateTime.now();
}

desc.setCreationDate(toSetCreationDate);

// Parameters info
for(org.gcube.data.analysis.dataminermanagercl.shared.parameters.Parameter param:client.getInputParameters(op)) {
@@ -132,11 +181,17 @@ public class DMAlgorithmsInfoCollector implements DataCollector<InternalAlgorith
if(guiBasePath!=null)
desc.setGuiLink(getGUIBasePath()+"?"+
DataMinerCollectorProperties.getProperty(DataMinerCollectorProperties.GUI_PARAM_NAME)+"="+opID);

desc.setGatewayName(env.get(Constants.GATEWAY_NAME));

if(wpsbaseUrl!=null) {
desc.setWpsLink(wpsbaseUrl+"?Request=DescribeProcess&Version=1.0.0&Service=WPS"+"&Identifier="+opID);
}

desc.setPrivateFlag(Boolean.parseBoolean(env.get(Constants.PRIVATE)));

toReturn.add(desc);
@@ -164,7 +219,7 @@ public class DMAlgorithmsInfoCollector implements DataCollector<InternalAlgorith
}

private String getAuthor(String algorithmDescription) {
String toReturn=parseDescription(algorithmDescription);
String toReturn=parseDescriptionForUser(algorithmDescription);
if(toReturn==null)
toReturn=env.get(Constants.DEFAULT_AUTHOR);
if(toReturn==null)
@@ -173,35 +228,48 @@ public class DMAlgorithmsInfoCollector implements DataCollector<InternalAlgorith
}

private static final UserIdentity parseUser(String userString) {
String splitter=null;
if(userString.contains(" "))
splitter=" ";
else if (userString.contains(".")) splitter="\\.";

String[] splitted=userString.split(splitter);
return new UserIdentity(splitted[0], splitted[1], null, null);
static final UserIdentity parseUser(String userString) {
try{
String splitter=null;
if(userString.contains(" "))
splitter=" ";
else if (userString.contains(".")) splitter="\\.";
String[] splitted=userString.split(splitter);
return new UserIdentity(splitted[0], splitted[1], null, null);
}catch(NullPointerException e){
System.err.println("Error with userString "+userString);
e.printStackTrace(System.err);
throw e;
}
}

/*
 * i.e. "Basic statistic max min average {Published by Giancarlo Panichi (giancarlo.panichi) on 2018/07/20 10:24 GMT}"
 * e.g. "Basic statistic max min average {Published by Giancarlo Panichi (giancarlo.panichi) on 2018/07/20 10:24 GMT}"
 */
private static final String parseDescription(String description) {
Matcher m=p.matcher(description);
static final String parseDescriptionForUser(String description) {
Matcher m=descriptionPattern.matcher(description);
if(m.find())
return m.group(1); // group 0 == {...}, group 1 == Giancarlo Panichi, group 2 == giancarlo.panichi
else return null;
}

static final String parseDescriptionForDate(String description) {
Matcher m=descriptionPattern.matcher(description);
if(m.find())
return m.group(4); // group 0 == {...}, group 1 == Giancarlo Panichi, group 2 == giancarlo.panichi, group 4 == 2018/07/20 10:24 GMT
else return "n/a";
}

private final String getWPSBasePath() {
static final String getWPSBasePath() {
try{
ServiceEndpoint se=ISUtils.queryForServiceEndpoints("DataAnalysis", "DataMiner").get(0);
return se.profile().accessPoints().iterator().next().address();
return ISUtils.queryForServiceEndpointsByName("DataAnalysis", "DataMiner").get(0).profile().
accessPoints().iterator().next().address();
}catch(Throwable t) {
log.warn("Unable to find DM proxy. No WPS URL will be provided",t);
return null;
}
}

}
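For reference, a small standalone sketch (not part of the commit) of how `parseDescriptionForUser`, `parseDescriptionForDate` and `parseUser` decompose a typical DataMiner description; the expected values match the assertions added to `TranslationTest` further down in this diff.

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class DescriptionParsingSketch {
    // Same regex as descriptionPattern in DMAlgorithmsInfoCollector
    static final Pattern descriptionPattern =
            Pattern.compile("\\{Published by (.*)\\((.*(\\.)?.*\\)) on (.*)\\}$");

    public static void main(String[] args) {
        String s = "Basic statistic max min average "
                + "{Published by Giancarlo Panichi (giancarlo.panichi) on 2018/07/20 10:24 GMT}";
        Matcher m = descriptionPattern.matcher(s);
        if (m.find()) {
            String user = m.group(1); // "Giancarlo Panichi " -> used as author and maintainer
            String date = m.group(4); // "2018/07/20 10:24 GMT" -> used as version / creation date
            // parseUser splits on the blank, giving name "Giancarlo" and surname "Panichi"
            String[] split = user.split(" ");
            System.out.println(split[0] + " " + split[1] + " / " + date);
        }
    }
}
```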
@@ -8,7 +8,9 @@ public class DataMinerCollectorProperties {
public static final String DEFAULT_AUTHOR="default_author";
public static final String GUI_PARAM_NAME="gui_param_name";
public static final String CKAN_RESOURCE_TYPE="ckan_resource_type";
@@ -0,0 +1,123 @@
package org.gcube.data.publishing.gCatFeeder.collectors.dm;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.Map;

import org.gcube.com.fasterxml.jackson.databind.ObjectMapper;
import org.gcube.data.publishing.gCatFeeder.utils.ContextUtils;
import org.gcube.portlets.user.uriresolvermanager.UriResolverManager;
import org.gcube.portlets.user.uriresolvermanager.exception.IllegalArgumentException;
import org.gcube.portlets.user.uriresolvermanager.exception.UriResolverMapException;

public class URIResolver {

private static final String CTLG_RESOLVER_NAME="CTLG";

// private static final String CATALOGUE_CONTEXT = "gcube_scope";
private static final String ENTITY_CONTEXT = "entity_context";
private static final String ENTITY_NAME = "entity_name";

private static final String DATASET = "dataset";

protected ObjectMapper mapper;

public URIResolver() {
this.mapper = new ObjectMapper();
}

protected StringBuilder getStringBuilder(InputStream inputStream) throws IOException {
StringBuilder result = new StringBuilder();
try(BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream))) {
String line;
while((line = reader.readLine()) != null) {
result.append(line);
}
}
return result;
}

/*
{
"gcube_scope" : "/gcube/devsec/devVRE",
"entity_context" : "dataset",
"entity_name" : "sarda-sarda"
}
*/
public String getCatalogueItemURL(String name) throws UriResolverMapException, IllegalArgumentException {
UriResolverManager resolver = new UriResolverManager(CTLG_RESOLVER_NAME);
Map<String, String> params = new HashMap<String, String>();
params.put(ENTITY_NAME, name);
params.put(ENTITY_CONTEXT,DATASET);
params.put("gcube_scope", ContextUtils.getCurrentScope());
String shortLink = resolver.getLink(params, false);
return shortLink;
}

/*public String getCatalogueItemURL(String name) {
try {
String uriResolverURL = getConfigurationFromIS();

ObjectNode requestContent = mapper.createObjectNode();
requestContent.put(CATALOGUE_CONTEXT, ContextUtils.getCurrentScope());

requestContent.put(ENTITY_TYPE, DATASET);
requestContent.put(ENTITY_NAME, name);

GXHTTPStringRequest gxhttpStringRequest = GXHTTPStringRequest.newRequest(uriResolverURL);
gxhttpStringRequest.from(CTLG_RESOLVER_NAME);
gxhttpStringRequest.header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON);
gxhttpStringRequest.isExternalCall(true);
String body = mapper.writeValueAsString(requestContent);
HttpURLConnection httpURLConnection = gxhttpStringRequest.post(body);

if(httpURLConnection.getResponseCode() != 200) {
try{
IOUtils.copy(httpURLConnection.getInputStream(),System.out);
}catch(Throwable t){
System.out.println("No message");
}
throw new InternalServerErrorException("Unable to get Item URL via URI Resolver. Code : "+
httpURLConnection.getResponseCode());
}

String url = getStringBuilder(httpURLConnection.getInputStream()).toString();

return url;
} catch(WebApplicationException e) {
throw e;
} catch(Exception e) {
throw new WebApplicationException(e);
}
}*/

/*
static String getConfigurationFromIS() {
try {

StringBuilder toReturn=new StringBuilder();
ServiceEndpoint serviceEndpoint = ISUtils.queryForServiceEndpointsByName("Service","HTTP-URI-Resolver").get(0);
serviceEndpoint.profile().accessPoints().
forEach(a->{
if(a.name().equals(CTLG_RESOLVER_NAME))
toReturn.append(a.address());
});
if(toReturn.length()>0) return toReturn.toString();
else throw new Exception("Access point for "+CTLG_RESOLVER_NAME+" not found ");
} catch(WebApplicationException e) {
throw e;
} catch(Exception e) {
throw new InternalServerErrorException("Error while getting configuration on IS", e);
}

}
*/
}
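For context, a minimal usage sketch (not part of the commit, mirroring the `ItemUrlTests` class added later in this diff); it assumes a valid gCube scope/token has already been set for the calling thread.

```java
public class UriResolverUsageSketch {
    public static void main(String[] args) throws Exception {
        // The tests in this commit set the scope first, e.g. TokenSetter.set("/gcube/devsec/devVRE")
        URIResolver resolver = new URIResolver();
        // "fake" is the illustrative item name also used by ItemUrlTests
        System.out.println(resolver.getCatalogueItemURL("fake"));
    }
}
```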
@@ -1,5 +1,6 @@
package org.gcube.data.publishing.gCatFeeder.collectors.dm.model;

import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
@@ -25,7 +26,10 @@ public class InternalAlgorithmDescriptor implements CustomData {

private UserIdentity author;
private UserIdentity maintainer;

private String version;
private LocalDateTime creationDate;

//category info

private String categoryBriefDescription;
@@ -38,8 +42,10 @@ public class InternalAlgorithmDescriptor implements CustomData {
private Set<Parameter> outputParameters=new HashSet<>();

private String guiLink;
private String gatewayName;
private String wpsLink;

private Boolean privateFlag;
@@ -2,9 +2,14 @@ package org.gcube.data.publishing.gCatFeeder.collectors.dm.model.ckan;

import java.io.ByteArrayOutputStream;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.util.ArrayList;

import org.gcube.data.publishing.gCatFeeder.collectors.dm.DataMinerCollectorProperties;
import org.gcube.data.publishing.gCatFeeder.collectors.dm.URIResolver;
import org.gcube.data.publishing.gCatFeeder.collectors.dm.model.InternalAlgorithmDescriptor;
import org.gcube.data.publishing.gCatFeeder.collectors.dm.model.Parameter;
import org.gcube.data.publishing.gCatFeeder.collectors.dm.model.UserIdentity;
@@ -25,6 +30,9 @@ import lombok.extern.slf4j.Slf4j;
@Slf4j
public class GCatModel implements CatalogueFormatData {

private static final DateTimeFormatter dateFormatter=DateTimeFormatter.ISO_DATE;

private static ObjectMapper mapper=new ObjectMapper();

private static String profileXML=null;
@@ -35,6 +43,21 @@ public class GCatModel implements CatalogueFormatData {
profileXML=toSet;
}

public static String getItemUrl(String name) {
try{
URIResolver uriResolver = new URIResolver();
log.debug("Evaluating item url for {}",name);
String catalogueItemURL = uriResolver.getCatalogueItemURL(name);
log.info("Item URL for {} is {}",name,catalogueItemURL);
return catalogueItemURL;
}catch(Exception e){
log.warn("Unable to evaluate item URL for "+name,e);
return null;
}
}

public GCatModel(InternalAlgorithmDescriptor desc) {
item=new CkanItem();
// item.setAuthor(desc.getAuthor());
@@ -53,20 +76,52 @@ public class GCatModel implements CatalogueFormatData {

item.setPrivateFlag(desc.getPrivateFlag());

for(Parameter param: desc.getInputParameters())
item.getExtras().add(new CKanExtraField(profileID+":Input Parameter",
String.format("%1$s [%2$s] %3$s : %4$s",
/* for(Parameter param: desc.getInputParameters())
item.getExtras().add(new CKanExtraField(profileID+":Input Parameter",
String.format("%1$s [%2$s] %3$s : %4$s",
param.getName(),param.getType(),
((param.getValue()!=null&&!param.getValue().isEmpty())?"default : "+param.getValue():""),
param.getDescription())));

for(Parameter param: desc.getOutputParameters())
item.getExtras().add(new CKanExtraField(profileID+":Output Parameter",
String.format("%1$s [%2$s] %3$s : %4$s",
item.getExtras().add(new CKanExtraField(profileID+":Output Parameter",
String.format("%1$s [%2$s] %3$s : %4$s",
param.getName(),param.getType(),
((param.getValue()!=null&&!param.getValue().isEmpty())?"default : "+param.getValue():""),
param.getDescription())));
*/

item.getExtras().add(new CKanExtraField("Identity:Creator", desc.getAuthor().asStringValue()));

item.getExtras().add(new CKanExtraField("Identity:CreationDate", dateFormatter.format(desc.getCreationDate())));

item.getExtras().add(new CKanExtraField("AccessMode:UsageMode", "as-a-Service via Blue-Cloud Infrastructure"));
item.getExtras().add(new CKanExtraField("AccessMode:Availability", "On-Line"));

item.getExtras().add(new CKanExtraField("TechnicalDetails:Hosting Environment", "gCube SmartGear"));
item.getExtras().add(new CKanExtraField("TechnicalDetails:Dependencies on Other SW", "gCube DataMiner"));

item.getExtras().add(new CKanExtraField("Rights:Field/Scope of use", "Any use"));
item.getExtras().add(new CKanExtraField("Rights:Basic rights", "Communication"));
item.getExtras().add(new CKanExtraField("Rights:Basic rights", "Making available to the public"));
item.getExtras().add(new CKanExtraField("Rights:Basic rights", "Distribution"));

item.getExtras().add(new CKanExtraField("Attribution:Attribution requirements",
String.format("Cite as: %1$s (%2$d): %3$s. %4$s. %5$s. %6$s. %7$s. Retrieved from the %8$s (%9$s) operated by D4Science.org www.d4science.org",
desc.getAuthor().asStringValue(),
LocalDateTime.now().getYear(),
desc.getName(),
desc.getVersion(),
"Blue-Cloud",
"DataMiner Process",
getItemUrl(item.getName()),
desc.getGatewayName(),
desc.getGuiLink())));

//Algorithm Description
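As a side illustration (not part of the commit), the positional format string above yields a citation like the one printed below; every value in the sketch is a hypothetical stand-in for the descriptor fields and resolver output.

```java
public class CitationFormatSketch {
    public static void main(String[] args) {
        // Hypothetical values; in GCatModel they come from InternalAlgorithmDescriptor and the URI resolver.
        String citation = String.format(
                "Cite as: %1$s (%2$d): %3$s. %4$s. %5$s. %6$s. %7$s. Retrieved from the %8$s (%9$s) operated by D4Science.org www.d4science.org",
                "Panichi Giancarlo", 2020, "Basic statistic", "2018/07/20 10:24 GMT",
                "Blue-Cloud", "DataMiner Process",
                "https://example.org/ctlg/item-url",   // stand-in for getItemUrl(item.getName())
                "Example Gateway",                     // stand-in for desc.getGatewayName()
                "https://example.org/dataminer?OperatorId=org.example.SOME_ALGORITHM"); // stand-in for desc.getGuiLink()
        System.out.println(citation);
    }
}
```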
@@ -77,8 +132,8 @@ public class GCatModel implements CatalogueFormatData {

// Algorithm Users

item.getExtras().add(new CKanExtraField(profileID+":Process Author",desc.getAuthor().asStringValue()));
item.getExtras().add(new CKanExtraField(profileID+":Process Maintainer",desc.getAuthor().asStringValue()));
//item.getExtras().add(new CKanExtraField(profileID+":Process Author",desc.getAuthor().asStringValue()));
//item.getExtras().add(new CKanExtraField(profileID+":Process Maintainer",desc.getAuthor().asStringValue()));

if(desc.getGuiLink()!=null) {
try {
@@ -112,23 +167,6 @@ public class GCatModel implements CatalogueFormatData {
}

/**
 * (Common) Title
 * (Common) Description
 * (Common) Tags: free list of keywords
 * (Common) License
 * (Common) Visibility: either public or private
 * (Common) Version
 * (Common) Author: the creator of metadata. Only one occurrence is supported;
 * (Common) Maintainer:
 * (Method specific) Creator: the author of the method (with email and ORCID). Repeatable field;
 * (Method specific) Creation date: when the method has been released;
 * (Method specific) Input: Repeatable field;
 * (Method specific) Output: Repeatable field;
 * (Method specific) RelatedPaper: a reference to an associated paper;
 * (Method specific) Restrictions On Use: an optional text
 * (Method specific) Attribution requirements: the text to use to acknowledge method usage;
 */

static final String fixTag(String toFix) {
@@ -1,3 +1,3 @@
default_author=Gianpaolo Coro
default_author=Gianpaolo.Coro
gui_param_name=OperatorId
ckan_resource_type=DataMiner Process
ckan_resource_type=Method
@@ -1,41 +1,196 @@
<metadataformat type="DataMiner Process">
<metadatafield categoryref="DataMiner Process">
<fieldName>Input Parameter</fieldName>
<mandatory>false</mandatory>
<dataType>String</dataType>
<note>Input parameter expected for the execution of the process</note>
</metadatafield>
<metadatafield categoryref="DataMiner Process">
<fieldName>Output Parameter</fieldName>
<mandatory>false</mandatory>
<dataType>String</dataType>
<note>Output parameter expected from the execution of the process
</note>
</metadatafield>
<metadatafield categoryref="DataMiner Process">
<fieldName>Process Author</fieldName>
<metadataformat type="Method">
<metadatafield categoryref="Identity">
<fieldName>External Identifier</fieldName>
<mandatory>false</mandatory>
<dataType>String</dataType>
<maxOccurs>*</maxOccurs>
<defaultValue />
<note>The name of the author, with email and ORCID. The format should
be: family, given[, email][, ORCID]. Example: Smith, John,
js@acme.org, orcid.org/0000-0002-1825-0097</note>
<note>This applies only to methods that have already been published. Insert here a DOI, a handle, or any other identifier assigned when publishing the dataset elsewhere.</note>
</metadatafield>
<metadatafield categoryref="Identity">
<fieldName>Creator</fieldName>
<mandatory>true</mandatory>
<dataType>String</dataType>
<maxOccurs>*</maxOccurs>
<note>The name of the creator, with email and ORCID. The format should be: family, given[, email][, ORCID]. Example: Smith, John, js@acme.org, orcid.org/0000-0002-1825-0097</note>
<validator>
<regularExpression>^[a-zA-ZÀ-ÿ .'-]+, [a-zA-ZÀ-ÿ .'-]+[,]*([a-zA-Z0-9_!#$%’*+=?`{|}~^.-]+@[a-zA-Z0-9À-ÿ.-]+)?[,]*(orcid.org\/0000-000(1-[5-9]|2-[0-9]|3-[0-4])\d\d\d-\d\d\d[\dX])?$</regularExpression>
<regularExpression>^[a-zA-ZÀ-ÿ .'-]+, [a-zA-ZÀ-ÿ .'-]+[, ]*([a-zA-Z0-9_!#$%’*+=?`{|}~^.-]+@[a-zA-Z0-9À-ÿ.-]+)?[, ]*(orcid.org\/0000-000(1-[5-9]|2-[0-9]|3-[0-4])\d\d\d-\d\d\d[\dX])?$</regularExpression>
</validator>
</metadatafield>
<metadatafield categoryref="DataMiner Process">
<fieldName>Process Maintainer</fieldName>
<metadatafield categoryref="Identity">
<fieldName>CreationDate</fieldName>
<mandatory>true</mandatory>
<dataType>Time</dataType>
<maxOccurs>1</maxOccurs>
<note>The date of creation of the dataset (different from the date of registration of the dataset automatically added by the system). Use ISO 8601 Date Format: YYYY-MM-DD[ HH:MM] Ex. 1998-11-10 or 2015-05-29 11:55</note>
<validator>
<regularExpression>^(\d{4}\-(0?[1-9]|1[012])\-(0?[1-9]|[12][0-9]|3[01]))+([ ]+(\d{2}(:?\d{2})?)?)?$</regularExpression>
</validator>
</metadatafield>
<metadatafield categoryref="Identity">
<fieldName>Owner</fieldName>
<mandatory>false</mandatory>
<dataType>String</dataType>
<maxOccurs>*</maxOccurs>
<defaultValue />
<note>The name of the author, with email and ORCID. The format should
be: family, given[, email][, ORCID]. Example: Smith, John,
js@acme.org, orcid.org/0000-0002-1825-0097</note>
<note>The owner of the method (free text).</note>
</metadatafield>
<metadatafield categoryref="Identity">
<fieldName>RelatedPaper</fieldName>
<mandatory>false</mandatory>
<dataType>String</dataType>
<maxOccurs>1</maxOccurs>
<note>Insert a complete reference to an associated work.</note>
</metadatafield>
<metadatafield categoryref="Coverage">
<fieldName>Semantic Coverage</fieldName>
<mandatory>false</mandatory>
<dataType>String</dataType>
<maxOccurs>*</maxOccurs>
<note>Tagging e.g. Functional analysis, Environment analysis and visualisation, ...</note>
</metadatafield>
<metadatafield categoryref="AccessMode">
<fieldName>UsageMode</fieldName>
<mandatory>true</mandatory>
<dataType>String</dataType>
<maxOccurs>1</maxOccurs>
<note>How the method is expected to be exploited.</note>
<vocabulary isMultiSelection="true">
<vocabularyField>Download</vocabularyField>
<vocabularyField>as-a-Application via Blue-Cloud Infrastructure</vocabularyField>
<vocabularyField>as-a-Application via third-party Infrastructure</vocabularyField>
<vocabularyField>as-a-Service via Blue-Cloud Infrastructure</vocabularyField>
<vocabularyField>as-a-Service via third-party Infrastructure</vocabularyField>
</vocabulary>
</metadatafield>
<metadatafield categoryref="AccessMode">
<fieldName>Availability</fieldName>
<mandatory>false</mandatory>
<dataType>String</dataType>
<maxOccurs>1</maxOccurs>
<note>How availability of the resource is offered. On-line means that the method can be executed through the Virtual Laboratory Gateway. On-site means that the method can only be executed by visiting the hosting provider.</note>
<vocabulary>
<vocabularyField>On-Line</vocabularyField>
<vocabularyField>On-Site</vocabularyField>
</vocabulary>
</metadatafield>
<metadatafield categoryref="TechnicalDetails">
<fieldName>Hosting Environment</fieldName>
<mandatory>false</mandatory>
<dataType>String</dataType>
<maxOccurs>1</maxOccurs>
<note>E.g. Linux, Microsoft Azure, Amazon EC2</note>
</metadatafield>
<metadatafield categoryref="TechnicalDetails">
<fieldName>ProgrammingLanguage</fieldName>
<mandatory>false</mandatory>
<dataType>String</dataType>
<maxOccurs>1</maxOccurs>
<note>The primary language used to implement the method.</note>
</metadatafield>
<metadatafield categoryref="TechnicalDetails">
<fieldName>Dependencies on Other SW</fieldName>
<mandatory>false</mandatory>
<dataType>String</dataType>
<maxOccurs>*</maxOccurs>
<note>E.g. this software requires a Hadoop cluster to run</note>
</metadatafield>
<metadatafield categoryref="TechnicalDetails">
<fieldName>input</fieldName>
<mandatory>false</mandatory>
<dataType>String</dataType>
<maxOccurs>*</maxOccurs>
<note>inputParametersType. See WPS specifications</note>
</metadatafield>
<metadatafield categoryref="TechnicalDetails">
<fieldName>output</fieldName>
<mandatory>false</mandatory>
<dataType>String</dataType>
<maxOccurs>*</maxOccurs>
<note>outputType. See WPS specifications</note>
</metadatafield>
<metadatafield categoryref="Rights">
<fieldName>IP/Copyrights</fieldName>
<mandatory>false</mandatory>
<dataType>String</dataType>
<maxOccurs>1</maxOccurs>
<note>Whether the software is covered by any rights: copyright, related rights, know-how, proprietary, etc.</note>
</metadatafield>
<metadatafield categoryref="Rights">
<fieldName>Field/Scope of use</fieldName>
<mandatory>true</mandatory>
<dataType>String</dataType>
<maxOccurs>1</maxOccurs>
<vocabulary isMultiSelection="true">
<vocabularyField>Any use</vocabularyField>
<vocabularyField>Non-commercial only</vocabularyField>
<vocabularyField>Research only</vocabularyField>
<vocabularyField>Non-commercial research only</vocabularyField>
<vocabularyField>Private use</vocabularyField>
<vocabularyField>Use for developing and providing a service</vocabularyField>
</vocabulary>
</metadatafield>
<metadatafield categoryref="Rights">
<fieldName>Basic rights</fieldName>
<mandatory>true</mandatory>
<dataType>String</dataType>
<maxOccurs>1</maxOccurs>
<vocabulary isMultiSelection="true">
<vocabularyField>Temporary download of a single copy only</vocabularyField>
<vocabularyField>Download</vocabularyField>
<vocabularyField>Copying</vocabularyField>
<vocabularyField>Distribution</vocabularyField>
<vocabularyField>Modification</vocabularyField>
<vocabularyField>Communication</vocabularyField>
<vocabularyField>Making available to the public</vocabularyField>
<vocabularyField>Other rights</vocabularyField>
</vocabulary>
</metadatafield>
<metadatafield categoryref="Rights">
<fieldName>Restrictions on use</fieldName>
<mandatory>false</mandatory>
<dataType>String</dataType>
<maxOccurs>1</maxOccurs>
<note>Any restrictions on how/where the dataset may be used</note>
</metadatafield>
<metadatafield categoryref="Rights">
<fieldName>Sublicense rights</fieldName>
<mandatory>false</mandatory>
<dataType>String</dataType>
<maxOccurs>1</maxOccurs>
<note>Any restrictions on how/where the dataset may be used</note>
<vocabulary>
<vocabularyField>No</vocabularyField>
<vocabularyField>Yes</vocabularyField>
</vocabulary>
</metadatafield>
<metadatafield categoryref="Rights">
<fieldName>Requirement of non-disclosure (confidentiality mark)</fieldName>
<mandatory>false</mandatory>
<dataType>String</dataType>
<maxOccurs>1</maxOccurs>
<note>Requirement of non-disclosure (confidentiality mark). Whether the dataset bears a confidentiality mark/may be used and shared subject to the obligation of non-disclosure</note>
</metadatafield>
<metadatafield categoryref="Rights">
<fieldName>Embargo period</fieldName>
<mandatory>false</mandatory>
<dataType>Time_Interval</dataType>
<maxOccurs>1</maxOccurs>
<note>Period of time during which the resource may be used. Use ISO 8601 Date Format: YYYY-MM-DD[ HH:MM] Ex. 2016-07-31 or 2015-05-10 12:00</note>
<validator>
<regularExpression>^[a-zA-ZÀ-ÿ .'-]+, [a-zA-ZÀ-ÿ .'-]+[,]*([a-zA-Z0-9_!#$%’*+=?`{|}~^.-]+@[a-zA-Z0-9À-ÿ.-]+)?[,]*(orcid.org\/0000-000(1-[5-9]|2-[0-9]|3-[0-4])\d\d\d-\d\d\d[\dX])?$</regularExpression>
<regularExpression>^(\d{4}\-(0?[1-9]|1[012])\-(0?[1-9]|[12][0-9]|3[01]))+([ ]+(\d{2}(:?\d{2})?)?)?$</regularExpression>
</validator>
</metadatafield>
<metadatafield categoryref="Attribution">
<fieldName>Attribution requirements</fieldName>
<mandatory>false</mandatory>
<dataType>String</dataType>
<maxOccurs>1</maxOccurs>
<note>The text to acknowledge the resource when using it</note>
</metadatafield>
<metadatafield categoryref="Attribution">
<fieldName>Distribution requirements</fieldName>
<mandatory>false</mandatory>
<dataType>String</dataType>
<maxOccurs>1</maxOccurs>
<note>The text to acknowledge the resource when distributing it</note>
</metadatafield>
</metadataformat>
@@ -0,0 +1,19 @@
package org.gcube.data.publishing.gCatFeeder.collectors.dm;

import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.data.publishing.gCatFeeder.tests.BaseCollectorTest;
import org.gcube.data.publishing.gCatFeeder.tests.TokenSetter;
import org.gcube.portlets.user.uriresolvermanager.exception.IllegalArgumentException;
import org.gcube.portlets.user.uriresolvermanager.exception.UriResolverMapException;
import org.junit.Test;

public class ItemUrlTests extends BaseCollectorTest {

@Test
public void getItemURL() throws UriResolverMapException, IllegalArgumentException {
//SecurityTokenProvider.instance.set("***REMOVED***");
TokenSetter.set("/gcube/devsec/devVRE");
String name = "fake";
System.out.println(new URIResolver().getCatalogueItemURL(name));
}
}
@@ -1,13 +1,18 @@
package org.gcube.data.publishing.gCatFeeder.collectors.dm;

import java.io.IOException;
import java.time.LocalDateTime;
import java.time.ZonedDateTime;
import java.util.Collection;
import java.util.Set;

import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.data.publishing.gCatFeeder.collectors.dm.model.InternalAlgorithmDescriptor;
import org.gcube.data.publishing.gCatFeeder.collectors.dm.model.ckan.GCatModel;
import org.gcube.data.publishing.gCatFeeder.model.CatalogueFormatData;
import org.gcube.data.publishing.gCatFeeder.model.InternalConversionException;
import org.gcube.data.publishing.gCatFeeder.tests.BaseCollectorTest;
import org.gcube.data.publishing.gCatFeeder.tests.TokenSetter;
import org.gcube.data.publishing.gCatfeeder.collectors.CollectorPlugin;
import org.gcube.data.publishing.gCatfeeder.collectors.DataCollector;
import org.gcube.data.publishing.gCatfeeder.collectors.DataTransformer;
@@ -20,12 +25,18 @@ import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;

import static org.junit.Assert.*;

public class TranslationTest extends BaseCollectorTest{

@Test
public void testTranslation() throws CollectorFault, CatalogueNotSupportedException, JsonGenerationException, JsonMappingException, IOException, InternalConversionException {
Assume.assumeTrue(isTestInfrastructureEnabled());
//Assume.assumeTrue(isTestInfrastructureEnabled());
System.out.println("START HERE");
//TokenSetter.set("/d4science.research-infrastructures.eu/D4OS/Blue-CloudLab");
//SecurityTokenProvider.instance.set("***REMOVED***");
TokenSetter.set("/gcube/devsec/devVRE");
System.out.println("Entering Infrastructure enabled tests..");
ObjectMapper mapper = new ObjectMapper();
@@ -38,12 +49,38 @@ public class TranslationTest extends BaseCollectorTest{
System.out.println(mapper.writeValueAsString(obj)+"\n");

for(String destinationcatalogue : (Set<String>)plugin.getSupportedCatalogueTypes()) {
DataTransformer<? extends CatalogueFormatData, InternalAlgorithmDescriptor> transformer=plugin.getTransformerByCatalogueType(destinationcatalogue);
DataTransformer<? extends CatalogueFormatData, InternalAlgorithmDescriptor> transformer=
plugin.getTransformerByCatalogueType(destinationcatalogue);
System.out.println("Starting Transformation "+transformer.toString());

for(Object data:transformer.transform(collected))
System.out.println(((CatalogueFormatData)data).toCatalogueFormat());

}
}

@Test
public void testParseDescription(){
String s="balablabalba";
assertEquals("n/a",DMAlgorithmsInfoCollector.parseDescriptionForDate(s));
assertEquals(null, DMAlgorithmsInfoCollector.parseDescriptionForUser(s));
s="Basic statistic max min average {Published by Giancarlo Panichi (giancarlo.panichi) on 2018/07/20 10:24 GMT}";
assertEquals("2018/07/20 10:24 GMT",DMAlgorithmsInfoCollector.parseDescriptionForDate(s));
assertEquals("Giancarlo Panichi ",DMAlgorithmsInfoCollector.parseDescriptionForUser(s));

System.out.println(DMAlgorithmsInfoCollector.versionDateParser.format(ZonedDateTime.now()));
System.out.println(DMAlgorithmsInfoCollector.versionDateParser.parse(DMAlgorithmsInfoCollector.parseDescriptionForDate(s)));
}

@Test
public void testEnvironment(){

assertNotNull(DMAlgorithmsInfoCollector.getWPSBasePath());
assertNotNull(GCatModel.getItemUrl("fake"));
System.out.println(getEnvironmentConfiguration().getCurrentConfiguration());
}
}
@@ -10,6 +10,7 @@ import org.gcube.data.publishing.gCatFeeder.model.CatalogueFormatData;
import org.gcube.data.publishing.gCatFeeder.model.CatalogueInstanceDescriptor;
import org.gcube.data.publishing.gCatFeeder.model.InternalConversionException;
import org.gcube.data.publishing.gCatFeeder.tests.BaseCataloguePluginTest;
import org.gcube.data.publishing.gCatFeeder.tests.TokenSetter;
import org.gcube.data.publishing.gCataFeeder.catalogues.gCat.GCatPlugin;
import org.junit.Assert;
import org.junit.Assume;

File diff suppressed because one or more lines are too long
@@ -8,7 +8,7 @@
</encoder>
</appender>

<logger name="org.gcube" level="DEBUG"/>
<logger name="org.gcube.data" level="DEBUG"/>

<root level="WARN">
<appender-ref ref="STDOUT" />