Merge branch 'Development' of code-repo.d4science.org:MaDgiK-CITE/argos into Development
Conflicts: dmp-backend/web/src/main/java/eu/eudat/logic/proxy/fetching/RemoteFetcherUtils.java
Commit: a3c4b3c143
@@ -291,6 +291,8 @@ public class DatasetProfileManager {
modelDefinition.setGroupId(oldDatasetProfile.getGroupId());
modelDefinition.setLanguage(oldDatasetProfile.getLanguage());
apiContext.getOperationsContext().getDatabaseRepository().getDatasetProfileDao().createOrUpdate(modelDefinition);
eu.eudat.data.entities.DatasetProfile datasetProfile = apiContext.getOperationsContext().getDatabaseRepository().getDatasetProfileDao().createOrUpdate(modelDefinition);
this.storeDatasetProfileUsers(datasetProfile, profile);
return modelDefinition;
} else {
throw new DatasetProfileNewVersionException("Version to update not the latest.");

@@ -52,7 +52,7 @@ public class PrefillingManager {
PrefillingGet prefillingGet = prefillingConfig.getPrefillingGet();
Map<String, Object> prefillingEntity = getSingle(prefillingGet.getUrl(), prefillId);
DatasetProfile datasetProfile = apiContext.getOperationsContext().getDatabaseRepository().getDatasetProfileDao().find(profileId);
return DatasetWizardModel.fromPrefilledEntity(prefillingEntity, prefillingGet.getMappings(), datasetProfile, datasetManager);
return DatasetWizardModel.fromPrefilledEntity(prefillingEntity, prefillingGet, datasetProfile, datasetManager);
}

private Map<String, Object> getSingle(String url, String id) {

@@ -0,0 +1,38 @@
package eu.eudat.logic.proxy.config.entities;

import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;

@XmlRootElement(name = "mapping")
public class DefaultPrefillingMapping implements PrefillingMapping{
private String source;
private String target;
private String maDmpTarget;

public String getSource() {
return source;
}

@XmlAttribute(name = "source")
public void setSource(String source) {
this.source = source;
}

public String getTarget() {
return target;
}

@XmlAttribute(name = "target")
public void setTarget(String target) {
this.target = target;
}

public String getMaDmpTarget() {
return maDmpTarget;
}

@XmlAttribute(name = "maDmpTarget")
public void setMaDmpTarget(String maDmpTarget) {
this.maDmpTarget = maDmpTarget;
}
}

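Note (added for orientation, not part of the commit): the new mapping classes are plain JAXB beans, so a single <mapping> element can be bound to one of them directly. A minimal sketch of that binding, assuming a standalone JAXBContext; the element and attribute names come from the annotations above, while the sample XML values and the class name of the sketch are illustrative only:

import javax.xml.bind.JAXBContext;
import javax.xml.bind.Unmarshaller;
import java.io.StringReader;

public class DefaultPrefillingMappingSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical standalone unmarshalling; in the application the element is read
        // as part of the larger ExternalUrls.xml configuration instead.
        String xml = "<mapping source=\"metadata.title\" target=\"label\"/>";
        JAXBContext context = JAXBContext.newInstance(DefaultPrefillingMapping.class);
        Unmarshaller unmarshaller = context.createUnmarshaller();
        DefaultPrefillingMapping mapping =
                (DefaultPrefillingMapping) unmarshaller.unmarshal(new StringReader(xml));
        // Expected under these assumptions: "metadata.title -> label"
        System.out.println(mapping.getSource() + " -> " + mapping.getTarget());
    }
}
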
@@ -0,0 +1,38 @@
package eu.eudat.logic.proxy.config.entities;

import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;

@XmlRootElement(name = "fixedMapping")
public class PrefillingFixedMapping implements PrefillingMapping{
private String target;
private String maDmpTarget;
private String value;

public String getTarget() {
return target;
}

@XmlAttribute(name = "target")
public void setTarget(String target) {
this.target = target;
}

public String getMaDmpTarget() {
return maDmpTarget;
}

@XmlAttribute(name = "maDmpTarget")
public void setMaDmpTarget(String maDmpTarget) {
this.maDmpTarget = maDmpTarget;
}

public String getValue() {
return value;
}

@XmlAttribute(name = "value")
public void setValue(String value) {
this.value = value;
}
}

@@ -1,15 +1,13 @@
package eu.eudat.logic.proxy.config.entities;

import eu.eudat.logic.proxy.config.FetchStrategy;
import eu.eudat.logic.proxy.config.UrlConfiguration;

import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import java.util.List;

public class PrefillingGet{
private String url;
private List<PrefillingMapping> mappings;
private List<DefaultPrefillingMapping> mappings;
private List<PrefillingFixedMapping> fixedMappings;

public String getUrl() {
return url;

@@ -20,13 +18,23 @@ public class PrefillingGet{
this.url = url;
}

public List<PrefillingMapping> getMappings() {
public List<DefaultPrefillingMapping> getMappings() {
return mappings;
}

@XmlElement(name = "mapping")
@XmlElementWrapper
public void setMappings(List<PrefillingMapping> mappings) {
public void setMappings(List<DefaultPrefillingMapping> mappings) {
this.mappings = mappings;
}

public List<PrefillingFixedMapping> getFixedMappings() {
return fixedMappings;
}

@XmlElement(name = "fixedMapping")
@XmlElementWrapper
public void setFixedMappings(List<PrefillingFixedMapping> fixedMappings) {
this.fixedMappings = fixedMappings;
}
}

@@ -1,38 +1,13 @@
package eu.eudat.logic.proxy.config.entities;

import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;

@XmlRootElement(name = "mapping")
public class PrefillingMapping {
private String source;
private String target;
private String maDmpTarget;
public interface PrefillingMapping {
String getTarget();

public String getSource() {
return source;
}
void setTarget(String target);

@XmlAttribute(name = "source")
public void setSource(String source) {
this.source = source;
}
String getMaDmpTarget();

public String getTarget() {
return target;
}

@XmlAttribute(name = "target")
public void setTarget(String target) {
this.target = target;
}

public String getMaDmpTarget() {
return maDmpTarget;
}

@XmlAttribute(name = "maDmpTarget")
public void setMaDmpTarget(String maDmpTarget) {
this.maDmpTarget = maDmpTarget;
}
void setMaDmpTarget(String maDmpTarget);
}

@@ -336,23 +336,12 @@ public class RemoteFetcher {
if (response.getHeaders().get("Content-Type").get(0).contains("json")) {
DocumentContext jsonContext = JsonPath.parse(response.getBody());

if (jsonDataPath.getFieldsUrlConfiguration().getSource() != null) {
results = RemoteFetcherUtils.getFromJsonWithSource(jsonContext, jsonDataPath);
} else if (jsonDataPath.getFieldsUrlConfiguration().getCount() != null) { // parsing services.openaire.eu
results = RemoteFetcherUtils.getFromJsonWithParsedData(jsonContext, jsonDataPath);
} else if (jsonDataPath.getFieldsUrlConfiguration().getPath() != null) {
if (jsonDataPath.getFieldsUrlConfiguration().getPath() != null) {
results = RemoteFetcherUtils.getFromJsonWithRecursiveFetching(jsonContext, jsonDataPath, this, requestBody, requestType);
} else if (jsonDataPath.getFieldsUrlConfiguration().getTypes() != null) {
results = RemoteFetcherUtils.getFromJsonWithType(jsonContext, jsonDataPath);
} else if (jsonDataPath.getFieldsUrlConfiguration().getFirstName() != null) {
results = RemoteFetcherUtils.getFromJsonWithFirstAndLastName(jsonContext, jsonDataPath);
} else {
results = new Results(jsonContext.read(jsonDataPath.getPath()
+ "[" + (jsonDataPath.getFieldsUrlConfiguration().getName() != null ? jsonDataPath.getFieldsUrlConfiguration().getName(): "")
+ (jsonDataPath.getFieldsUrlConfiguration().getDescription() != null ? "," + jsonDataPath.getFieldsUrlConfiguration().getDescription(): "")
+ (jsonDataPath.getFieldsUrlConfiguration().getUri() !=null ? "," + jsonDataPath.getFieldsUrlConfiguration().getUri() : "")
+ (jsonDataPath.getFieldsUrlConfiguration().getId() != null ? "," + jsonDataPath.getFieldsUrlConfiguration().getId(): "") + "]"),
new HashMap<>(1, 1));
results = RemoteFetcherUtils.getFromJson(jsonContext, jsonDataPath);
}
results.setResults(results.getResults().stream().map(e -> e.entrySet().stream().collect(Collectors.toMap(x -> this.transformKey(jsonDataPath,x.getKey()), Map.Entry::getValue)))
.collect(Collectors.toList()));

@@ -1,71 +1,30 @@
package eu.eudat.logic.proxy.fetching;

import com.jayway.jsonpath.Configuration;
import com.jayway.jsonpath.DocumentContext;
import com.jayway.jsonpath.spi.json.JacksonJsonProvider;
import eu.eudat.logic.proxy.config.DataUrlConfiguration;
import eu.eudat.logic.proxy.config.ExternalUrlCriteria;
import eu.eudat.logic.proxy.fetching.entities.Results;
import net.minidev.json.JSONArray;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
import java.util.stream.Collectors;

public class RemoteFetcherUtils {
private final static Logger logger = LoggerFactory.getLogger(RemoteFetcherUtils.class);

public static Results getFromJsonWithSource(DocumentContext jsonContext, DataUrlConfiguration jsonDataPath) {
return new Results(jsonContext.read(jsonDataPath.getPath()
+ "[" + jsonDataPath.getFieldsUrlConfiguration().getName() + "," + jsonDataPath.getFieldsUrlConfiguration().getDescription()
+ "," + jsonDataPath.getFieldsUrlConfiguration().getUri() + "," + jsonDataPath.getFieldsUrlConfiguration().getId()
+ "," + jsonDataPath.getFieldsUrlConfiguration().getSource() + "]"),
public static Results getFromJson(DocumentContext jsonContext, DataUrlConfiguration jsonDataPath) {
return new Results(parseData(jsonContext, jsonDataPath),
new HashMap<>(1, 1));
}

public static Results getFromJsonWithParsedData(DocumentContext jsonContext, DataUrlConfiguration jsonDataPath) {
Results results = new Results(jsonContext.read(jsonDataPath.getPath()
+ "[" + jsonDataPath.getFieldsUrlConfiguration().getName()
+ "," + jsonDataPath.getFieldsUrlConfiguration().getId() + "]"),
new HashMap<>(1, 1));
List<Map<String, String>> fixedResults = results.getResults().stream().map(item -> {
for (int i = 0; i < 2; i++) {
String id;
if (i == 0) {
id = jsonDataPath.getFieldsUrlConfiguration().getId().replace("'", "");
} else {
id = jsonDataPath.getFieldsUrlConfiguration().getName().replace("'", "");
}
if (!(item.get(id) instanceof String)) {
Object obj = item.get(id);
if (obj instanceof JSONArray) {
JSONArray jarr = (JSONArray) obj;
if (jarr.get(0) instanceof String) {
item.put(id, jarr.get(0).toString());
} else {
System.out.println("here: " + jarr.size());

System.out.println(item);
for (int j = 0; j < jarr.size(); j++) {
System.out.println(jarr.get(j));
mapToMap(id, (Map<String, String>)jarr.get(j), item, i == 1);
}
}
} else {
if (obj instanceof Map) {
mapToMap(id, (Map<String, String>) obj, item, i == 1);
} else if (obj != null){
item.put(id, obj.toString());
}
}
}
}
return item;
}).collect(Collectors.toList());

return new Results(fixedResults, new HashMap<>(1, 1));
}

public static Results getFromJsonWithRecursiveFetching(DocumentContext jsonContext, DataUrlConfiguration jsonDataPath, RemoteFetcher remoteFetcher, String requestBody, String requestType) {
Results results = new Results(jsonContext.read(jsonDataPath.getPath()
+ "[" + jsonDataPath.getFieldsUrlConfiguration().getPath()
+ "," + jsonDataPath.getFieldsUrlConfiguration().getHost() + "]"),
Results results = new Results(parseData(jsonContext, jsonDataPath),
new HashMap<>(1, 1));

List<Map<String, String>> multiResults = results.getResults().stream().map(result -> {

@@ -78,34 +37,8 @@ public class RemoteFetcherUtils {
return new Results(multiResults, new HashMap<>(1, 1));
}

public static Results getFromJsonWithType(DocumentContext jsonContext, DataUrlConfiguration jsonDataPath) {
List<Map<String, Object>> tempRes = jsonContext.read(jsonDataPath.getPath()
+ "[" + jsonDataPath.getFieldsUrlConfiguration().getId() + "," + jsonDataPath.getFieldsUrlConfiguration().getName()
+ "," + jsonDataPath.getFieldsUrlConfiguration().getTypes() + "," + jsonDataPath.getFieldsUrlConfiguration().getUri() + "]");
List<Map<String, String>> finalRes = new ArrayList<>();
tempRes.forEach(map -> {
Map<String, String> resMap = new HashMap<>();
map.forEach((key, value) -> {
if (key.equals(jsonDataPath.getFieldsUrlConfiguration().getTypes().substring(1, jsonDataPath.getFieldsUrlConfiguration().getTypes().length() - 1))) {
resMap.put("tags", ((JSONArray) value).toJSONString());
} else if (key.equals(jsonDataPath.getFieldsUrlConfiguration().getUri().substring(1, jsonDataPath.getFieldsUrlConfiguration().getTypes().length() - 1))) {
resMap.put(key, ((JSONArray) value).toJSONString());
} else {
resMap.put(key, (String) value);
}
});

finalRes.add(resMap);
});

return new Results(finalRes,
new HashMap<>(1, 1));
}

public static Results getFromJsonWithFirstAndLastName(DocumentContext jsonContext, DataUrlConfiguration jsonDataPath) {
Results results = new Results(jsonContext.read(jsonDataPath.getPath()
+ "[" + jsonDataPath.getFieldsUrlConfiguration().getId() + "," + jsonDataPath.getFieldsUrlConfiguration().getFirstName()
+ "," + jsonDataPath.getFieldsUrlConfiguration().getLastName() + "]"),
Results results = new Results(parseData(jsonContext, jsonDataPath),
new HashMap<>(1, 1));
results.getResults().stream().forEach(entry -> {
String name = entry.get(jsonDataPath.getFieldsUrlConfiguration().getFirstName().replace("'", "")) + " " + entry.get(jsonDataPath.getFieldsUrlConfiguration().getLastName().replace("'", ""));

@@ -116,17 +49,45 @@ public class RemoteFetcherUtils {
return results;
}

private static void mapToMap(String key, Map<String, String> source, Map<String, String> destination, boolean isTitle) {
if (source != null) {
String content = source.get("content");
/*if (isTitle) {
String classId = source.get("classid");
if (classId.equals("main title")) {
destination.put(key, content);
private static List<Map<String, String>> parseData (DocumentContext jsonContext, DataUrlConfiguration jsonDataPath) {
List <Map<String, Object>> rawData = jsonContext.read(jsonDataPath.getPath());
List<Map<String, String>> parsedData = new ArrayList<>();
rawData.forEach(stringObjectMap -> {
parsedData.add(new LinkedHashMap<>());
Arrays.stream(jsonDataPath.getFieldsUrlConfiguration().getClass().getDeclaredFields()).forEach(field -> {
String getterMethodName = "get" + field.getName().substring(0, 1).toUpperCase(Locale.ROOT) + field.getName().substring(1);
Method getterMethod = Arrays.stream(jsonDataPath.getFieldsUrlConfiguration().getClass().getDeclaredMethods()).filter(method -> method.getName().equals(getterMethodName)).collect(Collectors.toList()).get(0);
try {
String value = ((String) getterMethod.invoke(jsonDataPath.getFieldsUrlConfiguration()));
if (value != null) {
value = value.replace("'", "");
if (stringObjectMap.containsKey(value)) {
parsedData.get(parsedData.size() - 1).put(field.getName().equals("types") ? "tags" : value, normalizeValue(stringObjectMap.get(value), (field.getName().equals("types") || field.getName().equals("uri"))));
}
}
} catch (IllegalAccessException | InvocationTargetException e) {
logger.error(e.getLocalizedMessage(), e);
}
} else {*/
destination.put(key, content);
// }
});
});
return parsedData;
}

private static String normalizeValue(Object value, boolean jsonString) {
if (value instanceof JSONArray) {
if (jsonString) {
return ((JSONArray)value).toJSONString();
}
JSONArray jarr = (JSONArray) value;
if (jarr.get(0) instanceof String) {
return jarr.get(0).toString();
} else {
return ((Map<String, String>)jarr.get(0)).get("content");

}
} else if (value instanceof Map) {
return ((Map<String, String>)value).get("content");
}
return value != null ? value.toString() : null;
}
}

@@ -131,7 +131,7 @@ public class WordBuilder {
this.options.put(ParagraphStyle.COMMENT, (mainDocumentPart, item) -> {
XWPFParagraph paragraph = mainDocumentPart.createParagraph();
XWPFRun run = paragraph.createRun();
run.setText(item);
run.setText(" " + item);
run.setItalic(true);
return paragraph;
});

@@ -178,7 +178,7 @@ public class WordBuilder {
number.setVal(BigInteger.valueOf(indent));
paragraphPos = mainDocumentPart.getPosOfParagraph(paragraph);
}
createSections(section.getSections(), mainDocumentPart, ParagraphStyle.HEADER5, 1, createListing, visibilityRuleService, page, tempSectionString);
createSections(section.getSections(), mainDocumentPart, ParagraphStyle.HEADER4, 1, createListing, visibilityRuleService, page, tempSectionString);
hasValue = createCompositeFields(section.getCompositeFields(), mainDocumentPart, 2, createListing, visibilityRuleService, page, tempSectionString);

if (!hasValue && paragraphPos > -1) {

@@ -193,18 +193,26 @@ public class WordBuilder {
boolean hasValue = false;
for (FieldSet compositeField: compositeFields) {
if (visibilityRuleService.isElementVisible(compositeField.getId()) && hasVisibleFields(compositeField, visibilityRuleService)) {
char c = 'a';
int paragraphPos = -1;
if (compositeField.getTitle() != null && !compositeField.getTitle().isEmpty() && !createListing) {
XWPFParagraph paragraph = addParagraphContent(page + "." + section + "." + (compositeField.getOrdinal() +1) + " " + compositeField.getTitle(), mainDocumentPart, ParagraphStyle.HEADER6, numId);
CTDecimalNumber number = paragraph.getCTP().getPPr().getNumPr().addNewIlvl();
number.setVal(BigInteger.valueOf(indent));
paragraphPos = mainDocumentPart.getPosOfParagraph(paragraph);
if(compositeField.getMultiplicityItems() != null && !compositeField.getMultiplicityItems().isEmpty()){
addParagraphContent(c + ".\n", mainDocumentPart, ParagraphStyle.HEADER6, numId);
}
}
hasValue = createFields(compositeField.getFields(), mainDocumentPart, 3, createListing, visibilityRuleService);
if (compositeField.getMultiplicityItems() != null && !compositeField.getMultiplicityItems().isEmpty()) {
List<FieldSet> list = compositeField.getMultiplicityItems().stream().sorted(Comparator.comparingInt(FieldSet::getOrdinal)).collect(Collectors.toList());
for (FieldSet multiplicityFieldset : list) {
hasValue = createFields(multiplicityFieldset.getFields(), mainDocumentPart, 3, createListing, visibilityRuleService);
if(!createListing){
c++;
addParagraphContent(c + ".\n", mainDocumentPart, ParagraphStyle.HEADER6, numId);
}
hasValue = createFields(multiplicityFieldset.getFields(), mainDocumentPart, 3, createListing, visibilityRuleService);
}
}
if (hasValue && compositeField.getHasCommentField() && compositeField.getCommentFieldValue() != null && !compositeField.getCommentFieldValue().isEmpty() && !createListing) {

@@ -224,13 +232,31 @@ public class WordBuilder {
if (createListing) this.addListing(mainDocumentPart, indent, false, false);
boolean hasValue = false;
List<Field> tempFields = fields.stream().sorted(Comparator.comparingInt(Field::getOrdinal)).collect(Collectors.toList());
List<Field> formats = tempFields.stream().filter(f -> {
try {
String fTemp = this.formatter(f);
return fTemp != null && !fTemp.isEmpty();
} catch (IOException e) {
logger.error(e.getMessage(), e);
}
return false;
}).collect(Collectors.toList());
for (Field field: tempFields) {
if (visibilityRuleService.isElementVisible(field.getId())) {
if (!createListing) {
try {
if (field.getValue() != null && !field.getValue().toString().isEmpty()) {
this.indent = indent;
XWPFParagraph paragraph = addParagraphContent(this.formatter(field), mainDocumentPart, field.getViewStyle().getRenderStyle().equals("richTextarea") ? ParagraphStyle.HTML : ParagraphStyle.TEXT, numId);
String format = this.formatter(field);
if(format != null){
if(format.charAt(0) == '['){
format = format.substring(1, format.length() - 1).replaceAll(",", ", ");
}
if(formats.size() > 1){
format = "\t• " + format;
}
}
XWPFParagraph paragraph = addParagraphContent(format, mainDocumentPart, field.getViewStyle().getRenderStyle().equals("richTextarea") ? ParagraphStyle.HTML : ParagraphStyle.TEXT, numId);
if (paragraph != null) {
CTDecimalNumber number = paragraph.getCTP().getPPr().getNumPr().addNewIlvl();
number.setVal(BigInteger.valueOf(indent));

@@ -4,10 +4,10 @@ public class JavaToJson {

public static String objectStringToJson(String object) {
String result = object.replaceAll("=", "\":\"")
.replaceAll("\\{", "{\"")
//.replaceAll("\\{", "{\"")
.replaceAll(", ", "\", \"")
.replaceAll("}", "\"}" ).
replaceAll("}\", \"\\{", "}, {");
//.replaceAll("}", "\"}" ).
.replaceAll("}\", \"\\{", "}, {");
return result;
}
}

@@ -1,10 +1,14 @@
package eu.eudat.models.data.datasetwizard;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.data.entities.*;
import eu.eudat.elastic.entities.Tag;
import eu.eudat.logic.managers.DatasetManager;
import eu.eudat.logic.proxy.config.entities.PrefillingFixedMapping;
import eu.eudat.logic.proxy.config.entities.PrefillingGet;
import eu.eudat.logic.proxy.config.entities.DefaultPrefillingMapping;
import eu.eudat.logic.proxy.config.entities.PrefillingMapping;
import eu.eudat.logic.utilities.json.JsonSearcher;
import eu.eudat.models.DataModel;

@@ -20,7 +24,6 @@ import net.minidev.json.JSONValue;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;

@@ -305,7 +308,7 @@ public class DatasetWizardModel implements DataModel<Dataset, DatasetWizardModel
return entity;
}

public static DatasetWizardModel fromPrefilledEntity(Map<String, Object> prefilledEntity, List<PrefillingMapping> mappings,
public static DatasetWizardModel fromPrefilledEntity(Map<String, Object> prefilledEntity, PrefillingGet prefillingGet,
DatasetProfile profile, DatasetManager datasetManager) throws Exception {
DatasetWizardModel datasetWizardModel = new DatasetWizardModel();
datasetWizardModel.setProfile(new DatasetProfileOverviewModel().fromDataModel(profile));

@@ -314,7 +317,7 @@ public class DatasetWizardModel implements DataModel<Dataset, DatasetWizardModel
Map<String, Object> properties = new HashMap<>();
ObjectMapper objectMapper = new ObjectMapper();
JsonNode parentNode = objectMapper.readTree(objectMapper.writeValueAsString(datasetManager.getPagedProfile(datasetWizardModel, dataset)));
for (PrefillingMapping prefillingMapping: mappings) {
for (DefaultPrefillingMapping prefillingMapping: prefillingGet.getMappings()) {
List<String> sourceKeys = Arrays.asList(prefillingMapping.getSource().split("\\."));
Object sourceValue = null;
for (String sourceKey: sourceKeys) {

@@ -324,27 +327,45 @@ public class DatasetWizardModel implements DataModel<Dataset, DatasetWizardModel
sourceValue = ((Map)sourceValue).get(sourceKey);
}
}
if (prefillingMapping.getTarget() != null) {
try {
String methodName = "set" + prefillingMapping.getTarget().substring(0, 1).toUpperCase(Locale.ROOT) + prefillingMapping.getTarget().substring(1);
Method setterMethod = DatasetWizardModel.class.getMethod(methodName, String.class);
setterMethod.invoke(datasetWizardModel, sourceValue);
}catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
e.printStackTrace();
}
} else {
List<JsonNode> nodes = JsonSearcher.findNodes(parentNode, "rdaProperty", prefillingMapping.getMaDmpTarget());
for (JsonNode node: nodes) {
String id = node.get(0) != null ? node.get(0).get("id").asText() : node.get("id").asText();
properties.put(id, sourceValue);
}
}
setValue(prefillingMapping, objectMapper.writeValueAsString(sourceValue), datasetWizardModel, parentNode, properties);
}
for (PrefillingFixedMapping fixedMapping: prefillingGet.getFixedMappings()) {
setValue(fixedMapping, fixedMapping.getValue(), datasetWizardModel, parentNode, properties);
}
dataset.setProperties(objectMapper.writeValueAsString(properties));
datasetWizardModel.setDatasetProfileDefinition(datasetManager.getPagedProfile(datasetWizardModel, dataset));
return datasetWizardModel;
}

private static void setValue(PrefillingMapping prefillingMapping, String value, DatasetWizardModel datasetWizardModel, JsonNode parentNode, Map<String, Object> properties) throws InvocationTargetException, IllegalAccessException, JsonProcessingException {
if (prefillingMapping.getTarget() != null) {
try {
String methodName = "set" + prefillingMapping.getTarget().substring(0, 1).toUpperCase(Locale.ROOT) + prefillingMapping.getTarget().substring(1);
Method setterMethod = Arrays.stream(DatasetWizardModel.class.getDeclaredMethods())
.filter(method -> method.getName().equals(methodName)).collect(Collectors.toList()).get(0);
Class<?>[] params = setterMethod.getParameterTypes();
ObjectMapper mapper = new ObjectMapper();
//GK: Tags Special logic
if (prefillingMapping.getTarget().equals("tags")) {
List<Object> rawTags = (List<Object>) mapper.readValue(value, params[0]);
if (rawTags.get(0) instanceof String) {
List<Tag> parsedTags = rawTags.stream().map(rawTag -> new Tag((String) rawTag, (String) rawTag)).collect(Collectors.toList());
value = mapper.writeValueAsString(parsedTags);
}
}
setterMethod.invoke(datasetWizardModel, mapper.readValue(value, params[0]));
}catch (InvocationTargetException | IllegalAccessException | JsonProcessingException e) {
throw e;
}
} else {
List<JsonNode> nodes = JsonSearcher.findNodes(parentNode, "rdaProperty", prefillingMapping.getMaDmpTarget());
for (JsonNode node: nodes) {
String id = node.get(0) != null ? node.get(0).get("id").asText() : node.get("id").asText();
properties.put(id, value);
}
}
}

@Override
public String getHint() {
return "datasetWizardModel";

@@ -1,10 +1,19 @@
package eu.eudat.models.rda.mapper;

import java.util.Map;
import java.util.*;
import java.util.stream.Collectors;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.logic.utilities.json.JavaToJson;
import eu.eudat.models.rda.Cost;
import eu.eudat.models.rda.PidSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class CostRDAMapper {
private static final Logger logger = LoggerFactory.getLogger(DatasetRDAMapper.class);

public static Cost toRDA(Map<String, Object> cost) {
Cost rda = new Cost();

@@ -19,4 +28,69 @@ public class CostRDAMapper {
return rda;
}

public static List<Cost> toRDAList(List<JsonNode> nodes) throws JsonProcessingException {
Map<String, Cost> rdaMap = new HashMap<>();
for(JsonNode node: nodes){
String rdaProperty = node.get("rdaProperty").asText();
String rdaValue = node.get("value").asText();
if(rdaValue == null || (rdaValue.isEmpty() && !node.get("value").isArray())){
continue;
}
String key = node.get("numbering").asText();
if(!key.contains("mult")){
key = "0";
}
else{
key = "" + key.charAt(4);
}
Cost rda;
if(rdaMap.containsKey(key)){
rda = rdaMap.get(key);
}
else{
rda = new Cost();
rdaMap.put(key, rda);
}
if(rdaProperty.contains("value")){
rda.setValue(Double.valueOf(rdaValue));
}
else if(rdaProperty.contains("currency_code")){
String json = JavaToJson.objectStringToJson(rdaValue);
HashMap<String,String> result =
new ObjectMapper().readValue(json, HashMap.class);
rda.setCurrencyCode(Cost.CurrencyCode.fromValue(result.get("value")));
}
else if(rdaProperty.contains("title")){
Iterator<JsonNode> iter = node.get("value").elements();
StringBuilder title = new StringBuilder();
while(iter.hasNext()){
String next = iter.next().asText();
if(!next.equals("Other")) {
title.append(next).append(", ");
}
}
if(title.length() > 2){
rda.setTitle(title.substring(0, title.length() - 2));
}
else{
String t = rda.getTitle();
if(t == null){ // only other as title
rda.setTitle(rdaValue);
}
else{ // option + other
rda.setTitle(t + ", " + rdaValue);
}
}
}
else if(rdaProperty.contains("description")){
rda.setDescription(rdaValue);
}
}
List<Cost> rdaList = rdaMap.values().stream()
.filter(cost -> cost.getTitle() != null)
.collect(Collectors.toList());
return rdaList;

}

}

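Note (added for orientation, not part of the commit): toRDAList groups the form nodes by their multiplicity prefix and fills one Cost per group. A hedged usage sketch; the node property names ("rdaProperty", "value", "numbering") come from the code above, while the concrete numbering values and sample data are assumptions for illustration:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.models.rda.Cost;
import eu.eudat.models.rda.mapper.CostRDAMapper;
import java.util.Arrays;
import java.util.List;

public class CostRDAMapperSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Two nodes sharing the same multiplicity group ("mult1..."), so they land in the same Cost.
        JsonNode title = mapper.readTree(
            "{\"rdaProperty\":\"dmp.cost.title\",\"value\":[\"Storage\"],\"numbering\":\"mult1_0\"}");
        JsonNode value = mapper.readTree(
            "{\"rdaProperty\":\"dmp.cost.value\",\"value\":\"1500\",\"numbering\":\"mult1_1\"}");
        List<Cost> costs = CostRDAMapper.toRDAList(Arrays.asList(title, value));
        // Expected under these assumptions: one Cost with title "Storage" and value 1500.0.
        System.out.println(costs.size() + " cost(s), title: " + costs.get(0).getTitle());
    }
}
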
@@ -1,18 +1,17 @@
package eu.eudat.models.rda.mapper;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.data.entities.DatasetProfile;
import eu.eudat.elastic.entities.Tag;
import eu.eudat.logic.managers.DatasetManager;
import eu.eudat.logic.services.ApiContext;
import eu.eudat.logic.utilities.json.JavaToJson;
import eu.eudat.logic.utilities.json.JsonSearcher;
import eu.eudat.models.data.datasetprofile.DatasetProfileOverviewModel;
import eu.eudat.models.data.datasetwizard.DatasetWizardModel;
import eu.eudat.models.rda.Contributor;
import eu.eudat.models.rda.Dataset;
import eu.eudat.models.rda.DatasetId;
import eu.eudat.models.rda.Language;
import eu.eudat.models.rda.*;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -20,6 +19,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import javax.transaction.Transactional;
import java.net.URI;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;

@@ -43,7 +43,7 @@ public class DatasetRDAMapper {
}

@Transactional
public Dataset toRDA(eu.eudat.data.entities.Dataset dataset, List<Contributor> contributors) {
public Dataset toRDA(eu.eudat.data.entities.Dataset dataset, eu.eudat.models.rda.Dmp dmp) {
Dataset rda = new Dataset();
// rda.setDatasetId(DatasetIdRDAMapper.toRDA(dataset.getId()));
if (dataset.getLabel() == null) {

@@ -91,10 +91,27 @@ public class DatasetRDAMapper {
for (int i = 0; i < qaNodes.size(); i++) {
rda.setAdditionalProperty("qaId" + (i + 1), qaNodes.get(i).get("id").asText());
}*/
List<String> qaList = qaNodes.stream()
.map(qaNode -> qaNode.get("value").asText())
.filter(qaNode -> !qaNode.isEmpty())
.collect(Collectors.toList());
List<String> qaList = new ArrayList<>();
String qa;
for(JsonNode node: qaNodes){
if(node.get("value").isArray()){
Iterator<JsonNode> iter = node.get("value").elements();
while(iter.hasNext()) {
qa = iter.next().asText();
qaList.add(qa);
}
}
}
String data_quality;
for(JsonNode dqa: qaNodes){
data_quality = dqa.get("value").asText();
if(!data_quality.isEmpty()){
qaList.add(data_quality);
rda.setAdditionalProperty("otherDQAID", dqa.get("id"));
rda.setAdditionalProperty("otherDQA", data_quality);
break;
}
}
rda.setDataQualityAssurance(qaList);
}else{
rda.setDataQualityAssurance(new ArrayList<>());

@@ -128,7 +145,11 @@ public class DatasetRDAMapper {
}
List<JsonNode> personalDataNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.personal_data");
if (!personalDataNodes.isEmpty()) {
rda.setPersonalData(personalDataNodes.stream().map(personalDataNode -> Dataset.PersonalData.fromValue(personalDataNode.get("value").asText())).findFirst().get());
try{
rda.setPersonalData(personalDataNodes.stream().map(personalDataNode -> Dataset.PersonalData.fromValue(personalDataNode.get("value").asText())).findFirst().get());
}catch(IllegalArgumentException e){
rda.setPersonalData(Dataset.PersonalData.UNKNOWN);
}
} else {
rda.setPersonalData(Dataset.PersonalData.UNKNOWN);
}

@@ -140,7 +161,11 @@ public class DatasetRDAMapper {
}
List<JsonNode> sensitiveDataNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.sensitive_data");
if (!sensitiveDataNodes.isEmpty()) {
rda.setSensitiveData(sensitiveDataNodes.stream().map(sensitiveDataNode -> Dataset.SensitiveData.fromValue(sensitiveDataNode.get("value").asText())).findFirst().get());
try{
rda.setSensitiveData(sensitiveDataNodes.stream().map(sensitiveDataNode -> Dataset.SensitiveData.fromValue(sensitiveDataNode.get("value").asText())).findFirst().get());
}catch(IllegalArgumentException e){
rda.setSensitiveData(Dataset.SensitiveData.UNKNOWN);
}
} else {
rda.setSensitiveData(Dataset.SensitiveData.UNKNOWN);
}

@@ -156,15 +181,58 @@ public class DatasetRDAMapper {
}
List<JsonNode> contributorNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dmp.contributor");
if (!contributorNodes.isEmpty()) {
contributors.addAll(contributorNodes.stream().map(contributorNode -> {
dmp.getContributor().addAll(contributorNodes.stream().map(contributorNode -> {
JsonNode value = contributorNode.get("value");
if (value.isArray()) {
return StreamSupport.stream(value.spliterator(), false).map(node -> ContributorRDAMapper.toRDA(node.asText())).collect(Collectors.toList());
} else {
return Collections.singletonList(new Contributor());
return Collections.singletonList(new Contributor()); // return null kalutera
}
}).flatMap(Collection::stream).collect(Collectors.toList()));
}
List<JsonNode> costNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dmp.cost");
if (!costNodes.isEmpty()) {
dmp.getCost().addAll(CostRDAMapper.toRDAList(costNodes));
}
List<JsonNode> ethicsNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dmp.ethical_issues");
if (!ethicsNodes.isEmpty()) {
for(JsonNode node: ethicsNodes){
String rdaProperty = node.get("rdaProperty").asText();
String rdaValue = node.get("value").asText();
if(rdaValue == null || rdaValue.isEmpty()){
continue;
}
if(rdaProperty.contains("exist")){
try {
Dmp.EthicalIssuesExist exists = dmp.getEthicalIssuesExist();
if(exists == null
|| ((exists == Dmp.EthicalIssuesExist.NO || exists == Dmp.EthicalIssuesExist.UNKNOWN) && rdaValue.equals("yes"))
|| (exists == Dmp.EthicalIssuesExist.YES && !(rdaValue.equals("no") || rdaValue.equals("unknown")))
|| (exists == Dmp.EthicalIssuesExist.UNKNOWN && rdaValue.equals("no"))){
dmp.setEthicalIssuesExist(Dmp.EthicalIssuesExist.fromValue(rdaValue));
}
}catch(IllegalArgumentException e){
logger.warn(e.getLocalizedMessage() + ". Setting ethical_issues_exist to unknown");
dmp.setEthicalIssuesExist(Dmp.EthicalIssuesExist.UNKNOWN);
}
}
// else if(rdaProperty.contains("description")){
// if(dmp.getEthicalIssuesDescription() == null){
// dmp.setEthicalIssuesDescription(rdaValue);
// }
// else{
// dmp.setEthicalIssuesDescription(dmp.getEthicalIssuesDescription() + ", " + rdaValue);
// }
// }
// else if(rdaProperty.contains("report")){
// try {
// dmp.setEthicalIssuesReport(URI.create(rdaValue));
// } catch (IllegalArgumentException e) {
// logger.warn(e.getLocalizedMessage() + ". Skipping url parsing");
// }
// }
}
}
List<JsonNode> foundNodes = Stream.of(typeNodes, languageNodes, metadataNodes, qaNodes, preservationNodes, distributionNodes,
keywordNodes, personalDataNodes, securityAndPrivacyNodes, sensitiveDataNodes, technicalResourceNodes).flatMap(Collection::stream).collect(Collectors.toList());
templateIdsToValues.entrySet().forEach(entry -> {

@@ -233,7 +301,14 @@ public class DatasetRDAMapper {
}*/
List<JsonNode> qaNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.data_quality_assurance");
if (!qaNodes.isEmpty() && rda.getDataQualityAssurance() != null && !rda.getDataQualityAssurance().isEmpty()) {
properties.put(qaNodes.get(0).get("id").asText(), rda.getDataQualityAssurance().get(0));
ObjectMapper m = new ObjectMapper();
List<String> qas = new ArrayList<>(rda.getDataQualityAssurance());
if(!qas.isEmpty()){
properties.put(qaNodes.get(0).get("id").asText(), m.writeValueAsString(qas));
if(rda.getAdditionalProperties().containsKey("otherDQAID")){
properties.put((String)rda.getAdditionalProperties().get("otherDQAID"), rda.getAdditionalProperties().get("otherDQA"));
}
}
}

List<JsonNode> preservationNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.preservation_statement");

@@ -246,25 +321,28 @@ public class DatasetRDAMapper {
properties.put(issuedNodes.get(0).get("id").asText(), rda.getIssued());
}

if (rda.getDistribution() != null) {
if (rda.getDistribution() != null && !rda.getDistribution().isEmpty()) {
properties.putAll(DistributionRDAMapper.toProperties(rda.getDistribution().get(0), datasetDescriptionObj));
}

if (rda.getKeyword() != null) {
List<String> keywordIds = rda.getAdditionalProperties().entrySet().stream().filter(entry -> entry.getKey().startsWith("keyword")).map(entry -> entry.getValue().toString()).collect(Collectors.toList());
boolean takeAll = false;
if (keywordIds.size() < rda.getKeyword().size()) {
takeAll = true;
}
for (int i = 0; i < keywordIds.size(); i++) {
if (takeAll) {
List<String> tags = new ArrayList<>();
for (String keyword : rda.getKeyword()) {
tags.add(mapper.writeValueAsString(toTagEntity(keyword)));
}
properties.put(keywordIds.get(i), tags);
} else {
properties.put(keywordIds.get(i), mapper.writeValueAsString(toTagEntity(rda.getKeyword().get(i))));
// boolean takeAll = false;
// if (keywordIds.size() < rda.getKeyword().size()) {
// takeAll = true;
// }
if(!rda.getKeyword().isEmpty()){
for (int i = 0; i < keywordIds.size(); i++) {
// if (takeAll) {
// List<String> tags = new ArrayList<>();
// for (String keyword : rda.getKeyword()) {
// tags.add(mapper.writeValueAsString(toTagEntity(keyword)));
// }
// properties.put(keywordIds.get(i), tags);
// } else {
// properties.put(keywordIds.get(i), mapper.writeValueAsString(toTagEntity(rda.getKeyword().get(i))));
// }
properties.put(keywordIds.get(i), rda.getKeyword().get(i));
}
}
}

@@ -1,7 +1,11 @@
package eu.eudat.models.rda.mapper;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.logic.utilities.helpers.MyStringUtils;
import eu.eudat.logic.utilities.json.JavaToJson;
import eu.eudat.logic.utilities.json.JsonSearcher;
import eu.eudat.models.rda.Distribution;
import eu.eudat.models.rda.License;

@@ -21,7 +25,8 @@ public class DistributionRDAMapper {
for (JsonNode node: nodes) {
String rdaProperty = node.get("rdaProperty").asText();
String rdaValue = node.get("value").asText();
if(rdaValue == null || rdaValue.isEmpty()){
//if(rdaValue == null || rdaValue.isEmpty()){
if(rdaValue == null || (rdaValue.isEmpty() && !node.get("value").isArray())){
continue;
}
String key = node.get("numbering").asText();

@@ -39,10 +44,10 @@ public class DistributionRDAMapper {
rda = new Distribution();
rdaMap.put(key, rda);
}
/* Distribution rda = getRelative(rdaMap, node.get("numbering").asText());
/* Distribution rda = getRelative(rdaMap, node.get("numbering").asText());
if (!rdaMap.containsValue(rda)) {
rdaMap.put(node.get("numbering").asText(), rda);
}*/
} */
for (ExportPropertyName exportPropertyName : ExportPropertyName.values()) {
if (rdaProperty.contains(exportPropertyName.getName())) {
switch (exportPropertyName) {

@@ -78,7 +83,28 @@ public class DistributionRDAMapper {
rda.setLicense(license != null? Collections.singletonList(license): new ArrayList<>());
break;
case FORMAT:
rda.setFormat(new ArrayList<>(Arrays.asList(rdaValue.replace(" ", "").split(","))));
if(node.get("value").isArray()){
Iterator<JsonNode> iter = node.get("value").elements();
List<String> formats = new ArrayList<>();
int i = 1;
while(iter.hasNext()) {
JsonNode current = iter.next();
String format = JavaToJson.objectStringToJson(current.asText());
try {
Map<String, String> result = new ObjectMapper().readValue(format, HashMap.class);
format = result.get("label");
formats.add(format);
rda.setAdditionalProperty("format" + i++, new ObjectMapper().readTree(current.asText()));
}
catch(JsonProcessingException e){
logger.warn(e.getMessage());
}
}
rda.setFormat(formats);
}
else{
rda.setFormat(new ArrayList<>(Arrays.asList(rdaValue.replace(" ", "").split(","))));
}
rda.setAdditionalProperty(ImportPropertyName.FORMAT.getName(), node.get("id").asText());
break;
case TITLE:

@@ -183,7 +209,19 @@ public class DistributionRDAMapper {
break;
case FORMAT:
if (rda.getFormat() != null && !rda.getFormat().isEmpty()) {
properties.put(distributionNode.get("id").asText(), rda.getFormat().get(0));
Map<String, Object> additionalProperties = rda.getAdditionalProperties();
List<Object> standardFormats = new ArrayList<>();
ObjectMapper mapper = new ObjectMapper();
rda.getAdditionalProperties().forEach((key, value) -> {
try {
if (key.matches("format\\d+")) {
standardFormats.add(additionalProperties.get(key));
properties.put(distributionNode.get("id").asText(), mapper.writeValueAsString(standardFormats));
}
} catch (JsonProcessingException e) {
logger.error(e.getMessage(), e);
}
});
}
break;
case LICENSE:

@@ -68,7 +68,7 @@ public class DmpRDAMapper {

if (!extraProperties.isEmpty()) {
if (extraProperties.get("ethicalIssues") != null) {
rda.setEthicalIssuesExist(Dmp.EthicalIssuesExist.fromValue(extraProperties.get("ethicalIsses").toString()));
rda.setEthicalIssuesExist(Dmp.EthicalIssuesExist.fromValue(extraProperties.get("ethicalIssues").toString()));
} else {
rda.setEthicalIssuesExist(Dmp.EthicalIssuesExist.UNKNOWN);
}

@@ -95,7 +95,7 @@ public class DmpRDAMapper {
rda.getContributor().addAll(dmp.getResearchers().stream().map(ContributorRDAMapper::toRDA).collect(Collectors.toList()));
}
// rda.getContributor().addAll(dmp.getUsers().stream().map(ContributorRDAMapper::toRDA).collect(Collectors.toList()));
rda.setDataset(dmp.getDataset().stream().filter(dataset -> dataset.getStatus() != eu.eudat.elastic.entities.Dmp.DMPStatus.DELETED.getValue()).map(dataset -> datasetRDAMapper.toRDA(dataset, rda.getContributor())).collect(Collectors.toList()));
rda.setDataset(dmp.getDataset().stream().filter(dataset -> dataset.getStatus() != eu.eudat.elastic.entities.Dmp.DMPStatus.DELETED.getValue()).map(dataset -> datasetRDAMapper.toRDA(dataset, rda)).collect(Collectors.toList()));
rda.setProject(Collections.singletonList(ProjectRDAMapper.toRDA(dmp.getProject(), dmp.getGrant())));
rda.setAdditionalProperty("templates", dmp.getAssociatedDmps().stream().map(datasetProfile -> datasetProfile.getId().toString()).toArray());
return rda;

@@ -22,7 +22,7 @@ public class HostRDAMapper {
String rdaProperty = node.get("rdaProperty").asText();
if (rdaProperty.contains("host")) {
int firstDiff = MyStringUtils.getFirstDifference(numbering, node.get("numbering").asText());
if (firstDiff == -1 || firstDiff > 2) {
if (firstDiff == -1 || firstDiff >= 2) {
String rdaValue = node.get("value").asText();
if(rdaValue == null || (rdaValue.isEmpty() && !node.get("value").isArray())){
continue;

@@ -40,6 +40,7 @@ public class HostRDAMapper {
break;
case BACKUP_TYPE:
rda.setBackupType(rdaValue);
rda.setAdditionalProperty(ImportPropertyName.BACKUP_TYPE.getName(), node.get("id").asText());
break;
case CERTIFIED_WITH:
rda.setCertifiedWith(Host.CertifiedWith.fromValue(rdaValue));

@@ -67,7 +68,14 @@ public class HostRDAMapper {
while(iter.hasNext()) {
pList.add(iter.next().asText());
}
List<PidSystem> pidList = pList.stream().map(PidSystem::fromValue).collect(Collectors.toList());
List<PidSystem> pidList;
if(pList.size() == 0){
pidList = Arrays.stream(rdaValue.replaceAll("[\\[\"\\]]","").split(","))
.map(PidSystem::fromValue).collect(Collectors.toList());
}
else{
pidList = pList.stream().map(PidSystem::fromValue).collect(Collectors.toList());
}
rda.setPidSystem(pidList);
rda.setAdditionalProperty(ImportPropertyName.PID_SYSTEM.getName(), node.get("id").asText());
}

@@ -138,7 +146,14 @@ public class HostRDAMapper {
properties.put(entry.getValue().toString(), rda.getGeoLocation().value());
break;
case PID_SYSTEM:
properties.put(entry.getValue().toString(), rda.getPidSystem().get(0).value());
List<Object> pids = new ArrayList<>();
ObjectMapper mapper = new ObjectMapper();
for(PidSystem pid: rda.getPidSystem()){
pids.add(pid.value());
}
if(!pids.isEmpty()){
properties.put(entry.getValue().toString(), mapper.writeValueAsString(pids));
}
break;
case STORAGE_TYPE:
properties.put(entry.getValue().toString(), rda.getStorageType());

@@ -15,21 +15,17 @@ public class KeywordRDAMapper {
private static final Logger logger = LoggerFactory.getLogger(KeywordRDAMapper.class);

public static List<String> toRDA(String value) {
ObjectMapper mapper = new ObjectMapper();
value = JavaToJson.objectStringToJson(value);
if (!value.isEmpty()) {
try {
List<Tag> tags = Arrays.asList(mapper.readValue(value, Tag[].class));
List<String> keywordNames = tags.stream().map(Tag::getName).collect(Collectors.toList());
return keywordNames;
} catch (JsonProcessingException e) {
logger.warn(e.getMessage() + ". Attempting to parse it as a String list.");
if(!value.isEmpty()) {
return new ArrayList<>(Arrays.asList(value.replace(" ", "").split(",")));
}
}

if (!value.isEmpty()) {
try {
ObjectMapper mapper = new ObjectMapper();
String valueJson = JavaToJson.objectStringToJson(value);
List<Tag> tags = Arrays.asList(mapper.readValue(valueJson, Tag[].class));
return tags.stream().map(Tag::getName).collect(Collectors.toList());
} catch (JsonProcessingException e) {
logger.warn(e.getMessage() + ". Attempting to parse it as a String list.");
return new ArrayList<>(Arrays.asList(value.replace(" ", "").split(",")));
}
}

return new ArrayList<>();
}

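Note (added for orientation, not part of the commit): the refactored keyword parsing first tries to read the value as a Tag[] JSON array and only falls back to splitting on commas when that parse fails. A hedged usage sketch; the sample input string is illustrative, and the mapper's package is assumed to match the neighbouring mapper classes:

import eu.eudat.models.rda.mapper.KeywordRDAMapper;
import java.util.List;

public class KeywordRDAMapperSketch {
    public static void main(String[] args) {
        // Not parseable as a Tag[] JSON array, so the catch branch splits the raw string on commas.
        List<String> keywords = KeywordRDAMapper.toRDA("genomics, proteomics");
        // Expected under these assumptions: [genomics, proteomics]
        System.out.println(keywords);
    }
}
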
@@ -34,12 +34,49 @@ dataset.security_and_privacy.description
dataset.security_and_privacy.title
dataset.sensitive_data
dataset.technical_resource.description
dataset.technical_resource.technical_resource
dataset.technical_resource.technical_resource.description
dataset.technical_resource.technical_resource.name
dataset.technical_resource.name
dataset.title
dataset.type
dataset.issued
dataset.dataset_id
dataset.dataset_id.identifier
dataset.dataset_id.type
dmp.contributor
dataset.description
dmp.contact
dmp.contact.contact_id.identifier
dmp.contact.contact_id.type
dmp.contact.mbox
dmp.contact.name
dmp.contributor
dmp.contributor.contributor_id.identifier
dmp.contributor.contributor_id.type
dmp.contributor.mbox
dmp.contributor.name
dmp.contributor.role
dmp.cost
dmp.cost.currency_code
dmp.cost.description
dmp.cost.title
dmp.cost.value
dmp.created
dmp.description
dmp.dmp_id
dmp.dmp_id.identifier
dmp.dmp_id.type
dmp.ethical_issues_description
dmp.ethical_issues_exist
dmp.ethical_issues_report
dmp.language
dmp.modified
dmp.project
dmp.project.description
dmp.project.end
dmp.project.funding
dmp.project.funding.funder_id.identifier
dmp.project.funding.funder_id.type
dmp.project.funding.funding_status
dmp.project.funding.grant_id.identifier
dmp.project.funding.grant_id.type
dmp.project.start
dmp.project.title
dmp.title

@@ -19,7 +19,7 @@ elasticsearch.index=dmps
http-logger.server-address = http://localhost:31311

####################PDF OVERRIDES CONFIGURATIONS##########
pdf.converter.url=http://localhost:88/
pdf.converter.url=http://localhost:3000/

####################CONFIGURATION FILES OVERRIDES CONFIGURATIONS##########
configuration.externalUrls=externalUrls/ExternalUrls.xml

@@ -1183,7 +1183,18 @@ but not
<mapping source="metadata.title" target="label" />
<mapping source="metadata.description" target="description" />
<mapping source="metadata.license.id" maDmpTarget="dataset.distribution.license.license_ref" />
<mapping source="metadata.keywords" target="tags"/>
<mapping source="metadata.filesize" maDmpTarget="dataset.distribution.byte_size"/>
<mapping source="metadata.language" maDmpTarget="dataset.metadata.language"/>
<mapping source="metadata.dates.valid" maDmpTarget="dataset.distribution.available_until"/>
<mapping source="metadata.access_right" maDmpTarget="dataset.distribution.data_access"/>
<mapping source="metadata.publication_date" maDmpTarget="dataset.issued"/>
<mapping source="metadata.license.id" maDmpTarget="dataset.distribution.license.license_ref"/>
<mapping source="metadata.license.created" maDmpTarget="dataset.distribution.license.start_date"/>
</mappings>
<fixedMappings>
<fixedMapping maDmpTarget="dataset.distribution.host.title" value="Zenodo" />
</fixedMappings>
</prefillingGet>
</config>
</prefillings>

@@ -16,7 +16,7 @@
familiarize users with basic RDM concepts and guide them throughout the process of
writing DMPs. It also utilises the OpenAIRE pool of services and inferred sources to
make DMPs more dynamic in use and easier to be completed and published. Argos is
based on the OpenDMP <a href="https://gitlab.eudat.eu/dmp/OpenAIRE-EUDAT-DMP-service-pilot/tree/master"
based on the OpenDMP <a href="https://code-repo.d4science.org/MaDgiK-CITE/argos/src/branch/master"
target="_blank">open source software</a> and is available through the <a
href="http://catalogue.openaire.eu/" target="_blank">OpenAIRE
Service catalogue</a> and the <a

@@ -166,8 +166,8 @@
Catalogue</a>. But, you can always find Argos at
argos.openaire.eu .
To access Argos software, please visit
<a href="https://gitlab.eudat.eu/dmp/OpenAIRE-EUDAT-DMP-service-pilot/tree/master"
target="_blank">https://gitlab.eudat.eu/dmp/OpenAIRE-EUDAT-DMP-service-pilot/tree/master</a>
<a href="https://code-repo.d4science.org/MaDgiK-CITE/argos/src/branch/master"
target="_blank">https://code-repo.d4science.org/MaDgiK-CITE/argos/src/branch/master</a>
.
</p>
<br />

@@ -462,7 +462,7 @@
<p>
Yes, it is. The OpenDMP software that Argos has deployed upon is open source code
available under Apache 2.0 license. You may find more information about the software
<a href="https://gitlab.eudat.eu/dmp/OpenAIRE-EUDAT-DMP-service-pilot/tree/master"
<a href="https://code-repo.d4science.org/MaDgiK-CITE/argos/src/branch/master"
target="_blank">here</a>.
</p>
<br />

@@ -472,8 +472,8 @@
<p>
Of course! Please feel free to suggest new features and to actively contribute to
Argos development via pull requests in <a
href="https://gitlab.eudat.eu/dmp/OpenAIRE-EUDAT-DMP-service-pilot/tree/master"
target="_blank">GitLab</a>.
href="https://code-repo.d4science.org/MaDgiK-CITE/argos/src/branch/master"
target="_blank">Gitea</a>.
</p>
<br />
<h4>

@@ -9,7 +9,7 @@
<p><span>The </span><span>OpenDMP</span><span> service was developed to provide a more flexible, </span><span>collaborative </span><span>environment with machine actionable solutions in writing, sharing and publishing Data Management Plans (DMPs). It is a product of </span><span>cooperation between </span><span>OpenAIRE </span><span>AMKE</span><span class="c0"> and EUDAT CDI and is offered both as a software “OpenDMP '' and as an online service under the name “ARGOS”. </span></p>
<p><span></span></p>
<ol>
<li><span><a href="https://gitlab.eudat.eu/dmp/OpenAIRE-EUDAT-DMP-service-pilot">OpenDMP software</a></span><span> is offered under the Free Open Source Software license </span><span>Apache 2.0</span><span class="c0">, for further development and use by institutions and interested parties.</span></li>
<li><span><a href="https://code-repo.d4science.org/MaDgiK-CITE/argos">OpenDMP software</a></span><span> is offered under the Free Open Source Software license </span><span>Apache 2.0</span><span class="c0">, for further development and use by institutions and interested parties.</span></li>
<li><span><a href="https://argos.openaire.eu/">ARGOS</a></span><span> service</span><span> is offered by</span><span> </span><span>OpenAIRE</span><span> as </span><span>part of its mission to support Open Science in the European Research Area, focusing on information linking and contextualisation that enriches its </span><span class="c5"><a href="https://zenodo.org/record/2600275#.XZpJgUYzY2w">Research Graph</a></span><span>.</span><span class="c0"> Use of ARGOS denotes agreement with the following terms:</span>
<ol>
<li><span>ARGOS is a software interface and a database with no storage capacity to store or preserve research data. The DMPs created are hosted in the </span><span>OpenAIRE </span><span>production environment for the sole purpose of exposing the DMP records once finalised (“published”). If assigned a DOI, the DMP records are linked to and preserved in Zenodo, the OpenAIRE’s repository service. The ARGOS service is made available for use free-of-charge for research, educational and informational purposes.</span></li>

@@ -31,4 +31,4 @@
<p><span>For any questions or comments you may have about the current Terms of Service, please contact us: </span><span class="c5"><a href="mailto:argos@openaire.eu">argos@openaire.eu</a></span><span class="c0"> </span></p>
</div>
</div>
</div>
</div>

@@ -105,7 +105,7 @@
writing DMPs. It also utilises the OpenAIRE pool of services and inferred sources to
make DMPs more dynamic in use and easier to be completed and published. Argos is
based on the OpenDMP <a
href="https://gitlab.eudat.eu/dmp/OpenAIRE-EUDAT-DMP-service-pilot/tree/master"
href="https://code-repo.d4science.org/MaDgiK-CITE/argos/src/branch/master"
target="_blank">open source software</a> and is available through the <a
href="http://catalogue.openaire.eu/" target="_blank">OpenAIRE
Service catalogue</a> and the <a

@@ -362,8 +362,8 @@
target="_blank">EOSC Catalogue</a>. But, you can always find Argos at
argos.openaire.eu .
To access Argos software, please visit
<a href="https://gitlab.eudat.eu/dmp/OpenAIRE-EUDAT-DMP-service-pilot/tree/master"
target="_blank">https://gitlab.eudat.eu/dmp/OpenAIRE-EUDAT-DMP-service-pilot/tree/master</a>
<a href="https://code-repo.d4science.org/MaDgiK-CITE/argos/src/branch/master"
target="_blank">https://code-repo.d4science.org/MaDgiK-CITE/argos/src/branch/master</a>
.
</div>
</div>

@@ -866,7 +866,7 @@
<div class="faq-content">
Yes, it is. The OpenDMP software that Argos has deployed upon is open source code
available under Apache 2.0 license. You may find more information about the software
<a href="https://gitlab.eudat.eu/dmp/OpenAIRE-EUDAT-DMP-service-pilot/tree/master" target="_blank">here</a>.
<a href="https://code-repo.d4science.org/MaDgiK-CITE/argos/src/branch/master" target="_blank">here</a>.
</div>
</div>
</div>

@@ -880,7 +880,7 @@
<div class="collapse " id="collapseFAQ-8-2">
<div class="faq-content">
Of course! Please feel free to suggest new features and to actively contribute to
Argos development via pull requests in <a href="https://gitlab.eudat.eu/dmp/OpenAIRE-EUDAT-DMP-service-pilot/tree/master" target="_blank">GitLab</a>.
Argos development via pull requests in <a href="https://code-repo.d4science.org/MaDgiK-CITE/argos/src/branch/master" target="_blank">Gitea</a>.
</div>
</div>
</div>

@@ -78,15 +78,15 @@
<div class="col">
<p>Open to all to suggest new features and to actively contribute to Argos development via pull
requests of code in
<a href="https://gitlab.eudat.eu/dmp/OpenAIRE-EUDAT-DMP-service-pilot" target="_blank">
GitLab
<a href="https://code-repo.d4science.org/MaDgiK-CITE/argos" target="_blank">
Gitea
<i class="fas fa-external-link-alt ext-link-icon fa-sm"></i>
</a>!
<br>Note that this page is under development.
</p>
</div>
<div class="col d-flex justify-content-center mt-5 pt-5 mb-5 pb-5">
<a href="https://gitlab.eudat.eu/dmp/OpenAIRE-EUDAT-DMP-service-pilot">
<a href="https://code-repo.d4science.org/MaDgiK-CITE/argos">
<button class="normal-btn">Download argos</button>
</a>
</div>