dmp import fixes

Bernaldo Mihasi 2022-01-18 11:38:24 +02:00
parent 99cecbc4fb
commit 61de8c4df0
3 changed files with 89 additions and 37 deletions

DatasetRDAMapper.java

@@ -1,11 +1,13 @@
package eu.eudat.models.rda.mapper;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.data.entities.DatasetProfile;
import eu.eudat.elastic.entities.Tag;
import eu.eudat.logic.managers.DatasetManager;
import eu.eudat.logic.services.ApiContext;
import eu.eudat.logic.utilities.json.JavaToJson;
import eu.eudat.logic.utilities.json.JsonSearcher;
import eu.eudat.models.data.datasetprofile.DatasetProfileOverviewModel;
import eu.eudat.models.data.datasetwizard.DatasetWizardModel;
@@ -89,10 +91,27 @@ public class DatasetRDAMapper {
for (int i = 0; i < qaNodes.size(); i++) {
rda.setAdditionalProperty("qaId" + (i + 1), qaNodes.get(i).get("id").asText());
}*/
List<String> qaList = qaNodes.stream()
.map(qaNode -> qaNode.get("value").asText())
.filter(qaNode -> !qaNode.isEmpty())
.collect(Collectors.toList());
List<String> qaList = new ArrayList<>();
for(JsonNode node: qaNodes){
if(node.get("value").isArray()){
Iterator<JsonNode> iter = node.get("value").elements();
while(iter.hasNext()) {
qaList.add(iter.next().asText());
}
}
}
for(JsonNode dqa: qaNodes){
String dataQuality = dqa.get("value").asText();
if(!dataQuality.isEmpty()){
qaList.add(dataQuality);
rda.setAdditionalProperty("otherDQAID", dqa.get("id").asText());
rda.setAdditionalProperty("otherDQA", dataQuality);
break;
}
}
rda.setDataQualityAssurance(qaList);
}else{
rda.setDataQualityAssurance(new ArrayList<>());
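Note on the hunk above: multi-select answers for dataset.data_quality_assurance carry their options as a JSON array under "value", while the free-text "other" answer is a plain string; the first loop flattens the arrays into qaList, and the second records the first non-empty free-text answer (plus its field id) as additional properties so it can be restored on export. A minimal standalone sketch of that behaviour follows; the "id"/"value" layout, the field ids qa1/qa2 and the sample answers are illustrative assumptions, not values taken from the project.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class QaImportSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Hypothetical answers: a multi-select field (array value) and a free-text "other" field.
        List<JsonNode> qaNodes = Arrays.asList(
                mapper.readTree("{\"id\":\"qa1\",\"value\":[\"data validation\",\"peer review\"]}"),
                mapper.readTree("{\"id\":\"qa2\",\"value\":\"internal QA checklist\"}"));

        List<String> qaList = new ArrayList<>();
        for (JsonNode node : qaNodes) {
            if (node.get("value").isArray()) {
                node.get("value").elements().forEachRemaining(v -> qaList.add(v.asText()));
            }
        }
        // asText() on an array node returns "", so only the free-text answer passes this check;
        // it is appended to the list and would also be kept as the "otherDQA" additional property.
        for (JsonNode node : qaNodes) {
            String other = node.get("value").asText();
            if (!other.isEmpty()) {
                qaList.add(other);
                break;
            }
        }
        System.out.println(qaList); // [data validation, peer review, internal QA checklist]
    }
}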
@@ -197,21 +216,21 @@ public class DatasetRDAMapper {
dmp.setEthicalIssuesExist(Dmp.EthicalIssuesExist.UNKNOWN);
}
}
else if(rdaProperty.contains("description")){
if(dmp.getEthicalIssuesDescription() == null){
dmp.setEthicalIssuesDescription(rdaValue);
}
else{
dmp.setEthicalIssuesDescription(dmp.getEthicalIssuesDescription() + ", " + rdaValue);
}
}
else if(rdaProperty.contains("report")){
try {
dmp.setEthicalIssuesReport(URI.create(rdaValue));
} catch (IllegalArgumentException e) {
logger.warn(e.getLocalizedMessage() + ". Skipping url parsing");
}
}
// else if(rdaProperty.contains("description")){
// if(dmp.getEthicalIssuesDescription() == null){
// dmp.setEthicalIssuesDescription(rdaValue);
// }
// else{
// dmp.setEthicalIssuesDescription(dmp.getEthicalIssuesDescription() + ", " + rdaValue);
// }
// }
// else if(rdaProperty.contains("report")){
// try {
// dmp.setEthicalIssuesReport(URI.create(rdaValue));
// } catch (IllegalArgumentException e) {
// logger.warn(e.getLocalizedMessage() + ". Skipping url parsing");
// }
// }
}
}
List<JsonNode> foundNodes = Stream.of(typeNodes, languageNodes, metadataNodes, qaNodes, preservationNodes, distributionNodes,
@@ -282,7 +301,14 @@ public class DatasetRDAMapper {
}*/
List<JsonNode> qaNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.data_quality_assurance");
if (!qaNodes.isEmpty() && rda.getDataQualityAssurance() != null && !rda.getDataQualityAssurance().isEmpty()) {
properties.put(qaNodes.get(0).get("id").asText(), rda.getDataQualityAssurance().get(0));
ObjectMapper m = new ObjectMapper();
List<String> qas = new ArrayList<>(rda.getDataQualityAssurance());
if(!qas.isEmpty()){
properties.put(qaNodes.get(0).get("id").asText(), m.writeValueAsString(qas));
if(rda.getAdditionalProperties().containsKey("otherDQAID")){
properties.put((String)rda.getAdditionalProperties().get("otherDQAID"), rda.getAdditionalProperties().get("otherDQA"));
}
}
}
List<JsonNode> preservationNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.preservation_statement");
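Note: on export the whole assurance list is now serialized into a single JSON string under the first data_quality_assurance field, and the free-text answer is put back under the field id saved as "otherDQAID" during import. A rough standalone illustration, with the property map and the qa1/qa2 field ids assumed for the example:

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class QaExportSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper m = new ObjectMapper();
        List<String> qas = Arrays.asList("data validation", "peer review", "internal QA checklist");

        // Stand-in for rda.getAdditionalProperties() as populated during import.
        Map<String, Object> additional = new HashMap<>();
        additional.put("otherDQAID", "qa2");
        additional.put("otherDQA", "internal QA checklist");

        Map<String, Object> properties = new HashMap<>();
        // The whole list becomes one JSON string under the first data_quality_assurance field id.
        properties.put("qa1", m.writeValueAsString(qas));
        // The free-text answer is restored under the field id remembered at import time.
        if (additional.containsKey("otherDQAID")) {
            properties.put((String) additional.get("otherDQAID"), additional.get("otherDQA"));
        }
        System.out.println(properties);
    }
}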
@@ -295,25 +321,28 @@ public class DatasetRDAMapper {
properties.put(issuedNodes.get(0).get("id").asText(), rda.getIssued());
}
if (rda.getDistribution() != null) {
if (rda.getDistribution() != null && !rda.getDistribution().isEmpty()) {
properties.putAll(DistributionRDAMapper.toProperties(rda.getDistribution().get(0), datasetDescriptionObj));
}
if (rda.getKeyword() != null) {
List<String> keywordIds = rda.getAdditionalProperties().entrySet().stream().filter(entry -> entry.getKey().startsWith("keyword")).map(entry -> entry.getValue().toString()).collect(Collectors.toList());
boolean takeAll = false;
if (keywordIds.size() < rda.getKeyword().size()) {
takeAll = true;
}
for (int i = 0; i < keywordIds.size(); i++) {
if (takeAll) {
List<String> tags = new ArrayList<>();
for (String keyword : rda.getKeyword()) {
tags.add(mapper.writeValueAsString(toTagEntity(keyword)));
}
properties.put(keywordIds.get(i), tags);
} else {
properties.put(keywordIds.get(i), mapper.writeValueAsString(toTagEntity(rda.getKeyword().get(i))));
// boolean takeAll = false;
// if (keywordIds.size() < rda.getKeyword().size()) {
// takeAll = true;
// }
if(!rda.getKeyword().isEmpty()){
for (int i = 0; i < keywordIds.size(); i++) {
// if (takeAll) {
// List<String> tags = new ArrayList<>();
// for (String keyword : rda.getKeyword()) {
// tags.add(mapper.writeValueAsString(toTagEntity(keyword)));
// }
// properties.put(keywordIds.get(i), tags);
// } else {
// properties.put(keywordIds.get(i), mapper.writeValueAsString(toTagEntity(rda.getKeyword().get(i))));
// }
properties.put(keywordIds.get(i), rda.getKeyword().get(i));
}
}
}
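Note: keyword properties are now written back as plain keyword strings, one per saved keyword field id, instead of serialized Tag entities; the loop assumes the keyword* additional properties pair one-to-one with the RDA keywords. A small sketch of the new behaviour, with hypothetical field ids and keywords:

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class KeywordExportSketch {
    public static void main(String[] args) {
        // Hypothetical saved keyword field ids, assumed to match the RDA keywords one-to-one.
        List<String> keywordIds = Arrays.asList("kw-field-1", "kw-field-2");
        List<String> keywords = Arrays.asList("marine biology", "sensor data");

        Map<String, Object> properties = new LinkedHashMap<>();
        if (!keywords.isEmpty()) {
            for (int i = 0; i < keywordIds.size(); i++) {
                // Previously the value was a serialized Tag entity; now it is the raw keyword string.
                properties.put(keywordIds.get(i), keywords.get(i));
            }
        }
        System.out.println(properties); // {kw-field-1=marine biology, kw-field-2=sensor data}
    }
}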

DistributionRDAMapper.java

@@ -86,12 +86,15 @@ public class DistributionRDAMapper {
if(node.get("value").isArray()){
Iterator<JsonNode> iter = node.get("value").elements();
List<String> formats = new ArrayList<>();
int i = 1;
while(iter.hasNext()) {
String format = JavaToJson.objectStringToJson(iter.next().asText());
JsonNode current = iter.next();
String format = JavaToJson.objectStringToJson(current.asText());
try {
Map<String, String> result = new ObjectMapper().readValue(format, HashMap.class);
format = result.get("label");
formats.add(format);
rda.setAdditionalProperty("format" + i++, new ObjectMapper().readTree(current.asText()));
}
catch(JsonProcessingException e){
logger.warn(e.getMessage());
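Note: each selected format option arrives as a JSON object string; its "label" feeds rda.format, and the full object is kept under the additional properties format1, format2, … so the original option can be restored on export. A standalone approximation, assuming the option string is already valid JSON (the project code additionally passes it through JavaToJson.objectStringToJson first); the ids and labels are made up:

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class FormatImportSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Hypothetical autocomplete answers: one JSON object string per selected format.
        List<String> values = Arrays.asList(
                "{\"id\":\"fmt-1\",\"label\":\"text/csv\"}",
                "{\"id\":\"fmt-2\",\"label\":\"application/json\"}");

        List<String> formats = new ArrayList<>();
        Map<String, Object> additionalProperties = new HashMap<>(); // stand-in for rda.setAdditionalProperty
        int i = 1;
        for (String value : values) {
            Map<String, String> result = mapper.readValue(value, HashMap.class);
            formats.add(result.get("label"));                                 // readable label -> rda.format
            additionalProperties.put("format" + i++, mapper.readTree(value)); // full option kept for export
        }
        System.out.println(formats); // [text/csv, application/json]
    }
}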
@@ -206,7 +209,19 @@ public class DistributionRDAMapper {
break;
case FORMAT:
if (rda.getFormat() != null && !rda.getFormat().isEmpty()) {
properties.put(distributionNode.get("id").asText(), rda.getFormat().get(0));
Map<String, Object> additionalProperties = rda.getAdditionalProperties();
List<Object> standardFormats = new ArrayList<>();
ObjectMapper mapper = new ObjectMapper();
rda.getAdditionalProperties().forEach((key, value) -> {
try {
if (key.matches("format\\d+")) {
standardFormats.add(additionalProperties.get(key));
properties.put(distributionNode.get("id").asText(), mapper.writeValueAsString(standardFormats));
}
} catch (JsonProcessingException e) {
logger.error(e.getMessage(), e);
}
});
}
break;
case LICENSE:
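Note: on export the FORMAT case now gathers every formatN additional property and writes the collected list as one JSON array string into the format field, rather than only the first plain value. A simplified standalone version of that collection step (keys and values are invented; the committed code serializes inside the loop, overwriting the property on each match, which ends with the same collected content):

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class FormatExportSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Stand-in for rda.getAdditionalProperties() filled during import (keys format1, format2, ...).
        Map<String, Object> additionalProperties = new LinkedHashMap<>();
        additionalProperties.put("format1", mapper.readTree("{\"id\":\"fmt-1\",\"label\":\"text/csv\"}"));
        additionalProperties.put("format2", mapper.readTree("{\"id\":\"fmt-2\",\"label\":\"application/json\"}"));
        additionalProperties.put("otherKey", "ignored here"); // non-format keys are skipped

        List<Object> standardFormats = new ArrayList<>();
        for (Map.Entry<String, Object> entry : additionalProperties.entrySet()) {
            if (entry.getKey().matches("format\\d+")) {
                standardFormats.add(entry.getValue());
            }
        }
        System.out.println(mapper.writeValueAsString(standardFormats));
        // [{"id":"fmt-1","label":"text/csv"},{"id":"fmt-2","label":"application/json"}]
    }
}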

HostRDAMapper.java

@@ -40,6 +40,7 @@ public class HostRDAMapper {
break;
case BACKUP_TYPE:
rda.setBackupType(rdaValue);
rda.setAdditionalProperty(ImportPropertyName.BACKUP_TYPE.getName(), node.get("id").asText());
break;
case CERTIFIED_WITH:
rda.setCertifiedWith(Host.CertifiedWith.fromValue(rdaValue));
@@ -145,7 +146,14 @@ public class HostRDAMapper {
properties.put(entry.getValue().toString(), rda.getGeoLocation().value());
break;
case PID_SYSTEM:
properties.put(entry.getValue().toString(), rda.getPidSystem().get(0).value());
List<Object> pids = new ArrayList<>();
ObjectMapper mapper = new ObjectMapper();
for(PidSystem pid: rda.getPidSystem()){
pids.add(pid.value());
}
if(!pids.isEmpty()){
properties.put(entry.getValue().toString(), mapper.writeValueAsString(pids));
}
break;
case STORAGE_TYPE:
properties.put(entry.getValue().toString(), rda.getStorageType());
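Note: alongside the backup_type field id now being remembered as an additional property (so it can be matched on export, like the DQA and format ids above), the PID_SYSTEM case now exports every pid_system value as one JSON array string instead of only the first entry. A minimal sketch, with plain strings standing in for the generated PidSystem enum values (doi/handle/ark are example entries only):

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class PidSystemExportSketch {
    public static void main(String[] args) throws Exception {
        // Plain strings stand in for PidSystem.value() of the generated RDA enum.
        List<String> pidValues = Arrays.asList("doi", "handle", "ark");

        List<Object> pids = new ArrayList<>(pidValues);
        if (!pids.isEmpty()) {
            // Previously only the first value was written; now the whole list becomes one JSON array string.
            System.out.println(new ObjectMapper().writeValueAsString(pids)); // ["doi","handle","ark"]
        }
    }
}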