
fix format problem

Enrico Ottonello 2020-04-23 15:25:39 +02:00
parent bb6c9785b4
commit 4a6aea1a37
7 changed files with 1100 additions and 978 deletions

CrossrefImporter.java

@@ -1,6 +1,8 @@
package eu.dnetlib.doiboost.crossref;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import java.io.ByteArrayOutputStream;
import java.util.zip.Inflater;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
@@ -12,30 +14,29 @@ import org.apache.hadoop.io.Text;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class CrossrefImporter {

    public static void main(String[] args) throws Exception {

        final ArgumentApplicationParser parser =
                new ArgumentApplicationParser(
                        IOUtils.toString(
                                CrossrefImporter.class.getResourceAsStream(
                                        "/eu/dnetlib/dhp/doiboost/import_from_es.json")));

        Logger logger = LoggerFactory.getLogger(CrossrefImporter.class);
        parser.parseArgument(args);

        final String hdfsuri = parser.get("namenode");
        logger.info("HDFS URI" + hdfsuri);
        Path hdfswritepath = new Path(parser.get("targetPath"));
        logger.info("TargetPath: " + hdfsuri);

        final Long timestamp =
                StringUtils.isNotBlank(parser.get("timestamp"))
                        ? Long.parseLong(parser.get("timestamp"))
                        : -1;

        if (timestamp > 0) logger.info("Timestamp added " + timestamp);

        // ====== Init HDFS File System Object
        Configuration conf = new Configuration();
@@ -45,16 +46,21 @@ public class CrossrefImporter {
        conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
        conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());

        ESClient client =
                timestamp > 0
                        ? new ESClient(
                                "ip-90-147-167-25.ct1.garrservices.it", "crossref", timestamp)
                        : new ESClient("ip-90-147-167-25.ct1.garrservices.it", "crossref");

        try (SequenceFile.Writer writer =
                SequenceFile.createWriter(
                        conf,
                        SequenceFile.Writer.file(hdfswritepath),
                        SequenceFile.Writer.keyClass(IntWritable.class),
                        SequenceFile.Writer.valueClass(Text.class))) {

            int i = 0;
            long start = System.currentTimeMillis();
            long end = 0;
            final IntWritable key = new IntWritable(i);
            final Text value = new Text();
@@ -65,7 +71,10 @@ public class CrossrefImporter {
                if (i % 1000000 == 0) {
                    end = System.currentTimeMillis();
                    final float time = (end - start) / 1000.0F;
                    logger.info(
                            String.format(
                                    "Imported %d records last 100000 imported in %f seconds",
                                    i, time));
                    start = System.currentTimeMillis();
                }
            }
@@ -87,7 +96,7 @@ public class CrossrefImporter {
            decompresser.end();
            return new String(unzippeddata);
        } catch (Throwable e) {
            throw new RuntimeException("Wrong record:" + blob, e);
        }
    }
}
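The last hunk above is only the tail of the blob-decompression helper; its signature is outside this diff. A minimal, self-contained sketch of the Base64 + Inflater round-trip implied by the imports and the visible tail follows. The class name, method name and buffer size are illustrative assumptions, not taken from the commit.

import java.io.ByteArrayOutputStream;
import java.util.zip.Inflater;
import org.apache.commons.codec.binary.Base64;

public class BlobDecompressorSketch {

    // Decodes a Base64 string wrapping deflate-compressed bytes back into text,
    // mirroring the partially visible helper in CrossrefImporter.
    public static String decompressBlob(final String blob) {
        try {
            final byte[] byteArray = Base64.decodeBase64(blob.getBytes());
            final Inflater decompresser = new Inflater();
            decompresser.setInput(byteArray);
            final ByteArrayOutputStream bos = new ByteArrayOutputStream(byteArray.length);
            final byte[] buffer = new byte[8192]; // buffer size is an arbitrary choice
            while (!decompresser.finished()) {
                final int size = decompresser.inflate(buffer);
                bos.write(buffer, 0, size);
            }
            final byte[] unzippeddata = bos.toByteArray();
            decompresser.end();
            return new String(unzippeddata);
        } catch (Throwable e) {
            throw new RuntimeException("Wrong record:" + blob, e);
        }
    }
}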

ESClient.java

@@ -1,6 +1,9 @@
package eu.dnetlib.doiboost.crossref;

import com.jayway.jsonpath.JsonPath;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
@@ -10,18 +13,15 @@ import org.apache.http.impl.client.HttpClients;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ESClient implements Iterator<String> {
    private static final Logger logger = LoggerFactory.getLogger(ESClient.class);

    static final String blobPath = "$.hits[*].hits[*]._source.blob";
    static final String scrollIdPath = "$._scroll_id";
    static final String JSON_NO_TS = "{\"size\":1000}";
    static final String JSON_WITH_TS =
            "{\"size\":1000, \"query\":{\"range\":{\"timestamp\":{\"gte\":%d}}}}";
    static final String JSON_SCROLL = "{\"scroll_id\":\"%s\",\"scroll\" : \"1m\"}";

    private final String scrollId;
@@ -29,29 +29,34 @@ public class ESClient implements Iterator<String> {
    private final String esHost;

    public ESClient(final String esHost, final String esIndex) throws IOException {
        this.esHost = esHost;
        final String body =
                getResponse(
                        String.format("http://%s:9200/%s/_search?scroll=1m", esHost, esIndex),
                        JSON_NO_TS);
        scrollId = getJPathString(scrollIdPath, body);
        buffer = getBlobs(body);
    }

    public ESClient(final String esHost, final String esIndex, final long timestamp)
            throws IOException {
        this.esHost = esHost;
        final String body =
                getResponse(
                        String.format("http://%s:9200/%s/_search?scroll=1m", esHost, esIndex),
                        String.format(JSON_WITH_TS, timestamp));
        scrollId = getJPathString(scrollIdPath, body);
        buffer = getBlobs(body);
    }

    private String getResponse(final String url, final String json) {
        CloseableHttpClient client = HttpClients.createDefault();
        try {
            HttpPost httpPost = new HttpPost(url);
            if (json != null) {
                StringEntity entity = new StringEntity(json);
                httpPost.setEntity(entity);
                httpPost.setHeader("Accept", "application/json");
@@ -61,22 +66,20 @@ public class ESClient implements Iterator<String> {
            return IOUtils.toString(response.getEntity().getContent());
        } catch (Throwable e) {
            throw new RuntimeException("Error on executing request ", e);
        } finally {
            try {
                client.close();
            } catch (IOException e) {
                throw new RuntimeException("Unable to close client ", e);
            }
        }
    }

    private String getJPathString(final String jsonPath, final String json) {
        try {
            Object o = JsonPath.read(json, jsonPath);
            if (o instanceof String) return (String) o;
            return null;
        } catch (Exception e) {
            return "";
@@ -84,14 +87,13 @@ public class ESClient implements Iterator<String> {
        }
    }

    private List<String> getBlobs(final String body) {
        final List<String> res = JsonPath.read(body, "$.hits.hits[*]._source.blob");
        return res;
    }

    @Override
    public boolean hasNext() {
        return (buffer != null && !buffer.isEmpty());
    }

    @Override
@@ -100,11 +102,12 @@ public class ESClient implements Iterator<String> {
        if (buffer.isEmpty()) {
            final String json_param = String.format(JSON_SCROLL, scrollId);
            final String body =
                    getResponse(String.format("http://%s:9200/_search/scroll", esHost), json_param);
            try {
                buffer = getBlobs(body);
            } catch (Throwable e) {
                logger.error("Error on get next page: body:" + body);
            }
        }
        return nextItem;
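ESClient wraps the Elasticsearch scroll API and hands the stored blobs back one at a time through the standard Iterator contract. A small consumption sketch, assuming placeholder host and index names that are not values from this commit:

import eu.dnetlib.doiboost.crossref.ESClient;

public class ESClientUsageSketch {

    public static void main(String[] args) throws Exception {
        // Placeholder endpoint; the constructor issues the first scroll query itself.
        final ESClient client = new ESClient("localhost", "crossref");

        int count = 0;
        while (client.hasNext()) {
            final String blob = client.next(); // one compressed record per hit
            if (blob != null) {
                count++;
            }
        }
        System.out.println("Fetched " + count + " blobs");
    }
}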

JsonWriter.java

@@ -1,20 +1,18 @@
package eu.dnetlib.doiboost.orcid.json;

import com.google.gson.JsonObject;
import eu.dnetlib.doiboost.orcid.model.AuthorData;

public class JsonWriter {

    public static String create(AuthorData authorData) {
        JsonObject author = new JsonObject();
        author.addProperty("oid", authorData.getOid());
        author.addProperty("name", authorData.getName());
        author.addProperty("surname", authorData.getSurname());
        if (authorData.getCreditName() != null) {
            author.addProperty("creditname", authorData.getCreditName());
        }
        return author.toString();
    }
}
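For reference, JsonWriter.create flattens an AuthorData bean into a compact JSON object, emitting creditname only when it is set. A usage sketch with invented sample values:

import eu.dnetlib.doiboost.orcid.json.JsonWriter;
import eu.dnetlib.doiboost.orcid.model.AuthorData;

public class JsonWriterUsageSketch {

    public static void main(String[] args) {
        final AuthorData author = new AuthorData();
        author.setOid("0000-0001-2345-6789"); // sample identifier, invented
        author.setName("Jane");
        author.setSurname("Doe");
        // creditName left null, so no "creditname" property is written
        System.out.println(JsonWriter.create(author));
        // prints: {"oid":"0000-0001-2345-6789","name":"Jane","surname":"Doe"}
    }
}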

AuthorData.java

@@ -2,40 +2,49 @@ package eu.dnetlib.doiboost.orcid.model;
public class AuthorData {

    private String oid;
    private String name;
    private String surname;
    private String creditName;
    private String errorCode;

    public String getErrorCode() {
        return errorCode;
    }

    public void setErrorCode(String errorCode) {
        this.errorCode = errorCode;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getSurname() {
        return surname;
    }

    public void setSurname(String surname) {
        this.surname = surname;
    }

    public String getCreditName() {
        return creditName;
    }

    public void setCreditName(String creditName) {
        this.creditName = creditName;
    }

    public String getOid() {
        return oid;
    }

    public void setOid(String oid) {
        this.oid = oid;
    }
}

AbstractMdRecordToOafMapper.java

@@ -10,19 +10,6 @@ import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.oaiIProvenance;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;

import eu.dnetlib.dhp.oa.graph.raw.common.MigrationConstants;
import eu.dnetlib.dhp.schema.oaf.Author;
import eu.dnetlib.dhp.schema.oaf.DataInfo;
@@ -41,388 +28,439 @@ import eu.dnetlib.dhp.schema.oaf.Relation;
import eu.dnetlib.dhp.schema.oaf.Result;
import eu.dnetlib.dhp.schema.oaf.Software;
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.dom4j.Document;
import org.dom4j.DocumentFactory;
import org.dom4j.DocumentHelper;
import org.dom4j.Node;

public abstract class AbstractMdRecordToOafMapper {

    protected final Map<String, String> code2name;

    protected static final Qualifier MAIN_TITLE_QUALIFIER =
            qualifier("main title", "main title", "dnet:dataCite_title", "dnet:dataCite_title");

    protected AbstractMdRecordToOafMapper(final Map<String, String> code2name) {
        this.code2name = code2name;
    }

    public List<Oaf> processMdRecord(final String xml) {
        try {
            final Map<String, String> nsContext = new HashMap<>();
            nsContext.put("dr", "http://www.driver-repository.eu/namespace/dr");
            nsContext.put("dri", "http://www.driver-repository.eu/namespace/dri");
            nsContext.put("oaf", "http://namespace.openaire.eu/oaf");
            nsContext.put("oai", "http://www.openarchives.org/OAI/2.0/");
            nsContext.put("prov", "http://www.openarchives.org/OAI/2.0/provenance");
            nsContext.put("dc", "http://purl.org/dc/elements/1.1/");
            nsContext.put("datacite", "http://datacite.org/schema/kernel-3");
            DocumentFactory.getInstance().setXPathNamespaceURIs(nsContext);

            final Document doc =
                    DocumentHelper.parseText(
                            xml.replaceAll(
                                    "http://datacite.org/schema/kernel-4",
                                    "http://datacite.org/schema/kernel-3"));

            final String type = doc.valueOf("//dr:CobjCategory/@type");
            final KeyValue collectedFrom =
                    keyValue(
                            createOpenaireId(10, doc.valueOf("//oaf:collectedFrom/@id"), true),
                            doc.valueOf("//oaf:collectedFrom/@name"));
            final KeyValue hostedBy =
                    StringUtils.isBlank(doc.valueOf("//oaf:hostedBy/@id"))
                            ? collectedFrom
                            : keyValue(
                                    createOpenaireId(10, doc.valueOf("//oaf:hostedBy/@id"), true),
                                    doc.valueOf("//oaf:hostedBy/@name"));

            final DataInfo info = prepareDataInfo(doc);
            final long lastUpdateTimestamp = new Date().getTime();

            return createOafs(doc, type, collectedFrom, hostedBy, info, lastUpdateTimestamp);
        } catch (final Exception e) {
            throw new RuntimeException(e);
        }
    }

    protected List<Oaf> createOafs(
            final Document doc,
            final String type,
            final KeyValue collectedFrom,
            final KeyValue hostedBy,
            final DataInfo info,
            final long lastUpdateTimestamp) {

        final List<Oaf> oafs = new ArrayList<>();

        switch (type.toLowerCase()) {
            case "":
            case "publication":
                final Publication p = new Publication();
                populateResultFields(p, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
                p.setResulttype(MigrationConstants.PUBLICATION_RESULTTYPE_QUALIFIER);
                p.setJournal(prepareJournal(doc, info));
                oafs.add(p);
                break;
            case "dataset":
                final Dataset d = new Dataset();
                populateResultFields(d, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
                d.setResulttype(MigrationConstants.DATASET_RESULTTYPE_QUALIFIER);
                d.setStoragedate(prepareDatasetStorageDate(doc, info));
                d.setDevice(prepareDatasetDevice(doc, info));
                d.setSize(prepareDatasetSize(doc, info));
                d.setVersion(prepareDatasetVersion(doc, info));
                d.setLastmetadataupdate(prepareDatasetLastMetadataUpdate(doc, info));
                d.setMetadataversionnumber(prepareDatasetMetadataVersionNumber(doc, info));
                d.setGeolocation(prepareDatasetGeoLocations(doc, info));
                oafs.add(d);
                break;
            case "software":
                final Software s = new Software();
                populateResultFields(s, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
                s.setResulttype(MigrationConstants.SOFTWARE_RESULTTYPE_QUALIFIER);
                s.setDocumentationUrl(prepareSoftwareDocumentationUrls(doc, info));
                s.setLicense(prepareSoftwareLicenses(doc, info));
                s.setCodeRepositoryUrl(prepareSoftwareCodeRepositoryUrl(doc, info));
                s.setProgrammingLanguage(prepareSoftwareProgrammingLanguage(doc, info));
                oafs.add(s);
                break;
            case "otherresearchproducts":
            default:
                final OtherResearchProduct o = new OtherResearchProduct();
                populateResultFields(o, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
                o.setResulttype(MigrationConstants.OTHER_RESULTTYPE_QUALIFIER);
                o.setContactperson(prepareOtherResearchProductContactPersons(doc, info));
                o.setContactgroup(prepareOtherResearchProductContactGroups(doc, info));
                o.setTool(prepareOtherResearchProductTools(doc, info));
                oafs.add(o);
                break;
        }

        if (!oafs.isEmpty()) {
            oafs.addAll(addProjectRels(doc, collectedFrom, info, lastUpdateTimestamp));
            oafs.addAll(addOtherResultRels(doc, collectedFrom, info, lastUpdateTimestamp));
        }

        return oafs;
    }

    private List<Oaf> addProjectRels(
            final Document doc,
            final KeyValue collectedFrom,
            final DataInfo info,
            final long lastUpdateTimestamp) {

        final List<Oaf> res = new ArrayList<>();

        final String docId = createOpenaireId(50, doc.valueOf("//dri:objIdentifier"), false);

        for (final Object o : doc.selectNodes("//oaf:projectid")) {
            final String projectId = createOpenaireId(40, ((Node) o).getText(), true);

            final Relation r1 = new Relation();
            r1.setRelType("resultProject");
            r1.setSubRelType("outcome");
            r1.setRelClass("isProducedBy");
            r1.setSource(docId);
            r1.setTarget(projectId);
            r1.setCollectedfrom(Arrays.asList(collectedFrom));
            r1.setDataInfo(info);
            r1.setLastupdatetimestamp(lastUpdateTimestamp);
            res.add(r1);

            final Relation r2 = new Relation();
            r2.setRelType("resultProject");
            r2.setSubRelType("outcome");
            r2.setRelClass("produces");
            r2.setSource(projectId);
            r2.setTarget(docId);
            r2.setCollectedfrom(Arrays.asList(collectedFrom));
            r2.setDataInfo(info);
            r2.setLastupdatetimestamp(lastUpdateTimestamp);
            res.add(r2);
        }

        return res;
    }

    protected abstract List<Oaf> addOtherResultRels(
            final Document doc,
            final KeyValue collectedFrom,
            final DataInfo info,
            final long lastUpdateTimestamp);

    private void populateResultFields(
            final Result r,
            final Document doc,
            final KeyValue collectedFrom,
            final KeyValue hostedBy,
            final DataInfo info,
            final long lastUpdateTimestamp) {
        r.setDataInfo(info);
        r.setLastupdatetimestamp(lastUpdateTimestamp);
        r.setId(createOpenaireId(50, doc.valueOf("//dri:objIdentifier"), false));
        r.setOriginalId(Arrays.asList(doc.valueOf("//dri:objIdentifier")));
        r.setCollectedfrom(Arrays.asList(collectedFrom));
        r.setPid(
                prepareListStructProps(
                        doc,
                        "//oaf:identifier",
                        "@identifierType",
                        "dnet:pid_types",
                        "dnet:pid_types",
                        info));
        r.setDateofcollection(doc.valueOf("//dr:dateOfCollection"));
        r.setDateoftransformation(doc.valueOf("//dr:dateOfTransformation"));
        r.setExtraInfo(new ArrayList<>()); // NOT PRESENT IN MDSTORES
        r.setOaiprovenance(prepareOAIprovenance(doc));
        r.setAuthor(prepareAuthors(doc, info));
        r.setLanguage(prepareLanguages(doc));
        r.setCountry(new ArrayList<>()); // NOT PRESENT IN MDSTORES
        r.setSubject(prepareSubjects(doc, info));
        r.setTitle(prepareTitles(doc, info));
        r.setRelevantdate(prepareRelevantDates(doc, info));
        r.setDescription(prepareDescriptions(doc, info));
        r.setDateofacceptance(prepareField(doc, "//oaf:dateAccepted", info));
        r.setPublisher(preparePublisher(doc, info));
        r.setEmbargoenddate(prepareField(doc, "//oaf:embargoenddate", info));
        r.setSource(prepareSources(doc, info));
        r.setFulltext(new ArrayList<>()); // NOT PRESENT IN MDSTORES
        r.setFormat(prepareFormats(doc, info));
        r.setContributor(prepareContributors(doc, info));
        r.setResourcetype(prepareResourceType(doc, info));
        r.setCoverage(prepareCoverages(doc, info));
        r.setContext(new ArrayList<>()); // NOT PRESENT IN MDSTORES
        r.setExternalReference(new ArrayList<>()); // NOT PRESENT IN MDSTORES
        r.setInstance(prepareInstances(doc, info, collectedFrom, hostedBy));
    }

    protected abstract Qualifier prepareResourceType(Document doc, DataInfo info);

    protected abstract List<Instance> prepareInstances(
            Document doc, DataInfo info, KeyValue collectedfrom, KeyValue hostedby);

    protected abstract List<Field<String>> prepareSources(Document doc, DataInfo info);

    protected abstract List<StructuredProperty> prepareRelevantDates(Document doc, DataInfo info);

    protected abstract List<Field<String>> prepareCoverages(Document doc, DataInfo info);

    protected abstract List<Field<String>> prepareContributors(Document doc, DataInfo info);

    protected abstract List<Field<String>> prepareFormats(Document doc, DataInfo info);

    protected abstract Field<String> preparePublisher(Document doc, DataInfo info);

    protected abstract List<Field<String>> prepareDescriptions(Document doc, DataInfo info);

    protected abstract List<StructuredProperty> prepareTitles(Document doc, DataInfo info);

    protected abstract List<StructuredProperty> prepareSubjects(Document doc, DataInfo info);

    protected abstract Qualifier prepareLanguages(Document doc);

    protected abstract List<Author> prepareAuthors(Document doc, DataInfo info);

    protected abstract List<Field<String>> prepareOtherResearchProductTools(
            Document doc, DataInfo info);

    protected abstract List<Field<String>> prepareOtherResearchProductContactGroups(
            Document doc, DataInfo info);

    protected abstract List<Field<String>> prepareOtherResearchProductContactPersons(
            Document doc, DataInfo info);

    protected abstract Qualifier prepareSoftwareProgrammingLanguage(Document doc, DataInfo info);

    protected abstract Field<String> prepareSoftwareCodeRepositoryUrl(Document doc, DataInfo info);

    protected abstract List<StructuredProperty> prepareSoftwareLicenses(
            Document doc, DataInfo info);

    protected abstract List<Field<String>> prepareSoftwareDocumentationUrls(
            Document doc, DataInfo info);

    protected abstract List<GeoLocation> prepareDatasetGeoLocations(Document doc, DataInfo info);

    protected abstract Field<String> prepareDatasetMetadataVersionNumber(
            Document doc, DataInfo info);

    protected abstract Field<String> prepareDatasetLastMetadataUpdate(Document doc, DataInfo info);

    protected abstract Field<String> prepareDatasetVersion(Document doc, DataInfo info);

    protected abstract Field<String> prepareDatasetSize(Document doc, DataInfo info);

    protected abstract Field<String> prepareDatasetDevice(Document doc, DataInfo info);

    protected abstract Field<String> prepareDatasetStorageDate(Document doc, DataInfo info);

    private Journal prepareJournal(final Document doc, final DataInfo info) {
        final Node n = doc.selectSingleNode("//oaf:journal");
        if (n != null) {
            final String name = n.getText();
            final String issnPrinted = n.valueOf("@issn");
            final String issnOnline = n.valueOf("@eissn");
            final String issnLinking = n.valueOf("@lissn");
            final String ep = n.valueOf("@ep");
            final String iss = n.valueOf("@iss");
            final String sp = n.valueOf("@sp");
            final String vol = n.valueOf("@vol");
            final String edition = n.valueOf("@edition");
            if (StringUtils.isNotBlank(name)) {
                return journal(
                        name,
                        issnPrinted,
                        issnOnline,
                        issnLinking,
                        ep,
                        iss,
                        sp,
                        vol,
                        edition,
                        null,
                        null,
                        info);
            }
        }
        return null;
    }

    protected Qualifier prepareQualifier(
            final Node node, final String xpath, final String schemeId, final String schemeName) {
        final String classId = node.valueOf(xpath);
        final String className = code2name.get(classId);
        return qualifier(classId, className, schemeId, schemeName);
    }

    protected List<StructuredProperty> prepareListStructProps(
            final Node node,
            final String xpath,
            final String xpathClassId,
            final String schemeId,
            final String schemeName,
            final DataInfo info) {
        final List<StructuredProperty> res = new ArrayList<>();
        for (final Object o : node.selectNodes(xpath)) {
            final Node n = (Node) o;
            final String classId = n.valueOf(xpathClassId);
            final String className = code2name.get(classId);
            res.add(
                    structuredProperty(
                            n.getText(), classId, className, schemeId, schemeName, info));
        }
        return res;
    }

    protected List<StructuredProperty> prepareListStructProps(
            final Node node, final String xpath, final Qualifier qualifier, final DataInfo info) {
        final List<StructuredProperty> res = new ArrayList<>();
        for (final Object o : node.selectNodes(xpath)) {
            final Node n = (Node) o;
            res.add(structuredProperty(n.getText(), qualifier, info));
        }
        return res;
    }

    protected List<StructuredProperty> prepareListStructProps(
            final Node node, final String xpath, final DataInfo info) {
        final List<StructuredProperty> res = new ArrayList<>();
        for (final Object o : node.selectNodes(xpath)) {
            final Node n = (Node) o;
            res.add(
                    structuredProperty(
                            n.getText(),
                            n.valueOf("@classid"),
                            n.valueOf("@classname"),
                            n.valueOf("@schemeid"),
                            n.valueOf("@schemename"),
                            info));
        }
        return res;
    }

    protected OAIProvenance prepareOAIprovenance(final Document doc) {
        final Node n =
                doc.selectSingleNode(
                        "//*[local-name()='provenance']/*[local-name()='originDescription']");

        if (n == null) {
            return null;
        }

        final String identifier = n.valueOf("./*[local-name()='identifier']");
        final String baseURL = n.valueOf("./*[local-name()='baseURL']");
        final String metadataNamespace = n.valueOf("./*[local-name()='metadataNamespace']");
        final boolean altered = n.valueOf("@altered").equalsIgnoreCase("true");
        final String datestamp = n.valueOf("./*[local-name()='datestamp']");
        final String harvestDate = n.valueOf("@harvestDate");

        return oaiIProvenance(
                identifier, baseURL, metadataNamespace, altered, datestamp, harvestDate);
    }

    protected DataInfo prepareDataInfo(final Document doc) {
        final Node n = doc.selectSingleNode("//oaf:datainfo");

        if (n == null) {
            return dataInfo(
                    false,
                    null,
                    false,
                    false,
                    MigrationConstants.REPOSITORY_PROVENANCE_ACTIONS,
                    "0.9");
        }

        final String paClassId = n.valueOf("./oaf:provenanceaction/@classid");
        final String paClassName = n.valueOf("./oaf:provenanceaction/@classname");
        final String paSchemeId = n.valueOf("./oaf:provenanceaction/@schemeid");
        final String paSchemeName = n.valueOf("./oaf:provenanceaction/@schemename");

        final boolean deletedbyinference =
                Boolean.parseBoolean(n.valueOf("./oaf:deletedbyinference"));
        final String inferenceprovenance = n.valueOf("./oaf:inferenceprovenance");
        final Boolean inferred = Boolean.parseBoolean(n.valueOf("./oaf:inferred"));
        final String trust = n.valueOf("./oaf:trust");

        return dataInfo(
                deletedbyinference,
                inferenceprovenance,
                inferred,
                false,
                qualifier(paClassId, paClassName, paSchemeId, paSchemeName),
                trust);
    }

    protected Field<String> prepareField(final Node node, final String xpath, final DataInfo info) {
        return field(node.valueOf(xpath), info);
    }

    protected List<Field<String>> prepareListFields(
            final Node node, final String xpath, final DataInfo info) {
        return listFields(info, prepareListString(node, xpath));
    }

    protected List<String> prepareListString(final Node node, final String xpath) {
        final List<String> res = new ArrayList<>();
        for (final Object o : node.selectNodes(xpath)) {
            final String s = ((Node) o).getText().trim();
            if (StringUtils.isNotBlank(s)) {
                res.add(s);
            }
        }
        return res;
    }
}

MigrationConstants.java

@@ -6,17 +6,28 @@ import eu.dnetlib.dhp.schema.oaf.Qualifier;
public class MigrationConstants {

    public static final Qualifier PUBLICATION_RESULTTYPE_QUALIFIER =
            qualifier(
                    "publication",
                    "publication",
                    "dnet:result_typologies",
                    "dnet:result_typologies");
    public static final Qualifier DATASET_RESULTTYPE_QUALIFIER =
            qualifier("dataset", "dataset", "dnet:result_typologies", "dnet:result_typologies");
    public static final Qualifier SOFTWARE_RESULTTYPE_QUALIFIER =
            qualifier("software", "software", "dnet:result_typologies", "dnet:result_typologies");
    public static final Qualifier OTHER_RESULTTYPE_QUALIFIER =
            qualifier("other", "other", "dnet:result_typologies", "dnet:result_typologies");
    public static final Qualifier REPOSITORY_PROVENANCE_ACTIONS =
            qualifier(
                    "sysimport:crosswalk:repository",
                    "sysimport:crosswalk:repository",
                    "dnet:provenanceActions",
                    "dnet:provenanceActions");
    public static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION =
            qualifier(
                    "sysimport:crosswalk:entityregistry",
                    "sysimport:crosswalk:entityregistry",
                    "dnet:provenanceActions",
                    "dnet:provenanceActions");
}