diff --git a/.gitignore b/.gitignore
index f4fb46f2e..73d9179fa 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,8 +3,6 @@
*.iws
*.ipr
*.iml
-*.ipr
-*.iws
*~
.vscode
.metals
@@ -27,4 +25,4 @@ spark-warehouse
/**/job-override.properties
/**/*.log
/**/.factorypath
-
+/**/.scalafmt.conf
diff --git a/.scalafmt.conf b/.scalafmt.conf
new file mode 100644
index 000000000..0b5dbe0b4
--- /dev/null
+++ b/.scalafmt.conf
@@ -0,0 +1,21 @@
+style = defaultWithAlign
+
+align.openParenCallSite = false
+align.openParenDefnSite = false
+align.tokens = [{code = "->"}, {code = "<-"}, {code = "=>", owner = "Case"}]
+continuationIndent.callSite = 2
+continuationIndent.defnSite = 2
+danglingParentheses = true
+indentOperator = spray
+maxColumn = 120
+newlines.alwaysBeforeTopLevelStatements = true
+project.excludeFilters = [".*\\.sbt"]
+rewrite.rules = [AvoidInfix]
+rewrite.rules = [ExpandImportSelectors]
+rewrite.rules = [RedundantBraces]
+rewrite.rules = [RedundantParens]
+rewrite.rules = [SortImports]
+rewrite.rules = [SortModifiers]
+rewrite.rules = [PreferCurlyFors]
+spaces.inImportCurlyBraces = false
+unindentTopLevelOperators = true
\ No newline at end of file
diff --git a/dhp-build/dhp-build-assembly-resources/pom.xml b/dhp-build/dhp-build-assembly-resources/pom.xml
index 012ff89a3..44165995d 100644
--- a/dhp-build/dhp-build-assembly-resources/pom.xml
+++ b/dhp-build/dhp-build-assembly-resources/pom.xml
@@ -6,7 +6,7 @@
		<groupId>eu.dnetlib.dhp</groupId>
		<artifactId>dhp-build</artifactId>
-		<version>1.2.4-SNAPSHOT</version>
+		<version>1.2.5-SNAPSHOT</version>
	<artifactId>dhp-build-assembly-resources</artifactId>
diff --git a/dhp-build/dhp-build-properties-maven-plugin/pom.xml b/dhp-build/dhp-build-properties-maven-plugin/pom.xml
index 256017e2c..7579bdf45 100644
--- a/dhp-build/dhp-build-properties-maven-plugin/pom.xml
+++ b/dhp-build/dhp-build-properties-maven-plugin/pom.xml
@@ -6,7 +6,7 @@
		<groupId>eu.dnetlib.dhp</groupId>
		<artifactId>dhp-build</artifactId>
-		<version>1.2.4-SNAPSHOT</version>
+		<version>1.2.5-SNAPSHOT</version>
	<artifactId>dhp-build-properties-maven-plugin</artifactId>
diff --git a/dhp-build/dhp-code-style/pom.xml b/dhp-build/dhp-code-style/pom.xml
index 77aa2aedb..5a86efe17 100644
--- a/dhp-build/dhp-code-style/pom.xml
+++ b/dhp-build/dhp-code-style/pom.xml
@@ -5,7 +5,7 @@
	<groupId>eu.dnetlib.dhp</groupId>
	<artifactId>dhp-code-style</artifactId>
-	<version>1.2.4-SNAPSHOT</version>
+	<version>1.2.5-SNAPSHOT</version>
	<packaging>jar</packaging>
@@ -22,9 +22,20 @@
dnet45-releases
https://maven.d4science.org/nexus/content/repositories/dnet45-releases
+
+			<id>DHPSite</id>
+			<url>${dhp.site.stage.path}/dhp-build/dhp-code-style</url>
+
+
+
+				<groupId>org.apache.maven.wagon</groupId>
+				<artifactId>wagon-ssh</artifactId>
+				<version>2.10</version>
+
+
@@ -35,14 +46,19 @@
				<groupId>org.apache.maven.plugins</groupId>
				<artifactId>maven-site-plugin</artifactId>
-				<version>3.7.1</version>
+				<version>3.9.1</version>
+
+ true
+
+
UTF-8
+		<dhp.site.stage.path>sftp://dnet-hadoop@static-web.d4science.org/dnet-hadoop</dhp.site.stage.path>
\ No newline at end of file
diff --git a/dhp-build/dhp-code-style/src/main/resources/scalafmt/scalafmt.conf b/dhp-build/dhp-code-style/src/main/resources/scalafmt/scalafmt.conf
new file mode 100644
index 000000000..0b5dbe0b4
--- /dev/null
+++ b/dhp-build/dhp-code-style/src/main/resources/scalafmt/scalafmt.conf
@@ -0,0 +1,21 @@
+style = defaultWithAlign
+
+align.openParenCallSite = false
+align.openParenDefnSite = false
+align.tokens = [{code = "->"}, {code = "<-"}, {code = "=>", owner = "Case"}]
+continuationIndent.callSite = 2
+continuationIndent.defnSite = 2
+danglingParentheses = true
+indentOperator = spray
+maxColumn = 120
+newlines.alwaysBeforeTopLevelStatements = true
+project.excludeFilters = [".*\\.sbt"]
+rewrite.rules = [AvoidInfix]
+rewrite.rules = [ExpandImportSelectors]
+rewrite.rules = [RedundantBraces]
+rewrite.rules = [RedundantParens]
+rewrite.rules = [SortImports]
+rewrite.rules = [SortModifiers]
+rewrite.rules = [PreferCurlyFors]
+spaces.inImportCurlyBraces = false
+unindentTopLevelOperators = true
\ No newline at end of file
diff --git a/dhp-build/dhp-code-style/src/site/site.xml b/dhp-build/dhp-code-style/src/site/site.xml
new file mode 100644
index 000000000..634a2c154
--- /dev/null
+++ b/dhp-build/dhp-code-style/src/site/site.xml
@@ -0,0 +1,21 @@
+
+
+
+ org.apache.maven.skins
+ maven-fluido-skin
+ 1.8
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/dhp-build/pom.xml b/dhp-build/pom.xml
index 12b999b9c..9040ea94e 100644
--- a/dhp-build/pom.xml
+++ b/dhp-build/pom.xml
@@ -4,12 +4,15 @@
		<groupId>eu.dnetlib.dhp</groupId>
		<artifactId>dhp</artifactId>
-		<version>1.2.4-SNAPSHOT</version>
+		<version>1.2.5-SNAPSHOT</version>
	<artifactId>dhp-build</artifactId>
	<packaging>pom</packaging>
	<description>This module is a container for the build tools used in dnet-hadoop</description>
+
+ true
+
dhp-code-style
@@ -17,4 +20,12 @@
dhp-build-properties-maven-plugin
+
+
+
+			<id>DHPSite</id>
+			<url>${dhp.site.stage.path}/dhp-build/</url>
+
+
+
diff --git a/dhp-build/src/site/site.xml b/dhp-build/src/site/site.xml
new file mode 100644
index 000000000..2d9d769a2
--- /dev/null
+++ b/dhp-build/src/site/site.xml
@@ -0,0 +1,22 @@
+
+
+
+ org.apache.maven.skins
+ maven-fluido-skin
+ 1.8
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/dhp-common/pom.xml b/dhp-common/pom.xml
index c057123b1..6198bd81e 100644
--- a/dhp-common/pom.xml
+++ b/dhp-common/pom.xml
@@ -5,7 +5,7 @@
		<groupId>eu.dnetlib.dhp</groupId>
		<artifactId>dhp</artifactId>
-		<version>1.2.4-SNAPSHOT</version>
+		<version>1.2.5-SNAPSHOT</version>
		<relativePath>../pom.xml</relativePath>
@@ -13,9 +13,60 @@
dhp-common
jar
+
+
+			<id>DHPSite</id>
+			<url>${dhp.site.stage.path}/dhp-common</url>
+
+
+
	<description>This module contains common utilities meant to be used across the dnet-hadoop submodules</description>
+
+
+
+				<groupId>net.alchim31.maven</groupId>
+				<artifactId>scala-maven-plugin</artifactId>
+				<version>${net.alchim31.maven.version}</version>
+
+
+ scala-compile-first
+ initialize
+
+ add-source
+ compile
+
+
+
+ scala-test-compile
+ process-test-resources
+
+ testCompile
+
+
+
+ scala-doc
+ process-resources
+
+ doc
+
+
+
+
+ true
+ ${scala.binary.version}
+ ${scala.version}
+
+
+
+
+
+
+			<groupId>eu.dnetlib.dhp</groupId>
+			<artifactId>dhp-pace-core</artifactId>
+			<version>${project.version}</version>
+
org.apache.hadoop
@@ -32,11 +83,11 @@
			<groupId>org.apache.spark</groupId>
-			<artifactId>spark-core_2.11</artifactId>
+			<artifactId>spark-core_${scala.binary.version}</artifactId>
			<groupId>org.apache.spark</groupId>
-			<artifactId>spark-sql_2.11</artifactId>
+			<artifactId>spark-sql_${scala.binary.version}</artifactId>
@@ -98,11 +149,6 @@
okhttp
-		<dependency>
-			<groupId>eu.dnetlib</groupId>
-			<artifactId>dnet-pace-core</artifactId>
-		</dependency>
-
org.apache.httpcomponents
httpclient
@@ -115,7 +161,7 @@
			<groupId>eu.dnetlib.dhp</groupId>
-			<artifactId>dhp-schemas</artifactId>
+			<artifactId>${dhp-schemas.artifact}</artifactId>
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/common/Constants.java b/dhp-common/src/main/java/eu/dnetlib/dhp/common/Constants.java
index a62a0ac79..4f2c6341e 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/common/Constants.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/common/Constants.java
@@ -10,6 +10,12 @@ public class Constants {
	public static final Map<String, String> accessRightsCoarMap = Maps.newHashMap();
	public static final Map<String, String> coarCodeLabelMap = Maps.newHashMap();
+ public static final String ROR_NS_PREFIX = "ror_________";
+
+ public static final String ROR_OPENAIRE_ID = "10|openaire____::993a7ae7a863813cf95028b50708e222";
+
+ public static final String ROR_DATASOURCE_NAME = "Research Organization Registry (ROR)";
+
public static String COAR_ACCESS_RIGHT_SCHEMA = "http://vocabularies.coar-repositories.org/documentation/access_rights/";
private Constants() {
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/common/GraphResultMapper.java b/dhp-common/src/main/java/eu/dnetlib/dhp/common/GraphResultMapper.java
deleted file mode 100644
index 8ceee5c8a..000000000
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/common/GraphResultMapper.java
+++ /dev/null
@@ -1,413 +0,0 @@
-
-package eu.dnetlib.dhp.common;
-
-import java.io.Serializable;
-import java.util.*;
-import java.util.stream.Collectors;
-
-import eu.dnetlib.dhp.schema.common.ModelConstants;
-import eu.dnetlib.dhp.schema.dump.oaf.*;
-import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityInstance;
-import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
-import eu.dnetlib.dhp.schema.oaf.DataInfo;
-import eu.dnetlib.dhp.schema.oaf.Field;
-import eu.dnetlib.dhp.schema.oaf.Journal;
-import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
-
-public class GraphResultMapper implements Serializable {
-
- public static Result map(
- E in) {
-
- CommunityResult out = new CommunityResult();
-
- eu.dnetlib.dhp.schema.oaf.Result input = (eu.dnetlib.dhp.schema.oaf.Result) in;
- Optional ort = Optional.ofNullable(input.getResulttype());
- if (ort.isPresent()) {
- switch (ort.get().getClassid()) {
- case "publication":
- Optional journal = Optional
- .ofNullable(((eu.dnetlib.dhp.schema.oaf.Publication) input).getJournal());
- if (journal.isPresent()) {
- Journal j = journal.get();
- Container c = new Container();
- c.setConferencedate(j.getConferencedate());
- c.setConferenceplace(j.getConferenceplace());
- c.setEdition(j.getEdition());
- c.setEp(j.getEp());
- c.setIss(j.getIss());
- c.setIssnLinking(j.getIssnLinking());
- c.setIssnOnline(j.getIssnOnline());
- c.setIssnPrinted(j.getIssnPrinted());
- c.setName(j.getName());
- c.setSp(j.getSp());
- c.setVol(j.getVol());
- out.setContainer(c);
- out.setType(ModelConstants.PUBLICATION_DEFAULT_RESULTTYPE.getClassname());
- }
- break;
- case "dataset":
- eu.dnetlib.dhp.schema.oaf.Dataset id = (eu.dnetlib.dhp.schema.oaf.Dataset) input;
- Optional.ofNullable(id.getSize()).ifPresent(v -> out.setSize(v.getValue()));
- Optional.ofNullable(id.getVersion()).ifPresent(v -> out.setVersion(v.getValue()));
-
- out
- .setGeolocation(
- Optional
- .ofNullable(id.getGeolocation())
- .map(
- igl -> igl
- .stream()
- .filter(Objects::nonNull)
- .map(gli -> {
- GeoLocation gl = new GeoLocation();
- gl.setBox(gli.getBox());
- gl.setPlace(gli.getPlace());
- gl.setPoint(gli.getPoint());
- return gl;
- })
- .collect(Collectors.toList()))
- .orElse(null));
-
- out.setType(ModelConstants.DATASET_DEFAULT_RESULTTYPE.getClassname());
- break;
- case "software":
-
- eu.dnetlib.dhp.schema.oaf.Software is = (eu.dnetlib.dhp.schema.oaf.Software) input;
- Optional
- .ofNullable(is.getCodeRepositoryUrl())
- .ifPresent(value -> out.setCodeRepositoryUrl(value.getValue()));
- Optional
- .ofNullable(is.getDocumentationUrl())
- .ifPresent(
- value -> out
- .setDocumentationUrl(
- value
- .stream()
- .map(Field::getValue)
- .collect(Collectors.toList())));
-
- Optional
- .ofNullable(is.getProgrammingLanguage())
- .ifPresent(value -> out.setProgrammingLanguage(value.getClassid()));
-
- out.setType(ModelConstants.SOFTWARE_DEFAULT_RESULTTYPE.getClassname());
- break;
- case "other":
-
- eu.dnetlib.dhp.schema.oaf.OtherResearchProduct ir = (eu.dnetlib.dhp.schema.oaf.OtherResearchProduct) input;
- out
- .setContactgroup(
- Optional
- .ofNullable(ir.getContactgroup())
- .map(value -> value.stream().map(Field::getValue).collect(Collectors.toList()))
- .orElse(null));
-
- out
- .setContactperson(
- Optional
- .ofNullable(ir.getContactperson())
- .map(value -> value.stream().map(Field::getValue).collect(Collectors.toList()))
- .orElse(null));
- out
- .setTool(
- Optional
- .ofNullable(ir.getTool())
- .map(value -> value.stream().map(Field::getValue).collect(Collectors.toList()))
- .orElse(null));
-
- out.setType(ModelConstants.ORP_DEFAULT_RESULTTYPE.getClassname());
-
- break;
- }
-
- Optional
- .ofNullable(input.getAuthor())
- .ifPresent(
- ats -> out.setAuthor(ats.stream().map(GraphResultMapper::getAuthor).collect(Collectors.toList())));
-
- // I do not map Access Right UNKNOWN or OTHER
-
- Optional oar = Optional.ofNullable(input.getBestaccessright());
- if (oar.isPresent()) {
- if (Constants.accessRightsCoarMap.containsKey(oar.get().getClassid())) {
- String code = Constants.accessRightsCoarMap.get(oar.get().getClassid());
- out
- .setBestaccessright(
- AccessRight
- .newInstance(
- code,
- Constants.coarCodeLabelMap.get(code),
- Constants.COAR_ACCESS_RIGHT_SCHEMA));
- }
- }
-
- final List contributorList = new ArrayList<>();
- Optional
- .ofNullable(input.getContributor())
- .ifPresent(value -> value.stream().forEach(c -> contributorList.add(c.getValue())));
- out.setContributor(contributorList);
-
- Optional
- .ofNullable(input.getCountry())
- .ifPresent(
- value -> out
- .setCountry(
- value
- .stream()
- .map(
- c -> {
- if (c.getClassid().equals((ModelConstants.UNKNOWN))) {
- return null;
- }
- Country country = new Country();
- country.setCode(c.getClassid());
- country.setLabel(c.getClassname());
- Optional
- .ofNullable(c.getDataInfo())
- .ifPresent(
- provenance -> country
- .setProvenance(
- Provenance
- .newInstance(
- provenance
- .getProvenanceaction()
- .getClassname(),
- c.getDataInfo().getTrust())));
- return country;
- })
- .filter(Objects::nonNull)
- .collect(Collectors.toList())));
-
- final List coverageList = new ArrayList<>();
- Optional
- .ofNullable(input.getCoverage())
- .ifPresent(value -> value.stream().forEach(c -> coverageList.add(c.getValue())));
- out.setCoverage(coverageList);
-
- out.setDateofcollection(input.getDateofcollection());
-
- final List descriptionList = new ArrayList<>();
- Optional
- .ofNullable(input.getDescription())
- .ifPresent(value -> value.forEach(d -> descriptionList.add(d.getValue())));
- out.setDescription(descriptionList);
- Optional> oStr = Optional.ofNullable(input.getEmbargoenddate());
- if (oStr.isPresent()) {
- out.setEmbargoenddate(oStr.get().getValue());
- }
-
- final List formatList = new ArrayList<>();
- Optional
- .ofNullable(input.getFormat())
- .ifPresent(value -> value.stream().forEach(f -> formatList.add(f.getValue())));
- out.setFormat(formatList);
- out.setId(input.getId());
- out.setOriginalId(input.getOriginalId());
-
- Optional> oInst = Optional
- .ofNullable(input.getInstance());
-
- if (oInst.isPresent()) {
- out
- .setInstance(
- oInst.get().stream().map(GraphResultMapper::getInstance).collect(Collectors.toList()));
-
- }
-
- Optional oL = Optional.ofNullable(input.getLanguage());
- if (oL.isPresent()) {
- eu.dnetlib.dhp.schema.oaf.Qualifier language = oL.get();
- out.setLanguage(Qualifier.newInstance(language.getClassid(), language.getClassname()));
- }
- Optional oLong = Optional.ofNullable(input.getLastupdatetimestamp());
- if (oLong.isPresent()) {
- out.setLastupdatetimestamp(oLong.get());
- }
- Optional> otitle = Optional.ofNullable(input.getTitle());
- if (otitle.isPresent()) {
- List iTitle = otitle
- .get()
- .stream()
- .filter(t -> t.getQualifier().getClassid().equalsIgnoreCase("main title"))
- .collect(Collectors.toList());
- if (!iTitle.isEmpty()) {
- out.setMaintitle(iTitle.get(0).getValue());
- }
-
- iTitle = otitle
- .get()
- .stream()
- .filter(t -> t.getQualifier().getClassid().equalsIgnoreCase("subtitle"))
- .collect(Collectors.toList());
- if (!iTitle.isEmpty()) {
- out.setSubtitle(iTitle.get(0).getValue());
- }
-
- }
-
- List pids = new ArrayList<>();
- Optional
- .ofNullable(input.getPid())
- .ifPresent(
- value -> value
- .stream()
- .forEach(
- p -> pids
- .add(
- ControlledField
- .newInstance(p.getQualifier().getClassid(), p.getValue()))));
- out.setPid(pids);
- oStr = Optional.ofNullable(input.getDateofacceptance());
- if (oStr.isPresent()) {
- out.setPublicationdate(oStr.get().getValue());
- }
- oStr = Optional.ofNullable(input.getPublisher());
- if (oStr.isPresent()) {
- out.setPublisher(oStr.get().getValue());
- }
-
- List sourceList = new ArrayList<>();
- Optional
- .ofNullable(input.getSource())
- .ifPresent(value -> value.stream().forEach(s -> sourceList.add(s.getValue())));
- // out.setSource(input.getSource().stream().map(s -> s.getValue()).collect(Collectors.toList()));
- List subjectList = new ArrayList<>();
- Optional
- .ofNullable(input.getSubject())
- .ifPresent(
- value -> value
- .forEach(s -> subjectList.add(getSubject(s))));
-
- out.setSubjects(subjectList);
-
- out.setType(input.getResulttype().getClassid());
- }
-
- out
- .setCollectedfrom(
- input
- .getCollectedfrom()
- .stream()
- .map(cf -> KeyValue.newInstance(cf.getKey(), cf.getValue()))
- .collect(Collectors.toList()));
-
- return out;
-
- }
-
- private static CommunityInstance getInstance(eu.dnetlib.dhp.schema.oaf.Instance i) {
- CommunityInstance instance = new CommunityInstance();
-
- setCommonValue(i, instance);
-
- instance
- .setCollectedfrom(
- KeyValue
- .newInstance(i.getCollectedfrom().getKey(), i.getCollectedfrom().getValue()));
-
- instance
- .setHostedby(
- KeyValue.newInstance(i.getHostedby().getKey(), i.getHostedby().getValue()));
-
- return instance;
-
- }
-
- private static void setCommonValue(eu.dnetlib.dhp.schema.oaf.Instance i, I instance) {
- Optional opAr = Optional
- .ofNullable(i.getAccessright());
- if (opAr.isPresent()) {
- if (Constants.accessRightsCoarMap.containsKey(opAr.get().getClassid())) {
- String code = Constants.accessRightsCoarMap.get(opAr.get().getClassid());
- instance
- .setAccessright(
- AccessRight
- .newInstance(
- code,
- Constants.coarCodeLabelMap.get(code),
- Constants.COAR_ACCESS_RIGHT_SCHEMA));
- }
- }
-
- Optional
- .ofNullable(i.getLicense())
- .ifPresent(value -> instance.setLicense(value.getValue()));
- Optional
- .ofNullable(i.getDateofacceptance())
- .ifPresent(value -> instance.setPublicationdate(value.getValue()));
- Optional
- .ofNullable(i.getRefereed())
- .ifPresent(value -> instance.setRefereed(value.getClassname()));
- Optional
- .ofNullable(i.getInstancetype())
- .ifPresent(value -> instance.setType(value.getClassname()));
- Optional.ofNullable(i.getUrl()).ifPresent(value -> instance.setUrl(value));
-
- }
-
- private static Subject getSubject(StructuredProperty s) {
- Subject subject = new Subject();
- subject.setSubject(ControlledField.newInstance(s.getQualifier().getClassid(), s.getValue()));
- Optional di = Optional.ofNullable(s.getDataInfo());
- if (di.isPresent()) {
- Provenance p = new Provenance();
- p.setProvenance(di.get().getProvenanceaction().getClassname());
- p.setTrust(di.get().getTrust());
- subject.setProvenance(p);
- }
-
- return subject;
- }
-
- private static Author getAuthor(eu.dnetlib.dhp.schema.oaf.Author oa) {
- Author a = new Author();
- a.setFullname(oa.getFullname());
- a.setName(oa.getName());
- a.setSurname(oa.getSurname());
- a.setRank(oa.getRank());
-
- Optional> oPids = Optional
- .ofNullable(oa.getPid());
- if (oPids.isPresent()) {
- Pid pid = getOrcid(oPids.get());
- if (pid != null) {
- a.setPid(pid);
- }
- }
-
- return a;
- }
-
- private static Pid getOrcid(List p) {
- for (StructuredProperty pid : p) {
- if (pid.getQualifier().getClassid().equals(ModelConstants.ORCID)) {
- Optional di = Optional.ofNullable(pid.getDataInfo());
- if (di.isPresent()) {
- return Pid
- .newInstance(
- ControlledField
- .newInstance(
- pid.getQualifier().getClassid(),
- pid.getValue()),
- Provenance
- .newInstance(
- di.get().getProvenanceaction().getClassname(),
- di.get().getTrust()));
- } else {
- return Pid
- .newInstance(
- ControlledField
- .newInstance(
- pid.getQualifier().getClassid(),
- pid.getValue())
-
- );
- }
-
- }
- }
- return null;
- }
-
-}
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/common/MDStoreInfo.java b/dhp-common/src/main/java/eu/dnetlib/dhp/common/MDStoreInfo.java
new file mode 100644
index 000000000..bd1ccca50
--- /dev/null
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/common/MDStoreInfo.java
@@ -0,0 +1,100 @@
+
+package eu.dnetlib.dhp.common;
+
+/**
+ * This utility class represents the Metadata Store information
+ * needed during the migration from MongoDB to HDFS.
+ */
+public class MDStoreInfo {
+ private String mdstore;
+ private String currentId;
+ private Long latestTimestamp;
+
+ /**
+ * Instantiates a new Md store info.
+ */
+ public MDStoreInfo() {
+ }
+
+ /**
+ * Instantiates a new Md store info.
+ *
+ * @param mdstore the mdstore
+ * @param currentId the current id
+ * @param latestTimestamp the latest timestamp
+ */
+ public MDStoreInfo(String mdstore, String currentId, Long latestTimestamp) {
+ this.mdstore = mdstore;
+ this.currentId = currentId;
+ this.latestTimestamp = latestTimestamp;
+ }
+
+ /**
+ * Gets mdstore.
+ *
+ * @return the mdstore
+ */
+ public String getMdstore() {
+ return mdstore;
+ }
+
+ /**
+ * Sets mdstore.
+ *
+ * @param mdstore the mdstore
+ * @return the mdstore
+ */
+ public MDStoreInfo setMdstore(String mdstore) {
+ this.mdstore = mdstore;
+ return this;
+ }
+
+ /**
+ * Gets current id.
+ *
+ * @return the current id
+ */
+ public String getCurrentId() {
+ return currentId;
+ }
+
+ /**
+ * Sets current id.
+ *
+ * @param currentId the current id
+ * @return the current id
+ */
+ public MDStoreInfo setCurrentId(String currentId) {
+ this.currentId = currentId;
+ return this;
+ }
+
+ /**
+ * Gets latest timestamp.
+ *
+ * @return the latest timestamp
+ */
+ public Long getLatestTimestamp() {
+ return latestTimestamp;
+ }
+
+ /**
+ * Sets latest timestamp.
+ *
+ * @param latestTimestamp the latest timestamp
+ * @return the latest timestamp
+ */
+ public MDStoreInfo setLatestTimestamp(Long latestTimestamp) {
+ this.latestTimestamp = latestTimestamp;
+ return this;
+ }
+
+ @Override
+ public String toString() {
+ return "MDStoreInfo{" +
+ "mdstore='" + mdstore + '\'' +
+ ", currentId='" + currentId + '\'' +
+ ", latestTimestamp=" + latestTimestamp +
+ '}';
+ }
+}
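A minimal usage sketch for the MDStoreInfo bean added above; the identifier values are hypothetical and only illustrate the fluent setters and the toString() output.

import eu.dnetlib.dhp.common.MDStoreInfo;

public class MDStoreInfoExample {
	public static void main(String[] args) {
		// hypothetical mdstore identifiers, used only to exercise the fluent API
		MDStoreInfo info = new MDStoreInfo()
			.setMdstore("md-store-0001")
			.setCurrentId("md-store-0001::current")
			.setLatestTimestamp(1658409600000L);
		System.out.println(info); // prints the MDStoreInfo{...} form defined in toString()
	}
}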
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/common/MakeTarArchive.java b/dhp-common/src/main/java/eu/dnetlib/dhp/common/MakeTarArchive.java
index abb9dc148..eca433e9e 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/common/MakeTarArchive.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/common/MakeTarArchive.java
@@ -5,13 +5,71 @@ import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
+import java.util.Optional;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
public class MakeTarArchive implements Serializable {
+ private static final Logger log = LoggerFactory.getLogger(MakeTarArchive.class);
+
+ public static void main(String[] args) throws Exception {
+ String jsonConfiguration = IOUtils
+ .toString(
+ MakeTarArchive.class
+ .getResourceAsStream(
+ "/eu/dnetlib/dhp/common/input_maketar_parameters.json"));
+
+ final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
+ parser.parseArgument(args);
+
+ final String outputPath = parser.get("hdfsPath");
+ log.info("hdfsPath: {}", outputPath);
+
+ final String hdfsNameNode = parser.get("nameNode");
+ log.info("nameNode: {}", hdfsNameNode);
+
+ final String inputPath = parser.get("sourcePath");
+ log.info("input path : {}", inputPath);
+
+ final int gBperSplit = Optional
+ .ofNullable(parser.get("splitSize"))
+ .map(Integer::valueOf)
+ .orElse(10);
+
+ Configuration conf = new Configuration();
+ conf.set("fs.defaultFS", hdfsNameNode);
+
+ FileSystem fileSystem = FileSystem.get(conf);
+
+ makeTArArchive(fileSystem, inputPath, outputPath, gBperSplit);
+
+ }
+
+ public static void makeTArArchive(FileSystem fileSystem, String inputPath, String outputPath, int gBperSplit)
+ throws IOException {
+
+		RemoteIterator<LocatedFileStatus> dirIterator = fileSystem.listLocatedStatus(new Path(inputPath));
+
+ while (dirIterator.hasNext()) {
+ LocatedFileStatus fileStatus = dirIterator.next();
+
+ Path p = fileStatus.getPath();
+ String pathString = p.toString();
+ String entity = pathString.substring(pathString.lastIndexOf("/") + 1);
+
+ MakeTarArchive.tarMaxSize(fileSystem, pathString, outputPath + "/" + entity, entity, gBperSplit);
+ }
+ }
+
private static TarArchiveOutputStream getTar(FileSystem fileSystem, String outputPath) throws IOException {
Path hdfsWritePath = new Path(outputPath);
if (fileSystem.exists(hdfsWritePath)) {
@@ -21,7 +79,7 @@ public class MakeTarArchive implements Serializable {
return new TarArchiveOutputStream(fileSystem.create(hdfsWritePath).getWrappedStream());
}
- private static void write(FileSystem fileSystem, String inputPath, String outputPath, String dir_name)
+ private static void write(FileSystem fileSystem, String inputPath, String outputPath, String dirName)
throws IOException {
Path hdfsWritePath = new Path(outputPath);
@@ -37,7 +95,7 @@ public class MakeTarArchive implements Serializable {
new Path(inputPath), true);
while (iterator.hasNext()) {
- writeCurrentFile(fileSystem, dir_name, iterator, ar, 0);
+ writeCurrentFile(fileSystem, dirName, iterator, ar, 0);
}
}
@@ -59,32 +117,30 @@ public class MakeTarArchive implements Serializable {
new Path(inputPath), true);
boolean next = fileStatusListIterator.hasNext();
while (next) {
- TarArchiveOutputStream ar = getTar(fileSystem, outputPath + "_" + (partNum + 1) + ".tar");
+ try (TarArchiveOutputStream ar = getTar(fileSystem, outputPath + "_" + (partNum + 1) + ".tar")) {
- long current_size = 0;
- while (next && current_size < bytesPerSplit) {
- current_size = writeCurrentFile(fileSystem, dir_name, fileStatusListIterator, ar, current_size);
- next = fileStatusListIterator.hasNext();
+ long currentSize = 0;
+ while (next && currentSize < bytesPerSplit) {
+ currentSize = writeCurrentFile(fileSystem, dir_name, fileStatusListIterator, ar, currentSize);
+ next = fileStatusListIterator.hasNext();
+ }
+
+ partNum += 1;
}
-
- partNum += 1;
- ar.close();
}
-
}
-
}
- private static long writeCurrentFile(FileSystem fileSystem, String dir_name,
+ private static long writeCurrentFile(FileSystem fileSystem, String dirName,
		RemoteIterator<LocatedFileStatus> fileStatusListIterator,
- TarArchiveOutputStream ar, long current_size) throws IOException {
+ TarArchiveOutputStream ar, long currentSize) throws IOException {
LocatedFileStatus fileStatus = fileStatusListIterator.next();
Path p = fileStatus.getPath();
- String p_string = p.toString();
- if (!p_string.endsWith("_SUCCESS")) {
- String name = p_string.substring(p_string.lastIndexOf("/") + 1);
+ String pString = p.toString();
+ if (!pString.endsWith("_SUCCESS")) {
+ String name = pString.substring(pString.lastIndexOf("/") + 1);
if (name.startsWith("part-") & name.length() > 10) {
String tmp = name.substring(0, 10);
if (name.contains(".")) {
@@ -92,9 +148,9 @@ public class MakeTarArchive implements Serializable {
}
name = tmp;
}
- TarArchiveEntry entry = new TarArchiveEntry(dir_name + "/" + name);
+ TarArchiveEntry entry = new TarArchiveEntry(dirName + "/" + name);
entry.setSize(fileStatus.getLen());
- current_size += fileStatus.getLen();
+ currentSize += fileStatus.getLen();
ar.putArchiveEntry(entry);
InputStream is = fileSystem.open(fileStatus.getPath());
@@ -110,7 +166,7 @@ public class MakeTarArchive implements Serializable {
ar.closeArchiveEntry();
}
- return current_size;
+ return currentSize;
}
}
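A sketch of how the new makeTArArchive entry point could be driven programmatically; the namenode URI and the HDFS paths are placeholders.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

import eu.dnetlib.dhp.common.MakeTarArchive;

public class MakeTarArchiveExample {
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://namenode.example.org:8020"); // placeholder namenode
		FileSystem fs = FileSystem.get(conf);
		// each subdirectory of the source path is archived as <outputPath>/<entity>_<n>.tar, ~10 GB per split
		MakeTarArchive.makeTArArchive(fs, "/data/graph/dump", "/data/graph/tar", 10);
	}
}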
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/common/MdstoreClient.java b/dhp-common/src/main/java/eu/dnetlib/dhp/common/MdstoreClient.java
index d06544ae1..34aa37be5 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/common/MdstoreClient.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/common/MdstoreClient.java
@@ -1,12 +1,12 @@
package eu.dnetlib.dhp.common;
+import static com.mongodb.client.model.Sorts.descending;
+
import java.io.Closeable;
import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Optional;
+import java.util.*;
+import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import org.apache.commons.lang3.StringUtils;
@@ -38,6 +38,26 @@ public class MdstoreClient implements Closeable {
this.db = getDb(client, dbName);
}
+ private Long parseTimestamp(Document f) {
+ if (f == null || !f.containsKey("timestamp"))
+ return null;
+
+ Object ts = f.get("timestamp");
+
+ return Long.parseLong(ts.toString());
+ }
+
+ public Long getLatestTimestamp(final String collectionId) {
+		MongoCollection<Document> collection = db.getCollection(collectionId);
+		FindIterable<Document> result = collection.find().sort(descending("timestamp")).limit(1);
+ if (result == null) {
+ return null;
+ }
+
+ Document f = result.first();
+ return parseTimestamp(f);
+ }
+
	public MongoCollection<Document> mdStore(final String mdId) {
BasicDBObject query = (BasicDBObject) QueryBuilder.start("mdId").is(mdId).get();
@@ -54,6 +74,16 @@ public class MdstoreClient implements Closeable {
return getColl(db, currentId, true);
}
+	public List<MDStoreInfo> mdStoreWithTimestamp(final String mdFormat, final String mdLayout,
+		final String mdInterpretation) {
+		Map<String, String> res = validCollections(mdFormat, mdLayout, mdInterpretation);
+ return res
+ .entrySet()
+ .stream()
+ .map(e -> new MDStoreInfo(e.getKey(), e.getValue(), getLatestTimestamp(e.getValue())))
+ .collect(Collectors.toList());
+ }
+
	public Map<String, String> validCollections(
final String mdFormat, final String mdLayout, final String mdInterpretation) {
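A sketch of how the new mdStoreWithTimestamp method could be used to list the valid stores with their latest record timestamps; the MongoDB URL, database name, the (baseUrl, dbName) constructor and the format/layout/interpretation values are assumptions.

import java.util.List;

import eu.dnetlib.dhp.common.MDStoreInfo;
import eu.dnetlib.dhp.common.MdstoreClient;

public class MdstoreClientExample {
	public static void main(String[] args) throws Exception {
		// connection data and parameter values below are placeholders
		try (MdstoreClient client = new MdstoreClient("mongodb://mongo.example.org:27017", "mdstore")) {
			List<MDStoreInfo> stores = client.mdStoreWithTimestamp("oai_dc", "store", "cleaned");
			stores.forEach(System.out::println);
		}
	}
}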
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/common/PacePerson.java b/dhp-common/src/main/java/eu/dnetlib/dhp/common/PacePerson.java
index 91c6c1825..fac9a7565 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/common/PacePerson.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/common/PacePerson.java
@@ -1,18 +1,18 @@
package eu.dnetlib.dhp.common;
+import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.text.Normalizer;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
+import java.util.*;
+import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.text.WordUtils;
+import com.ctc.wstx.dtd.LargePrefixedNameSet;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
-import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.hash.Hashing;
@@ -29,7 +29,19 @@ public class PacePerson {
private List fullname = Lists.newArrayList();
private final String original;
-	private static Set<String> particles = null;
+	private static Set<String> particles;
+
+ static {
+ try {
+ particles = new HashSet<>(IOUtils
+ .readLines(
+ PacePerson.class
+ .getResourceAsStream(
+ "/eu/dnetlib/dhp/common/name_particles.txt")));
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
/**
* Capitalizes a string
@@ -37,29 +49,20 @@ public class PacePerson {
* @param s the string to capitalize
* @return the input string with capital letter
*/
- public static final String capitalize(final String s) {
+ public static String capitalize(final String s) {
+ if (particles.contains(s)) {
+ return s;
+ }
return WordUtils.capitalize(s.toLowerCase(), ' ', '-');
}
/**
	 * Adds a dot to a string with length equal to 1
*/
- public static final String dotAbbreviations(final String s) {
+ public static String dotAbbreviations(final String s) {
return s.length() == 1 ? s + "." : s;
}
-	public static Set<String> loadFromClasspath(final String classpath) {
-		final Set<String> h = new HashSet<>();
- try {
- for (final String s : IOUtils.readLines(PacePerson.class.getResourceAsStream(classpath))) {
- h.add(s);
- }
- } catch (final Throwable e) {
- return new HashSet<>();
- }
- return h;
- }
-
/**
	 * The constructor of the class. It fills the fields of the class based on the input fullname.
*
@@ -128,10 +131,6 @@ public class PacePerson {
}
	private List<String> splitTerms(final String s) {
-		if (particles == null) {
-			particles = loadFromClasspath("/eu/dnetlib/dhp/oa/graph/pace/name_particles.txt");
-		}
-
		final List<String> list = Lists.newArrayList();
		for (final String part : Splitter.on(" ").omitEmptyStrings().split(s)) {
			if (!particles.contains(part.toLowerCase())) {
@@ -187,17 +186,36 @@ public class PacePerson {
}
	public List<String> getCapitalFirstnames() {
- return Lists
- .newArrayList(
- Iterables.transform(getNameWithAbbreviations(), PacePerson::capitalize));
+ return Optional
+ .ofNullable(getNameWithAbbreviations())
+ .map(
+ name -> name
+ .stream()
+ .map(PacePerson::capitalize)
+ .collect(Collectors.toList()))
+ .orElse(new ArrayList<>());
}
	public List<String> getCapitalSurname() {
- return Lists.newArrayList(Iterables.transform(surname, PacePerson::capitalize));
+ return Optional
+ .ofNullable(getSurname())
+ .map(
+ surname -> surname
+ .stream()
+ .map(PacePerson::capitalize)
+ .collect(Collectors.toList()))
+ .orElse(new ArrayList<>());
}
	public List<String> getNameWithAbbreviations() {
- return Lists.newArrayList(Iterables.transform(name, PacePerson::dotAbbreviations));
+ return Optional
+ .ofNullable(getName())
+ .map(
+ name -> name
+ .stream()
+ .map(PacePerson::dotAbbreviations)
+ .collect(Collectors.toList()))
+ .orElse(new ArrayList<>());
}
public boolean isAccurate() {
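A small sketch of the particle-aware capitalize and dotAbbreviations helpers after this change, assuming "van" is one of the entries loaded from name_particles.txt.

import eu.dnetlib.dhp.common.PacePerson;

public class PacePersonExample {
	public static void main(String[] args) {
		System.out.println(PacePerson.capitalize("van"));     // returned unchanged if listed as a name particle
		System.out.println(PacePerson.capitalize("rossi"));   // Rossi
		System.out.println(PacePerson.dotAbbreviations("m")); // m.
	}
}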
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/common/action/ReadDatasourceMasterDuplicateFromDB.java b/dhp-common/src/main/java/eu/dnetlib/dhp/common/action/ReadDatasourceMasterDuplicateFromDB.java
new file mode 100644
index 000000000..5d39216f1
--- /dev/null
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/common/action/ReadDatasourceMasterDuplicateFromDB.java
@@ -0,0 +1,81 @@
+
+package eu.dnetlib.dhp.common.action;
+
+import java.io.BufferedWriter;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.common.DbClient;
+import eu.dnetlib.dhp.common.action.model.MasterDuplicate;
+import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
+
+public class ReadDatasourceMasterDuplicateFromDB {
+
+ private static final Logger log = LoggerFactory.getLogger(ReadDatasourceMasterDuplicateFromDB.class);
+
+ private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
+ private static final String QUERY = "SELECT distinct dd.id as masterId, d.officialname as masterName, dd.duplicate as duplicateId "
+ +
+ "FROM dsm_dedup_services dd join dsm_services d on (dd.id = d.id);";
+
+ public static int execute(String dbUrl, String dbUser, String dbPassword, String hdfsPath, String hdfsNameNode)
+ throws IOException {
+ int count = 0;
+ try (DbClient dbClient = new DbClient(dbUrl, dbUser, dbPassword)) {
+ Configuration conf = new Configuration();
+ conf.set("fs.defaultFS", hdfsNameNode);
+ FileSystem fileSystem = FileSystem.get(conf);
+ FSDataOutputStream fos = fileSystem.create(new Path(hdfsPath));
+
+ log.info("running query: {}", QUERY);
+ log.info("storing results in: {}", hdfsPath);
+
+ try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(fos, StandardCharsets.UTF_8))) {
+ dbClient.processResults(QUERY, rs -> writeMap(datasourceMasterMap(rs), writer));
+ count++;
+ }
+ }
+ return count;
+ }
+
+ private static MasterDuplicate datasourceMasterMap(ResultSet rs) {
+ try {
+ final MasterDuplicate md = new MasterDuplicate();
+
+ final String duplicateId = rs.getString("duplicateId");
+ final String masterId = rs.getString("masterId");
+ final String masterName = rs.getString("masterName");
+
+ md.setDuplicateId(OafMapperUtils.createOpenaireId(10, duplicateId, true));
+ md.setMasterId(OafMapperUtils.createOpenaireId(10, masterId, true));
+ md.setMasterName(masterName);
+
+ return md;
+ } catch (final SQLException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ private static void writeMap(final MasterDuplicate dm, final BufferedWriter writer) {
+ try {
+ writer.write(OBJECT_MAPPER.writeValueAsString(dm));
+ writer.newLine();
+ } catch (final IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+}
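A sketch of how the new ReadDatasourceMasterDuplicateFromDB helper could be invoked; the JDBC URL, credentials, HDFS path and namenode below are placeholders.

import eu.dnetlib.dhp.common.action.ReadDatasourceMasterDuplicateFromDB;

public class ReadMasterDuplicateExample {
	public static void main(String[] args) throws Exception {
		int count = ReadDatasourceMasterDuplicateFromDB
			.execute(
				"jdbc:postgresql://db.example.org:5432/dnet_openaire", // placeholder connection data
				"dnet", "secret",
				"/data/masterduplicate/datasources", "hdfs://namenode.example.org:8020");
		System.out.println(count);
	}
}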
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/common/action/model/MasterDuplicate.java b/dhp-common/src/main/java/eu/dnetlib/dhp/common/action/model/MasterDuplicate.java
new file mode 100644
index 000000000..12a4407c4
--- /dev/null
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/common/action/model/MasterDuplicate.java
@@ -0,0 +1,38 @@
+
+package eu.dnetlib.dhp.common.action.model;
+
+import java.io.Serializable;
+
+/**
+ * @author miriam.baglioni
+ * @Date 21/07/22
+ */
+public class MasterDuplicate implements Serializable {
+ private String duplicateId;
+ private String masterId;
+ private String masterName;
+
+ public String getDuplicateId() {
+ return duplicateId;
+ }
+
+ public void setDuplicateId(String duplicateId) {
+ this.duplicateId = duplicateId;
+ }
+
+ public String getMasterId() {
+ return masterId;
+ }
+
+ public void setMasterId(String masterId) {
+ this.masterId = masterId;
+ }
+
+ public String getMasterName() {
+ return masterName;
+ }
+
+ public void setMasterName(String masterName) {
+ this.masterName = masterName;
+ }
+}
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/common/api/ZenodoAPIClient.java b/dhp-common/src/main/java/eu/dnetlib/dhp/common/api/ZenodoAPIClient.java
index 3f5c6ad4a..544da78f5 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/common/api/ZenodoAPIClient.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/common/api/ZenodoAPIClient.java
@@ -3,10 +3,13 @@ package eu.dnetlib.dhp.common.api;
import java.io.*;
import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URL;
import java.util.concurrent.TimeUnit;
import org.apache.http.HttpHeaders;
import org.apache.http.entity.ContentType;
+import org.jetbrains.annotations.NotNull;
import com.google.gson.Gson;
@@ -60,33 +63,31 @@ public class ZenodoAPIClient implements Serializable {
*/
public int newDeposition() throws IOException {
String json = "{}";
- OkHttpClient httpClient = new OkHttpClient.Builder().connectTimeout(600, TimeUnit.SECONDS).build();
-
- RequestBody body = RequestBody.create(json, MEDIA_TYPE_JSON);
-
- Request request = new Request.Builder()
- .url(urlString)
- .addHeader(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString()) // add request headers
- .addHeader(HttpHeaders.AUTHORIZATION, "Bearer " + access_token)
- .post(body)
- .build();
-
- try (Response response = httpClient.newCall(request).execute()) {
-
- if (!response.isSuccessful())
- throw new IOException("Unexpected code " + response + response.body().string());
-
- // Get response body
- json = response.body().string();
-
- ZenodoModel newSubmission = new Gson().fromJson(json, ZenodoModel.class);
- this.bucket = newSubmission.getLinks().getBucket();
- this.deposition_id = newSubmission.getId();
-
- return response.code();
+ URL url = new URL(urlString);
+ HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+ conn.setRequestProperty(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString());
+ conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
+ conn.setRequestMethod("POST");
+ conn.setDoOutput(true);
+ try (OutputStream os = conn.getOutputStream()) {
+ byte[] input = json.getBytes("utf-8");
+ os.write(input, 0, input.length);
}
+ String body = getBody(conn);
+
+ int responseCode = conn.getResponseCode();
+ conn.disconnect();
+
+ if (!checkOKStatus(responseCode))
+ throw new IOException("Unexpected code " + responseCode + body);
+
+ ZenodoModel newSubmission = new Gson().fromJson(body, ZenodoModel.class);
+ this.bucket = newSubmission.getLinks().getBucket();
+ this.deposition_id = newSubmission.getId();
+
+ return responseCode;
}
/**
@@ -94,28 +95,48 @@ public class ZenodoAPIClient implements Serializable {
*
* @param is the inputStream for the file to upload
* @param file_name the name of the file as it will appear on Zenodo
- * @param len the size of the file
* @return the response code
*/
- public int uploadIS(InputStream is, String file_name, long len) throws IOException {
- OkHttpClient httpClient = new OkHttpClient.Builder()
- .writeTimeout(600, TimeUnit.SECONDS)
- .readTimeout(600, TimeUnit.SECONDS)
- .connectTimeout(600, TimeUnit.SECONDS)
- .build();
+ public int uploadIS(InputStream is, String file_name) throws IOException {
- Request request = new Request.Builder()
- .url(bucket + "/" + file_name)
- .addHeader(HttpHeaders.CONTENT_TYPE, "application/zip") // add request headers
- .addHeader(HttpHeaders.AUTHORIZATION, "Bearer " + access_token)
- .put(InputStreamRequestBody.create(MEDIA_TYPE_ZIP, is, len))
- .build();
+ URL url = new URL(bucket + "/" + file_name);
+ HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+ conn.setRequestProperty(HttpHeaders.CONTENT_TYPE, "application/zip");
+ conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
+ conn.setDoOutput(true);
+ conn.setRequestMethod("PUT");
+
+ byte[] buf = new byte[8192];
+ int length;
+ try (OutputStream os = conn.getOutputStream()) {
+ while ((length = is.read(buf)) != -1) {
+ os.write(buf, 0, length);
+ }
- try (Response response = httpClient.newCall(request).execute()) {
- if (!response.isSuccessful())
- throw new IOException("Unexpected code " + response + response.body().string());
- return response.code();
}
+ int responseCode = conn.getResponseCode();
+ if (!checkOKStatus(responseCode)) {
+ throw new IOException("Unexpected code " + responseCode + getBody(conn));
+ }
+
+ return responseCode;
+ }
+
+ @NotNull
+ private String getBody(HttpURLConnection conn) throws IOException {
+ String body = "{}";
+ try (BufferedReader br = new BufferedReader(
+ new InputStreamReader(conn.getInputStream(), "utf-8"))) {
+ StringBuilder response = new StringBuilder();
+ String responseLine = null;
+ while ((responseLine = br.readLine()) != null) {
+ response.append(responseLine.trim());
+ }
+
+ body = response.toString();
+
+ }
+ return body;
}
/**
@@ -127,26 +148,34 @@ public class ZenodoAPIClient implements Serializable {
*/
public int sendMretadata(String metadata) throws IOException {
- OkHttpClient httpClient = new OkHttpClient.Builder().connectTimeout(600, TimeUnit.SECONDS).build();
+ URL url = new URL(urlString + "/" + deposition_id);
+ HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+ conn.setRequestProperty(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString());
+ conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
+ conn.setDoOutput(true);
+ conn.setRequestMethod("PUT");
- RequestBody body = RequestBody.create(metadata, MEDIA_TYPE_JSON);
-
- Request request = new Request.Builder()
- .url(urlString + "/" + deposition_id)
- .addHeader(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString()) // add request headers
- .addHeader(HttpHeaders.AUTHORIZATION, "Bearer " + access_token)
- .put(body)
- .build();
-
- try (Response response = httpClient.newCall(request).execute()) {
-
- if (!response.isSuccessful())
- throw new IOException("Unexpected code " + response + response.body().string());
-
- return response.code();
+ try (OutputStream os = conn.getOutputStream()) {
+ byte[] input = metadata.getBytes("utf-8");
+ os.write(input, 0, input.length);
}
+ final int responseCode = conn.getResponseCode();
+ conn.disconnect();
+ if (!checkOKStatus(responseCode))
+ throw new IOException("Unexpected code " + responseCode + getBody(conn));
+
+ return responseCode;
+
+ }
+
+	private boolean checkOKStatus(int responseCode) {
+
+		return HttpURLConnection.HTTP_OK == responseCode ||
+			HttpURLConnection.HTTP_CREATED == responseCode;
+	}
/**
@@ -155,6 +184,7 @@ public class ZenodoAPIClient implements Serializable {
* @return response code
* @throws IOException
*/
+ @Deprecated
public int publish() throws IOException {
String json = "{}";
@@ -191,31 +221,37 @@ public class ZenodoAPIClient implements Serializable {
* @throws MissingConceptDoiException
*/
public int newVersion(String concept_rec_id) throws IOException, MissingConceptDoiException {
- setDepositionId(concept_rec_id);
+ setDepositionId(concept_rec_id, 1);
String json = "{}";
- OkHttpClient httpClient = new OkHttpClient.Builder().connectTimeout(600, TimeUnit.SECONDS).build();
+ URL url = new URL(urlString + "/" + deposition_id + "/actions/newversion");
+ HttpURLConnection conn = (HttpURLConnection) url.openConnection();
- RequestBody body = RequestBody.create(json, MEDIA_TYPE_JSON);
+ conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
+ conn.setDoOutput(true);
+ conn.setRequestMethod("POST");
- Request request = new Request.Builder()
- .url(urlString + "/" + deposition_id + "/actions/newversion")
- .addHeader(HttpHeaders.AUTHORIZATION, "Bearer " + access_token)
- .post(body)
- .build();
-
- try (Response response = httpClient.newCall(request).execute()) {
-
- if (!response.isSuccessful())
- throw new IOException("Unexpected code " + response + response.body().string());
-
- ZenodoModel zenodoModel = new Gson().fromJson(response.body().string(), ZenodoModel.class);
- String latest_draft = zenodoModel.getLinks().getLatest_draft();
- deposition_id = latest_draft.substring(latest_draft.lastIndexOf("/") + 1);
- bucket = getBucket(latest_draft);
- return response.code();
+ try (OutputStream os = conn.getOutputStream()) {
+ byte[] input = json.getBytes("utf-8");
+ os.write(input, 0, input.length);
}
+
+ String body = getBody(conn);
+
+ int responseCode = conn.getResponseCode();
+
+ conn.disconnect();
+ if (!checkOKStatus(responseCode))
+ throw new IOException("Unexpected code " + responseCode + body);
+
+ ZenodoModel zenodoModel = new Gson().fromJson(body, ZenodoModel.class);
+ String latest_draft = zenodoModel.getLinks().getLatest_draft();
+ deposition_id = latest_draft.substring(latest_draft.lastIndexOf("/") + 1);
+ bucket = getBucket(latest_draft);
+
+ return responseCode;
+
}
/**
@@ -233,29 +269,38 @@ public class ZenodoAPIClient implements Serializable {
this.deposition_id = deposition_id;
- OkHttpClient httpClient = new OkHttpClient.Builder().connectTimeout(600, TimeUnit.SECONDS).build();
+ String json = "{}";
- Request request = new Request.Builder()
- .url(urlString + "/" + deposition_id)
- .addHeader("Authorization", "Bearer " + access_token)
- .build();
-
- try (Response response = httpClient.newCall(request).execute()) {
-
- if (!response.isSuccessful())
- throw new IOException("Unexpected code " + response + response.body().string());
-
- ZenodoModel zenodoModel = new Gson().fromJson(response.body().string(), ZenodoModel.class);
- bucket = zenodoModel.getLinks().getBucket();
- return response.code();
+ URL url = new URL(urlString + "/" + deposition_id);
+ HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+ conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
+ conn.setRequestMethod("POST");
+ conn.setDoOutput(true);
+ try (OutputStream os = conn.getOutputStream()) {
+ byte[] input = json.getBytes("utf-8");
+ os.write(input, 0, input.length);
}
+ String body = getBody(conn);
+
+ int responseCode = conn.getResponseCode();
+ conn.disconnect();
+
+ if (!checkOKStatus(responseCode))
+ throw new IOException("Unexpected code " + responseCode + body);
+
+ ZenodoModel zenodoModel = new Gson().fromJson(body, ZenodoModel.class);
+ bucket = zenodoModel.getLinks().getBucket();
+
+ return responseCode;
+
}
- private void setDepositionId(String concept_rec_id) throws IOException, MissingConceptDoiException {
+ private void setDepositionId(String concept_rec_id, Integer page) throws IOException, MissingConceptDoiException {
- ZenodoModelList zenodoModelList = new Gson().fromJson(getPrevDepositions(), ZenodoModelList.class);
+ ZenodoModelList zenodoModelList = new Gson()
+ .fromJson(getPrevDepositions(String.valueOf(page)), ZenodoModelList.class);
for (ZenodoModel zm : zenodoModelList) {
if (zm.getConceptrecid().equals(concept_rec_id)) {
@@ -263,55 +308,57 @@ public class ZenodoAPIClient implements Serializable {
return;
}
}
-
- throw new MissingConceptDoiException("The concept record id specified was missing in the list of depositions");
+ if (zenodoModelList.size() == 0)
+ throw new MissingConceptDoiException(
+ "The concept record id specified was missing in the list of depositions");
+ setDepositionId(concept_rec_id, page + 1);
}
- private String getPrevDepositions() throws IOException {
- OkHttpClient httpClient = new OkHttpClient.Builder().connectTimeout(600, TimeUnit.SECONDS).build();
+ private String getPrevDepositions(String page) throws IOException {
- Request request = new Request.Builder()
- .url(urlString)
- .addHeader(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString()) // add request headers
- .addHeader(HttpHeaders.AUTHORIZATION, "Bearer " + access_token)
- .get()
- .build();
+ HttpUrl.Builder urlBuilder = HttpUrl.parse(urlString).newBuilder();
+ urlBuilder.addQueryParameter("page", page);
- try (Response response = httpClient.newCall(request).execute()) {
+ URL url = new URL(urlBuilder.build().toString());
+ HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+ conn.setRequestProperty(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString());
+ conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
+ conn.setDoOutput(true);
+ conn.setRequestMethod("GET");
- if (!response.isSuccessful())
- throw new IOException("Unexpected code " + response + response.body().string());
+ String body = getBody(conn);
- return response.body().string();
+ int responseCode = conn.getResponseCode();
- }
+ conn.disconnect();
+ if (!checkOKStatus(responseCode))
+ throw new IOException("Unexpected code " + responseCode + body);
+
+ return body;
}
- private String getBucket(String url) throws IOException {
- OkHttpClient httpClient = new OkHttpClient.Builder()
- .connectTimeout(600, TimeUnit.SECONDS)
- .build();
+	private String getBucket(String inputUrl) throws IOException {
- Request request = new Request.Builder()
- .url(url)
- .addHeader(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString()) // add request headers
- .addHeader(HttpHeaders.AUTHORIZATION, "Bearer " + access_token)
- .get()
- .build();
+		URL url = new URL(inputUrl);
+ HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+ conn.setRequestProperty(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString());
+ conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
+ conn.setDoOutput(true);
+ conn.setRequestMethod("GET");
- try (Response response = httpClient.newCall(request).execute()) {
+ String body = getBody(conn);
- if (!response.isSuccessful())
- throw new IOException("Unexpected code " + response + response.body().string());
+ int responseCode = conn.getResponseCode();
- // Get response body
- ZenodoModel zenodoModel = new Gson().fromJson(response.body().string(), ZenodoModel.class);
+ conn.disconnect();
+ if (!checkOKStatus(responseCode))
+ throw new IOException("Unexpected code " + responseCode + body);
- return zenodoModel.getLinks().getBucket();
+ ZenodoModel zenodoModel = new Gson().fromJson(body, ZenodoModel.class);
- }
+ return zenodoModel.getLinks().getBucket();
}
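A sketch of a typical deposit flow with the refactored HttpURLConnection-based client; the (urlString, access_token) constructor, the token and the metadata payload are assumptions.

import java.io.FileInputStream;
import java.io.InputStream;

import eu.dnetlib.dhp.common.api.ZenodoAPIClient;

public class ZenodoUploadExample {
	public static void main(String[] args) throws Exception {
		ZenodoAPIClient client = new ZenodoAPIClient(
			"https://zenodo.org/api/deposit/depositions", "ACCESS_TOKEN"); // placeholder token
		client.newDeposition();
		try (InputStream is = new FileInputStream("/tmp/dump.tar")) {
			client.uploadIS(is, "dump.tar"); // the explicit length parameter was dropped by this change
		}
		client.sendMretadata("{\"metadata\": {\"title\": \"Example deposit\"}}");
	}
}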
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/common/collection/DecompressTarGz.java b/dhp-common/src/main/java/eu/dnetlib/dhp/common/collection/DecompressTarGz.java
new file mode 100644
index 000000000..8bcf14ba4
--- /dev/null
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/common/collection/DecompressTarGz.java
@@ -0,0 +1,40 @@
+
+package eu.dnetlib.dhp.common.collection;
+
+import java.io.BufferedOutputStream;
+import java.io.IOException;
+import java.util.zip.GZIPOutputStream;
+
+import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
+import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
+import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+public class DecompressTarGz {
+
+ public static void doExtract(FileSystem fs, String outputPath, String tarGzPath) throws IOException {
+
+ FSDataInputStream inputFileStream = fs.open(new Path(tarGzPath));
+ try (TarArchiveInputStream tais = new TarArchiveInputStream(
+ new GzipCompressorInputStream(inputFileStream))) {
+ TarArchiveEntry entry = null;
+ while ((entry = tais.getNextTarEntry()) != null) {
+ if (!entry.isDirectory()) {
+ try (
+ FSDataOutputStream out = fs
+ .create(new Path(outputPath.concat(entry.getName()).concat(".gz")));
+ GZIPOutputStream gzipOs = new GZIPOutputStream(new BufferedOutputStream(out))) {
+
+ IOUtils.copy(tais, gzipOs);
+
+ }
+
+ }
+ }
+ }
+ }
+}
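A sketch of how DecompressTarGz.doExtract could be called; the paths and the namenode URI are placeholders.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

import eu.dnetlib.dhp.common.collection.DecompressTarGz;

public class DecompressTarGzExample {
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://namenode.example.org:8020"); // placeholder namenode
		FileSystem fs = FileSystem.get(conf);
		// each regular entry of the archive is re-compressed to <outputPath><entryName>.gz
		DecompressTarGz.doExtract(fs, "/data/unpacked/", "/data/archives/dump.tar.gz");
	}
}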
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/common/vocabulary/Vocabulary.java b/dhp-common/src/main/java/eu/dnetlib/dhp/common/vocabulary/Vocabulary.java
index b3eb98d4f..2ab23bda6 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/common/vocabulary/Vocabulary.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/common/vocabulary/Vocabulary.java
@@ -4,6 +4,7 @@ package eu.dnetlib.dhp.common.vocabulary;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
+import java.util.Objects;
import java.util.Optional;
import org.apache.commons.lang3.StringUtils;
@@ -66,21 +67,39 @@ public class Vocabulary implements Serializable {
}
public Qualifier getTermAsQualifier(final String termId) {
- if (StringUtils.isBlank(termId)) {
+ return getTermAsQualifier(termId, false);
+ }
+
+ public Qualifier getTermAsQualifier(final String termId, boolean strict) {
+ final VocabularyTerm term = getTerm(termId);
+ if (Objects.nonNull(term)) {
+ return OafMapperUtils.qualifier(term.getId(), term.getName(), getId(), getName());
+ } else if (Objects.isNull(term) && strict) {
return OafMapperUtils.unknown(getId(), getName());
- } else if (termExists(termId)) {
- final VocabularyTerm t = getTerm(termId);
- return OafMapperUtils.qualifier(t.getId(), t.getName(), getId(), getName());
} else {
return OafMapperUtils.qualifier(termId, termId, getId(), getName());
}
}
public Qualifier getSynonymAsQualifier(final String syn) {
+ return getSynonymAsQualifier(syn, false);
+ }
+
+ public Qualifier getSynonymAsQualifier(final String syn, boolean strict) {
return Optional
.ofNullable(getTermBySynonym(syn))
- .map(term -> getTermAsQualifier(term.getId()))
+ .map(term -> getTermAsQualifier(term.getId(), strict))
.orElse(null);
}
+ public Qualifier lookup(String id) {
+ return lookup(id, false);
+ }
+
+ public Qualifier lookup(String id, boolean strict) {
+ return Optional
+ .ofNullable(getSynonymAsQualifier(id, strict))
+ .orElse(getTermAsQualifier(id, strict));
+ }
+
}
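A sketch of the strict vs. lenient lookup behaviour introduced here, assuming the standard dhp-schemas Qualifier import and the Vocabulary constructor and addTerm method used elsewhere in this module.

import eu.dnetlib.dhp.common.vocabulary.Vocabulary;
import eu.dnetlib.dhp.schema.oaf.Qualifier;

public class VocabularyLookupExample {
	public static void main(String[] args) {
		Vocabulary languages = new Vocabulary("dnet:languages", "dnet:languages");
		languages.addTerm("eng", "English");

		Qualifier known = languages.getTermAsQualifier("eng");          // eng / English
		Qualifier strict = languages.getTermAsQualifier("xyz", true);   // UNKNOWN qualifier in strict mode
		Qualifier lenient = languages.getTermAsQualifier("xyz", false); // pass-through: xyz / xyz
		System.out.println(known + " " + strict + " " + lenient);
	}
}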
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/common/vocabulary/VocabularyGroup.java b/dhp-common/src/main/java/eu/dnetlib/dhp/common/vocabulary/VocabularyGroup.java
index d5f57849c..fc7175270 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/common/vocabulary/VocabularyGroup.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/common/vocabulary/VocabularyGroup.java
@@ -57,9 +57,17 @@ public class VocabularyGroup implements Serializable {
final String syn = arr[2].trim();
vocs.addSynonyms(vocId, termId, syn);
+
}
}
+ // add the term names as synonyms
+ vocs.vocs.values().forEach(voc -> {
+ voc.getTerms().values().forEach(term -> {
+ voc.addSynonym(term.getName().toLowerCase(), term.getId());
+ });
+ });
+
return vocs;
}
@@ -73,6 +81,13 @@ public class VocabularyGroup implements Serializable {
vocs.put(id.toLowerCase(), new Vocabulary(id, name));
}
+	public Optional<Vocabulary> find(final String vocId) {
+ return Optional
+ .ofNullable(vocId)
+ .map(String::toLowerCase)
+ .map(vocs::get);
+ }
+
public void addTerm(final String vocId, final String id, final String name) {
if (vocabularyExists(vocId)) {
vocs.get(vocId.toLowerCase()).addTerm(id, name);
diff --git a/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/DispatchEntitiesSparkJob.java b/dhp-common/src/main/java/eu/dnetlib/dhp/oa/merge/DispatchEntitiesSparkJob.java
similarity index 53%
rename from dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/DispatchEntitiesSparkJob.java
rename to dhp-common/src/main/java/eu/dnetlib/dhp/oa/merge/DispatchEntitiesSparkJob.java
index ea738836b..4d2ccb178 100644
--- a/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/DispatchEntitiesSparkJob.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/oa/merge/DispatchEntitiesSparkJob.java
@@ -1,5 +1,5 @@
-package eu.dnetlib.dhp.oa.dedup;
+package eu.dnetlib.dhp.oa.merge;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
@@ -11,25 +11,18 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.MapFunction;
-import org.apache.spark.sql.Encoders;
-import org.apache.spark.sql.SaveMode;
-import org.apache.spark.sql.SparkSession;
+import org.apache.spark.sql.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.fasterxml.jackson.databind.ObjectMapper;
-
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.HdfsSupport;
-import eu.dnetlib.dhp.schema.oaf.Oaf;
-import eu.dnetlib.dhp.schema.oaf.OafEntity;
+import eu.dnetlib.dhp.schema.common.ModelSupport;
public class DispatchEntitiesSparkJob {
private static final Logger log = LoggerFactory.getLogger(DispatchEntitiesSparkJob.class);
- private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
-
public static void main(String[] args) throws Exception {
String jsonConfiguration = IOUtils
@@ -38,7 +31,7 @@ public class DispatchEntitiesSparkJob {
.requireNonNull(
DispatchEntitiesSparkJob.class
.getResourceAsStream(
- "/eu/dnetlib/dhp/oa/dedup/dispatch_entities_parameters.json")));
+ "/eu/dnetlib/dhp/oa/merge/dispatch_entities_parameters.json")));
final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
parser.parseArgument(args);
@@ -54,11 +47,8 @@ public class DispatchEntitiesSparkJob {
String outputPath = parser.get("outputPath");
log.info("outputPath: {}", outputPath);
- String graphTableClassName = parser.get("graphTableClassName");
- log.info("graphTableClassName: {}", graphTableClassName);
-
- @SuppressWarnings("unchecked")
- Class<? extends OafEntity> entityClazz = (Class<? extends OafEntity>) Class.forName(graphTableClassName);
+ boolean filterInvisible = Boolean.valueOf(parser.get("filterInvisible"));
+ log.info("filterInvisible: {}", filterInvisible);
SparkConf conf = new SparkConf();
runWithSparkSession(
@@ -66,32 +56,43 @@ public class DispatchEntitiesSparkJob {
isSparkSessionManaged,
spark -> {
HdfsSupport.remove(outputPath, spark.sparkContext().hadoopConfiguration());
- dispatchEntities(spark, inputPath, entityClazz, outputPath);
+ dispatchEntities(spark, inputPath, outputPath, filterInvisible);
});
}
- private static <T extends OafEntity> void dispatchEntities(
+ private static void dispatchEntities(
SparkSession spark,
String inputPath,
- Class<T> clazz,
- String outputPath) {
+ String outputPath,
+ boolean filterInvisible) {
- spark
- .read()
- .textFile(inputPath)
- .filter((FilterFunction<String>) s -> isEntityType(s, clazz))
- .map((MapFunction<String, String>) s -> StringUtils.substringAfter(s, "|"), Encoders.STRING())
- .map(
- (MapFunction<String, T>) value -> OBJECT_MAPPER.readValue(value, clazz),
- Encoders.bean(clazz))
- .write()
- .mode(SaveMode.Overwrite)
- .option("compression", "gzip")
- .json(outputPath);
+ Dataset<String> df = spark.read().textFile(inputPath);
+
+ ModelSupport.oafTypes.entrySet().parallelStream().forEach(entry -> {
+ String entityType = entry.getKey();
+ Class<?> clazz = entry.getValue();
+
+ if (!entityType.equalsIgnoreCase("relation")) {
+ Dataset<Row> entityDF = spark
+ .read()
+ .schema(Encoders.bean(clazz).schema())
+ .json(
+ df
+ .filter((FilterFunction<String>) s -> s.startsWith(clazz.getName()))
+ .map(
+ (MapFunction<String, String>) s -> StringUtils.substringAfter(s, "|"),
+ Encoders.STRING()));
+
+ if (filterInvisible) {
+ entityDF = entityDF.filter("dataInfo.invisible != true");
+ }
+
+ entityDF
+ .write()
+ .mode(SaveMode.Overwrite)
+ .option("compression", "gzip")
+ .json(outputPath + "/" + entityType);
+ }
+ });
}
-
- private static boolean isEntityType(final String s, final Class<?> clazz) {
- return StringUtils.substringBefore(s, "|").equals(clazz.getName());
- }
-
}
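
The dispatcher assumes the grouped input is a text file where each line is the fully qualified entity class name, a '|' separator, and the entity JSON, and it writes one sub-directory per entity type (skipping relations). A minimal sketch of producing such a line, with an illustrative ObjectMapper rather than the job's own serialization code:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import eu.dnetlib.dhp.schema.oaf.Publication;

    public class DispatchInputLineSketch {

        private static final ObjectMapper MAPPER = new ObjectMapper();

        // Builds one input line in the "<className>|<json>" shape the dispatcher filters on.
        public static String toDispatchLine(Publication p) throws Exception {
            return Publication.class.getName() + "|" + MAPPER.writeValueAsString(p);
        }
    }

With filterInvisible=true, records whose dataInfo.invisible flag is set are dropped, and the output lands in <outputPath>/publication, <outputPath>/dataset, and so on.
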
diff --git a/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/GroupEntitiesSparkJob.java b/dhp-common/src/main/java/eu/dnetlib/dhp/oa/merge/GroupEntitiesSparkJob.java
similarity index 98%
rename from dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/GroupEntitiesSparkJob.java
rename to dhp-common/src/main/java/eu/dnetlib/dhp/oa/merge/GroupEntitiesSparkJob.java
index a19f86380..e652bd5b6 100644
--- a/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/GroupEntitiesSparkJob.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/oa/merge/GroupEntitiesSparkJob.java
@@ -1,5 +1,5 @@
-package eu.dnetlib.dhp.oa.dedup;
+package eu.dnetlib.dhp.oa.merge;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
import static eu.dnetlib.dhp.utils.DHPUtils.toSeq;
@@ -53,7 +53,7 @@ public class GroupEntitiesSparkJob {
.toString(
GroupEntitiesSparkJob.class
.getResourceAsStream(
- "/eu/dnetlib/dhp/oa/dedup/group_graph_entities_parameters.json"));
+ "/eu/dnetlib/dhp/oa/merge/group_graph_entities_parameters.json"));
final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
parser.parseArgument(args);
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/DoiCleaningRule.java b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/DoiCleaningRule.java
new file mode 100644
index 000000000..1a7482685
--- /dev/null
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/DoiCleaningRule.java
@@ -0,0 +1,14 @@
+
+package eu.dnetlib.dhp.schema.oaf.utils;
+
+public class DoiCleaningRule {
+
+ public static String clean(final String doi) {
+ return doi
+ .toLowerCase()
+ .replaceAll("\\s", "")
+ .replaceAll("^doi:", "")
+ .replaceFirst(CleaningFunctions.DOI_PREFIX_REGEX, CleaningFunctions.DOI_PREFIX);
+ }
+
+}
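
A hedged example of the DOI normalisation; the effect of CleaningFunctions.DOI_PREFIX_REGEX / DOI_PREFIX is not visible in this hunk, but it presumably rewrites resolver-style prefixes to the bare 10.x form:

    import eu.dnetlib.dhp.schema.oaf.utils.DoiCleaningRule;

    public class DoiCleaningSketch {
        public static void main(String[] args) {
            // lower-cases, strips whitespace and a leading "doi:" label
            String cleaned = DoiCleaningRule.clean("DOI: 10.1234/ABC-DEF");
            System.out.println(cleaned); // expected: 10.1234/abc-def, modulo the prefix normalisation
        }
    }
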
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/FundRefCleaningRule.java b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/FundRefCleaningRule.java
new file mode 100644
index 000000000..a267b8b88
--- /dev/null
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/FundRefCleaningRule.java
@@ -0,0 +1,25 @@
+
+package eu.dnetlib.dhp.schema.oaf.utils;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class FundRefCleaningRule {
+
+ public static final Pattern PATTERN = Pattern.compile("\\d+");
+
+ public static String clean(final String fundRefId) {
+
+ String s = fundRefId
+ .toLowerCase()
+ .replaceAll("\\s", "");
+
+ Matcher m = PATTERN.matcher(s);
+ if (m.find()) {
+ return m.group();
+ } else {
+ return "";
+ }
+ }
+
+}
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/GraphCleaningFunctions.java b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/GraphCleaningFunctions.java
index 592580ab8..8afa41f95 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/GraphCleaningFunctions.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/GraphCleaningFunctions.java
@@ -1,6 +1,8 @@
package eu.dnetlib.dhp.schema.oaf.utils;
+import static eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils.getProvenance;
+
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
@@ -16,6 +18,7 @@ import com.github.sisyphsu.dateparser.DateParserUtils;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
+import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.oaf.*;
@@ -32,64 +35,217 @@ public class GraphCleaningFunctions extends CleaningFunctions {
public static final String TITLE_FILTER_REGEX = String.format("(%s)|\\W|\\d", TITLE_TEST);
public static final int TITLE_FILTER_RESIDUAL_LENGTH = 5;
+ private static final String NAME_CLEANING_REGEX = "[\\r\\n\\t\\s]+";
+
+ public static <T extends Oaf> T cleanContext(T value, String contextId, String verifyParam) {
+ if (ModelSupport.isSubClass(value, Result.class)) {
+ final Result res = (Result) value;
+ if (shouldCleanContext(res, verifyParam)) {
+ res
+ .setContext(
+ res
+ .getContext()
+ .stream()
+ .filter(c -> !StringUtils.startsWith(c.getId().toLowerCase(), contextId))
+ .collect(Collectors.toList()));
+ }
+ return (T) res;
+ } else {
+ return value;
+ }
+ }
+
+ private static boolean shouldCleanContext(Result res, String verifyParam) {
+ boolean titleMatch = res
+ .getTitle()
+ .stream()
+ .filter(
+ t -> t
+ .getQualifier()
+ .getClassid()
+ .equalsIgnoreCase(ModelConstants.MAIN_TITLE_QUALIFIER.getClassid()))
+ .anyMatch(t -> t.getValue().toLowerCase().startsWith(verifyParam.toLowerCase()));
+
+ return titleMatch && Objects.nonNull(res.getContext());
+ }
+
+ public static <T extends Oaf> T cleanCountry(T value, String[] verifyParam, Set<String> hostedBy,
+ String collectedfrom, String country) {
+ if (ModelSupport.isSubClass(value, Result.class)) {
+ final Result res = (Result) value;
+ if (res.getInstance().stream().anyMatch(i -> hostedBy.contains(i.getHostedby().getKey())) ||
+ !res.getCollectedfrom().stream().anyMatch(cf -> cf.getValue().equals(collectedfrom))) {
+ return (T) res;
+ }
+
+ List<StructuredProperty> ids = getPidsAndAltIds(res).collect(Collectors.toList());
+ if (ids
+ .stream()
+ .anyMatch(
+ p -> p
+ .getQualifier()
+ .getClassid()
+ .equals(PidType.doi.toString()) && pidInParam(p.getValue(), verifyParam))) {
+ res
+ .setCountry(
+ res
+ .getCountry()
+ .stream()
+ .filter(
+ c -> toTakeCountry(c, country))
+ .collect(Collectors.toList()));
+ }
+
+ return (T) res;
+ } else {
+ return value;
+ }
+ }
+
+ private static <T extends Result> Stream<StructuredProperty> getPidsAndAltIds(T r) {
+ final Stream<StructuredProperty> resultPids = Optional
+ .ofNullable(r.getPid())
+ .map(Collection::stream)
+ .orElse(Stream.empty());
+
+ final Stream<StructuredProperty> instancePids = Optional
+ .ofNullable(r.getInstance())
+ .map(
+ instance -> instance
+ .stream()
+ .flatMap(
+ i -> Optional
+ .ofNullable(i.getPid())
+ .map(Collection::stream)
+ .orElse(Stream.empty())))
+ .orElse(Stream.empty());
+
+ final Stream<StructuredProperty> instanceAltIds = Optional
+ .ofNullable(r.getInstance())
+ .map(
+ instance -> instance
+ .stream()
+ .flatMap(
+ i -> Optional
+ .ofNullable(i.getAlternateIdentifier())
+ .map(Collection::stream)
+ .orElse(Stream.empty())))
+ .orElse(Stream.empty());
+
+ return Stream
+ .concat(
+ Stream.concat(resultPids, instancePids),
+ instanceAltIds);
+ }
+
+ private static boolean pidInParam(String value, String[] verifyParam) {
+ for (String s : verifyParam)
+ if (value.startsWith(s))
+ return true;
+ return false;
+ }
+
+ private static boolean toTakeCountry(Country c, String country) {
+ // If dataInfo or dataInfo.inferenceprovenance is not set, the country cannot have been
+ // inserted via propagation
+ if (!Optional.ofNullable(c.getDataInfo()).isPresent())
+ return true;
+ if (!Optional.ofNullable(c.getDataInfo().getInferenceprovenance()).isPresent())
+ return true;
+ return !(c
+ .getClassid()
+ .equalsIgnoreCase(country) &&
+ c.getDataInfo().getInferenceprovenance().equals("propagation"));
+ }
public static <T extends Oaf> T fixVocabularyNames(T value) {
- if (value instanceof Datasource) {
- // nothing to clean here
- } else if (value instanceof Project) {
- // nothing to clean here
- } else if (value instanceof Organization) {
- Organization o = (Organization) value;
- if (Objects.nonNull(o.getCountry())) {
- fixVocabName(o.getCountry(), ModelConstants.DNET_COUNTRY_TYPE);
+ if (value instanceof OafEntity) {
+
+ OafEntity e = (OafEntity) value;
+
+ Optional
+ .ofNullable(e.getPid())
+ .ifPresent(pid -> pid.forEach(p -> fixVocabName(p.getQualifier(), ModelConstants.DNET_PID_TYPES)));
+
+ if (value instanceof Result) {
+ Result r = (Result) value;
+
+ fixVocabName(r.getLanguage(), ModelConstants.DNET_LANGUAGES);
+ fixVocabName(r.getResourcetype(), ModelConstants.DNET_DATA_CITE_RESOURCE);
+ fixVocabName(r.getBestaccessright(), ModelConstants.DNET_ACCESS_MODES);
+
+ if (Objects.nonNull(r.getSubject())) {
+ r.getSubject().forEach(s -> fixVocabName(s.getQualifier(), ModelConstants.DNET_SUBJECT_TYPOLOGIES));
+ }
+ if (Objects.nonNull(r.getInstance())) {
+ for (Instance i : r.getInstance()) {
+ fixVocabName(i.getAccessright(), ModelConstants.DNET_ACCESS_MODES);
+ fixVocabName(i.getRefereed(), ModelConstants.DNET_REVIEW_LEVELS);
+ Optional
+ .ofNullable(i.getPid())
+ .ifPresent(
+ pid -> pid.forEach(p -> fixVocabName(p.getQualifier(), ModelConstants.DNET_PID_TYPES)));
+
+ }
+ }
+ if (Objects.nonNull(r.getAuthor())) {
+ r.getAuthor().stream().filter(Objects::nonNull).forEach(a -> {
+ if (Objects.nonNull(a.getPid())) {
+ a.getPid().stream().filter(Objects::nonNull).forEach(p -> {
+ fixVocabName(p.getQualifier(), ModelConstants.DNET_PID_TYPES);
+ });
+ }
+ });
+ }
+ if (value instanceof Publication) {
+
+ } else if (value instanceof Dataset) {
+
+ } else if (value instanceof OtherResearchProduct) {
+
+ } else if (value instanceof Software) {
+
+ }
+ } else if (value instanceof Datasource) {
+ // nothing to clean here
+ } else if (value instanceof Project) {
+ // nothing to clean here
+ } else if (value instanceof Organization) {
+ Organization o = (Organization) value;
+ if (Objects.nonNull(o.getCountry())) {
+ fixVocabName(o.getCountry(), ModelConstants.DNET_COUNTRY_TYPE);
+ }
+
}
} else if (value instanceof Relation) {
// nothing to clean here
- } else if (value instanceof Result) {
-
- Result r = (Result) value;
-
- fixVocabName(r.getLanguage(), ModelConstants.DNET_LANGUAGES);
- fixVocabName(r.getResourcetype(), ModelConstants.DNET_DATA_CITE_RESOURCE);
- fixVocabName(r.getBestaccessright(), ModelConstants.DNET_ACCESS_MODES);
-
- if (Objects.nonNull(r.getSubject())) {
- r.getSubject().forEach(s -> fixVocabName(s.getQualifier(), ModelConstants.DNET_SUBJECT_TYPOLOGIES));
- }
- if (Objects.nonNull(r.getInstance())) {
- for (Instance i : r.getInstance()) {
- fixVocabName(i.getAccessright(), ModelConstants.DNET_ACCESS_MODES);
- fixVocabName(i.getRefereed(), ModelConstants.DNET_REVIEW_LEVELS);
- }
- }
- if (Objects.nonNull(r.getAuthor())) {
- r.getAuthor().stream().filter(Objects::nonNull).forEach(a -> {
- if (Objects.nonNull(a.getPid())) {
- a.getPid().stream().filter(Objects::nonNull).forEach(p -> {
- fixVocabName(p.getQualifier(), ModelConstants.DNET_PID_TYPES);
- });
- }
- });
- }
- if (value instanceof Publication) {
-
- } else if (value instanceof Dataset) {
-
- } else if (value instanceof OtherResearchProduct) {
-
- } else if (value instanceof Software) {
-
- }
}
return value;
}
public static <T extends Oaf> boolean filter(T value) {
+ if (!(value instanceof Relation) && (Boolean.TRUE
+ .equals(
+ Optional
+ .ofNullable(value)
+ .map(
+ o -> Optional
+ .ofNullable(o.getDataInfo())
+ .map(
+ d -> Optional
+ .ofNullable(d.getInvisible())
+ .orElse(true))
+ .orElse(false))
+ .orElse(true)))) {
+ return true;
+ }
+
if (value instanceof Datasource) {
// nothing to evaluate here
} else if (value instanceof Project) {
- // nothing to evaluate here
+ final Project p = (Project) value;
+ return Objects.nonNull(p.getCode()) && StringUtils.isNotBlank(p.getCode().getValue());
} else if (value instanceof Organization) {
// nothing to evaluate here
} else if (value instanceof Relation) {
@@ -115,16 +271,344 @@ public class GraphCleaningFunctions extends CleaningFunctions {
return true;
}
- public static <T extends Oaf> T cleanup(T value) {
- if (value instanceof Datasource) {
- // nothing to clean here
- } else if (value instanceof Project) {
- // nothing to clean here
- } else if (value instanceof Organization) {
- Organization o = (Organization) value;
- if (Objects.isNull(o.getCountry()) || StringUtils.isBlank(o.getCountry().getClassid())) {
- o.setCountry(ModelConstants.UNKNOWN_COUNTRY);
+ public static <T extends Oaf> T cleanup(T value, VocabularyGroup vocs) {
+
+ if (value instanceof OafEntity) {
+
+ OafEntity e = (OafEntity) value;
+ if (Objects.nonNull(e.getPid())) {
+ e.setPid(processPidCleaning(e.getPid()));
}
+
+ if (value instanceof Datasource) {
+ // nothing to clean here
+ } else if (value instanceof Project) {
+ // nothing to clean here
+ } else if (value instanceof Organization) {
+ Organization o = (Organization) value;
+ if (Objects.isNull(o.getCountry()) || StringUtils.isBlank(o.getCountry().getClassid())) {
+ o.setCountry(ModelConstants.UNKNOWN_COUNTRY);
+ }
+ } else if (value instanceof Result) {
+ Result r = (Result) value;
+
+ if (Objects.nonNull(r.getFulltext())
+ && (ModelConstants.SOFTWARE_RESULTTYPE_CLASSID.equals(r.getResulttype().getClassid()) ||
+ ModelConstants.DATASET_RESULTTYPE_CLASSID.equals(r.getResulttype().getClassid()))) {
+ r.setFulltext(null);
+
+ }
+
+ if (Objects.nonNull(r.getDateofacceptance())) {
+ Optional<String> date = cleanDateField(r.getDateofacceptance());
+ if (date.isPresent()) {
+ r.getDateofacceptance().setValue(date.get());
+ } else {
+ r.setDateofacceptance(null);
+ }
+ }
+ if (Objects.nonNull(r.getRelevantdate())) {
+ r
+ .setRelevantdate(
+ r
+ .getRelevantdate()
+ .stream()
+ .filter(Objects::nonNull)
+ .filter(sp -> Objects.nonNull(sp.getQualifier()))
+ .filter(sp -> StringUtils.isNotBlank(sp.getQualifier().getClassid()))
+ .map(sp -> {
+ sp.setValue(GraphCleaningFunctions.cleanDate(sp.getValue()));
+ return sp;
+ })
+ .filter(sp -> StringUtils.isNotBlank(sp.getValue()))
+ .collect(Collectors.toList()));
+ }
+ if (Objects.nonNull(r.getPublisher())) {
+ if (StringUtils.isBlank(r.getPublisher().getValue())) {
+ r.setPublisher(null);
+ } else {
+ r
+ .getPublisher()
+ .setValue(
+ r
+ .getPublisher()
+ .getValue()
+ .replaceAll(NAME_CLEANING_REGEX, " "));
+ }
+ }
+ if (Objects.isNull(r.getLanguage()) || StringUtils.isBlank(r.getLanguage().getClassid())) {
+ r
+ .setLanguage(
+ qualifier("und", "Undetermined", ModelConstants.DNET_LANGUAGES));
+ }
+ if (Objects.nonNull(r.getSubject())) {
+ List<Subject> subjects = Lists
+ .newArrayList(
+ r
+ .getSubject()
+ .stream()
+ .filter(Objects::nonNull)
+ .filter(sp -> StringUtils.isNotBlank(sp.getValue()))
+ .filter(sp -> Objects.nonNull(sp.getQualifier()))
+ .filter(sp -> StringUtils.isNotBlank(sp.getQualifier().getClassid()))
+ .map(s -> {
+ if ("dnet:result_subject".equals(s.getQualifier().getClassid())) {
+ s.getQualifier().setClassid(ModelConstants.DNET_SUBJECT_TYPOLOGIES);
+ s.getQualifier().setClassname(ModelConstants.DNET_SUBJECT_TYPOLOGIES);
+ }
+ return s;
+ })
+ .map(GraphCleaningFunctions::cleanValue)
+ .collect(
+ Collectors
+ .toMap(
+ s -> Optional
+ .ofNullable(s.getQualifier())
+ .map(q -> q.getClassid() + s.getValue())
+ .orElse(s.getValue()),
+ Function.identity(),
+ (s1, s2) -> Collections
+ .min(Lists.newArrayList(s1, s2), new SubjectProvenanceComparator())))
+ .values());
+ r.setSubject(subjects);
+ }
+ if (Objects.nonNull(r.getTitle())) {
+ r
+ .setTitle(
+ r
+ .getTitle()
+ .stream()
+ .filter(Objects::nonNull)
+ .filter(sp -> StringUtils.isNotBlank(sp.getValue()))
+ .filter(
+ sp -> {
+ final String title = sp
+ .getValue()
+ .toLowerCase();
+ final String decoded = Unidecode.decode(title);
+
+ if (StringUtils.contains(decoded, TITLE_TEST)) {
+ return decoded
+ .replaceAll(TITLE_FILTER_REGEX, "")
+ .length() > TITLE_FILTER_RESIDUAL_LENGTH;
+ }
+ return !decoded
+ .replaceAll("\\W|\\d", "")
+ .isEmpty();
+ })
+ .map(GraphCleaningFunctions::cleanValue)
+ .collect(Collectors.toList()));
+ }
+ if (Objects.nonNull(r.getFormat())) {
+ r
+ .setFormat(
+ r
+ .getFormat()
+ .stream()
+ .map(GraphCleaningFunctions::cleanValue)
+ .collect(Collectors.toList()));
+ }
+ if (Objects.nonNull(r.getDescription())) {
+ r
+ .setDescription(
+ r
+ .getDescription()
+ .stream()
+ .filter(Objects::nonNull)
+ .filter(sp -> StringUtils.isNotBlank(sp.getValue()))
+ .map(GraphCleaningFunctions::cleanValue)
+ .collect(Collectors.toList()));
+ }
+ if (Objects.isNull(r.getResourcetype()) || StringUtils.isBlank(r.getResourcetype().getClassid())) {
+ r
+ .setResourcetype(
+ qualifier(ModelConstants.UNKNOWN, "Unknown", ModelConstants.DNET_DATA_CITE_RESOURCE));
+ }
+ if (Objects.nonNull(r.getInstance())) {
+
+ for (Instance i : r.getInstance()) {
+ if (!vocs
+ .termExists(ModelConstants.DNET_PUBLICATION_RESOURCE, i.getInstancetype().getClassid())) {
+ if (r instanceof Publication) {
+ i
+ .setInstancetype(
+ OafMapperUtils
+ .qualifier(
+ "0038", "Other literature type",
+ ModelConstants.DNET_PUBLICATION_RESOURCE,
+ ModelConstants.DNET_PUBLICATION_RESOURCE));
+ } else if (r instanceof Dataset) {
+ i
+ .setInstancetype(
+ OafMapperUtils
+ .qualifier(
+ "0039", "Other dataset type", ModelConstants.DNET_PUBLICATION_RESOURCE,
+ ModelConstants.DNET_PUBLICATION_RESOURCE));
+ } else if (r instanceof Software) {
+ i
+ .setInstancetype(
+ OafMapperUtils
+ .qualifier(
+ "0040", "Other software type", ModelConstants.DNET_PUBLICATION_RESOURCE,
+ ModelConstants.DNET_PUBLICATION_RESOURCE));
+ } else if (r instanceof OtherResearchProduct) {
+ i
+ .setInstancetype(
+ OafMapperUtils
+ .qualifier(
+ "0020", "Other ORP type", ModelConstants.DNET_PUBLICATION_RESOURCE,
+ ModelConstants.DNET_PUBLICATION_RESOURCE));
+ }
+ }
+
+ if (Objects.nonNull(i.getPid())) {
+ i.setPid(processPidCleaning(i.getPid()));
+ }
+ if (Objects.nonNull(i.getAlternateIdentifier())) {
+ i.setAlternateIdentifier(processPidCleaning(i.getAlternateIdentifier()));
+ }
+ Optional
+ .ofNullable(i.getPid())
+ .ifPresent(pid -> {
+ final Set<StructuredProperty> pids = Sets.newHashSet(pid);
+ Optional
+ .ofNullable(i.getAlternateIdentifier())
+ .ifPresent(altId -> {
+ final Set<StructuredProperty> altIds = Sets.newHashSet(altId);
+ i.setAlternateIdentifier(Lists.newArrayList(Sets.difference(altIds, pids)));
+ });
+ });
+
+ if (Objects.isNull(i.getAccessright())
+ || StringUtils.isBlank(i.getAccessright().getClassid())) {
+ i
+ .setAccessright(
+ accessRight(
+ ModelConstants.UNKNOWN, ModelConstants.NOT_AVAILABLE,
+ ModelConstants.DNET_ACCESS_MODES));
+ }
+ if (Objects.isNull(i.getHostedby()) || StringUtils.isBlank(i.getHostedby().getKey())) {
+ i.setHostedby(ModelConstants.UNKNOWN_REPOSITORY);
+ }
+ if (Objects.isNull(i.getRefereed()) || StringUtils.isBlank(i.getRefereed().getClassid())) {
+ i.setRefereed(qualifier("0000", "Unknown", ModelConstants.DNET_REVIEW_LEVELS));
+ }
+ if (Objects.nonNull(i.getDateofacceptance())) {
+ Optional<String> date = cleanDateField(i.getDateofacceptance());
+ if (date.isPresent()) {
+ i.getDateofacceptance().setValue(date.get());
+ } else {
+ i.setDateofacceptance(null);
+ }
+ }
+ if (StringUtils.isNotBlank(i.getFulltext()) &&
+ (ModelConstants.SOFTWARE_RESULTTYPE_CLASSID.equals(r.getResulttype().getClassid()) ||
+ ModelConstants.DATASET_RESULTTYPE_CLASSID.equals(r.getResulttype().getClassid()))) {
+ i.setFulltext(null);
+ }
+ }
+ }
+ if (Objects.isNull(r.getBestaccessright())
+ || StringUtils.isBlank(r.getBestaccessright().getClassid())) {
+ Qualifier bestaccessrights = OafMapperUtils.createBestAccessRights(r.getInstance());
+ if (Objects.isNull(bestaccessrights)) {
+ r
+ .setBestaccessright(
+ qualifier(
+ ModelConstants.UNKNOWN, ModelConstants.NOT_AVAILABLE,
+ ModelConstants.DNET_ACCESS_MODES));
+ } else {
+ r.setBestaccessright(bestaccessrights);
+ }
+ }
+ if (Objects.nonNull(r.getAuthor())) {
+ r
+ .setAuthor(
+ r
+ .getAuthor()
+ .stream()
+ .filter(Objects::nonNull)
+ .filter(a -> StringUtils.isNotBlank(a.getFullname()))
+ .filter(a -> StringUtils.isNotBlank(a.getFullname().replaceAll("[\\W]", "")))
+ .map(GraphCleaningFunctions::cleanupAuthor)
+ .collect(Collectors.toList()));
+
+ boolean nullRank = r
+ .getAuthor()
+ .stream()
+ .anyMatch(a -> Objects.isNull(a.getRank()));
+ if (nullRank) {
+ int i = 1;
+ for (Author author : r.getAuthor()) {
+ author.setRank(i++);
+ }
+ }
+
+ for (Author a : r.getAuthor()) {
+ if (Objects.isNull(a.getPid())) {
+ a.setPid(Lists.newArrayList());
+ } else {
+ a
+ .setPid(
+ a
+ .getPid()
+ .stream()
+ .filter(Objects::nonNull)
+ .filter(p -> Objects.nonNull(p.getQualifier()))
+ .filter(p -> StringUtils.isNotBlank(p.getValue()))
+ .map(p -> {
+ // hack to distinguish orcid from orcid_pending
+ String pidProvenance = getProvenance(p.getDataInfo());
+ if (p
+ .getQualifier()
+ .getClassid()
+ .toLowerCase()
+ .contains(ModelConstants.ORCID)) {
+ if (pidProvenance
+ .equals(ModelConstants.SYSIMPORT_CROSSWALK_ENTITYREGISTRY)) {
+ p.getQualifier().setClassid(ModelConstants.ORCID);
+ } else {
+ p.getQualifier().setClassid(ModelConstants.ORCID_PENDING);
+ }
+ final String orcid = p
+ .getValue()
+ .trim()
+ .toLowerCase()
+ .replaceAll(ORCID_CLEANING_REGEX, "$1-$2-$3-$4");
+ if (orcid.length() == ORCID_LEN) {
+ p.setValue(orcid);
+ } else {
+ p.setValue("");
+ }
+ }
+ return p;
+ })
+ .filter(p -> StringUtils.isNotBlank(p.getValue()))
+ .collect(
+ Collectors
+ .toMap(
+ p -> p.getQualifier().getClassid() + p.getValue(),
+ Function.identity(),
+ (p1, p2) -> p1,
+ LinkedHashMap::new))
+ .values()
+ .stream()
+ .collect(Collectors.toList()));
+ }
+ }
+ }
+ if (value instanceof Publication) {
+
+ } else if (value instanceof Dataset) {
+
+ } else if (value instanceof OtherResearchProduct) {
+
+ } else if (value instanceof Software) {
+
+ }
+
+ }
+
} else if (value instanceof Relation) {
Relation r = (Relation) value;
@@ -136,253 +620,40 @@ public class GraphCleaningFunctions extends CleaningFunctions {
r.setValidationDate(null);
r.setValidated(false);
}
- } else if (value instanceof Result) {
-
- Result r = (Result) value;
-
- if (Objects.nonNull(r.getDateofacceptance())) {
- Optional<String> date = cleanDateField(r.getDateofacceptance());
- if (date.isPresent()) {
- r.getDateofacceptance().setValue(date.get());
- } else {
- r.setDateofacceptance(null);
- }
- }
- if (Objects.nonNull(r.getRelevantdate())) {
- r
- .setRelevantdate(
- r
- .getRelevantdate()
- .stream()
- .filter(Objects::nonNull)
- .filter(sp -> Objects.nonNull(sp.getQualifier()))
- .filter(sp -> StringUtils.isNotBlank(sp.getQualifier().getClassid()))
- .map(sp -> {
- sp.setValue(GraphCleaningFunctions.cleanDate(sp.getValue()));
- return sp;
- })
- .filter(sp -> StringUtils.isNotBlank(sp.getValue()))
- .collect(Collectors.toList()));
- }
- if (Objects.nonNull(r.getPublisher()) && StringUtils.isBlank(r.getPublisher().getValue())) {
- r.setPublisher(null);
- }
- if (Objects.isNull(r.getLanguage()) || StringUtils.isBlank(r.getLanguage().getClassid())) {
- r
- .setLanguage(
- qualifier("und", "Undetermined", ModelConstants.DNET_LANGUAGES));
- }
- if (Objects.nonNull(r.getSubject())) {
- r
- .setSubject(
- r
- .getSubject()
- .stream()
- .filter(Objects::nonNull)
- .filter(sp -> StringUtils.isNotBlank(sp.getValue()))
- .filter(sp -> Objects.nonNull(sp.getQualifier()))
- .filter(sp -> StringUtils.isNotBlank(sp.getQualifier().getClassid()))
- .map(GraphCleaningFunctions::cleanValue)
- .collect(Collectors.toList()));
- }
- if (Objects.nonNull(r.getTitle())) {
- r
- .setTitle(
- r
- .getTitle()
- .stream()
- .filter(Objects::nonNull)
- .filter(sp -> StringUtils.isNotBlank(sp.getValue()))
- .filter(
- sp -> {
- final String title = sp
- .getValue()
- .toLowerCase();
- final String decoded = Unidecode.decode(title);
-
- if (StringUtils.contains(decoded, TITLE_TEST)) {
- return decoded
- .replaceAll(TITLE_FILTER_REGEX, "")
- .length() > TITLE_FILTER_RESIDUAL_LENGTH;
- }
- return !decoded
- .replaceAll("\\W|\\d", "")
- .isEmpty();
- })
- .map(GraphCleaningFunctions::cleanValue)
- .collect(Collectors.toList()));
- }
- if (Objects.nonNull(r.getDescription())) {
- r
- .setDescription(
- r
- .getDescription()
- .stream()
- .filter(Objects::nonNull)
- .filter(sp -> StringUtils.isNotBlank(sp.getValue()))
- .map(GraphCleaningFunctions::cleanValue)
- .collect(Collectors.toList()));
- }
- if (Objects.nonNull(r.getPid())) {
- r.setPid(processPidCleaning(r.getPid()));
- }
- if (Objects.isNull(r.getResourcetype()) || StringUtils.isBlank(r.getResourcetype().getClassid())) {
- r
- .setResourcetype(
- qualifier(ModelConstants.UNKNOWN, "Unknown", ModelConstants.DNET_DATA_CITE_RESOURCE));
- }
- if (Objects.nonNull(r.getInstance())) {
-
- for (Instance i : r.getInstance()) {
- if (Objects.nonNull(i.getPid())) {
- i.setPid(processPidCleaning(i.getPid()));
- }
- if (Objects.nonNull(i.getAlternateIdentifier())) {
- i.setAlternateIdentifier(processPidCleaning(i.getAlternateIdentifier()));
- }
- Optional
- .ofNullable(i.getPid())
- .ifPresent(pid -> {
- final Set<StructuredProperty> pids = Sets.newHashSet(pid);
- Optional
- .ofNullable(i.getAlternateIdentifier())
- .ifPresent(altId -> {
- final Set<StructuredProperty> altIds = Sets.newHashSet(altId);
- i.setAlternateIdentifier(Lists.newArrayList(Sets.difference(altIds, pids)));
- });
- });
-
- if (Objects.isNull(i.getAccessright()) || StringUtils.isBlank(i.getAccessright().getClassid())) {
- i
- .setAccessright(
- accessRight(
- ModelConstants.UNKNOWN, ModelConstants.NOT_AVAILABLE,
- ModelConstants.DNET_ACCESS_MODES));
- }
- if (Objects.isNull(i.getHostedby()) || StringUtils.isBlank(i.getHostedby().getKey())) {
- i.setHostedby(ModelConstants.UNKNOWN_REPOSITORY);
- }
- if (Objects.isNull(i.getRefereed())) {
- i.setRefereed(qualifier("0000", "Unknown", ModelConstants.DNET_REVIEW_LEVELS));
- }
- if (Objects.nonNull(i.getDateofacceptance())) {
- Optional<String> date = cleanDateField(i.getDateofacceptance());
- if (date.isPresent()) {
- i.getDateofacceptance().setValue(date.get());
- } else {
- i.setDateofacceptance(null);
- }
- }
- }
- }
- if (Objects.isNull(r.getBestaccessright()) || StringUtils.isBlank(r.getBestaccessright().getClassid())) {
- Qualifier bestaccessrights = OafMapperUtils.createBestAccessRights(r.getInstance());
- if (Objects.isNull(bestaccessrights)) {
- r
- .setBestaccessright(
- qualifier(
- ModelConstants.UNKNOWN, ModelConstants.NOT_AVAILABLE,
- ModelConstants.DNET_ACCESS_MODES));
- } else {
- r.setBestaccessright(bestaccessrights);
- }
- }
- if (Objects.nonNull(r.getAuthor())) {
- r
- .setAuthor(
- r
- .getAuthor()
- .stream()
- .filter(Objects::nonNull)
- .filter(a -> StringUtils.isNotBlank(a.getFullname()))
- .filter(a -> StringUtils.isNotBlank(a.getFullname().replaceAll("[\\W]", "")))
- .collect(Collectors.toList()));
-
- boolean nullRank = r
- .getAuthor()
- .stream()
- .anyMatch(a -> Objects.isNull(a.getRank()));
- if (nullRank) {
- int i = 1;
- for (Author author : r.getAuthor()) {
- author.setRank(i++);
- }
- }
-
- for (Author a : r.getAuthor()) {
- if (Objects.isNull(a.getPid())) {
- a.setPid(Lists.newArrayList());
- } else {
- a
- .setPid(
- a
- .getPid()
- .stream()
- .filter(Objects::nonNull)
- .filter(p -> Objects.nonNull(p.getQualifier()))
- .filter(p -> StringUtils.isNotBlank(p.getValue()))
- .map(p -> {
- // hack to distinguish orcid from orcid_pending
- String pidProvenance = Optional
- .ofNullable(p.getDataInfo())
- .map(
- d -> Optional
- .ofNullable(d.getProvenanceaction())
- .map(Qualifier::getClassid)
- .orElse(""))
- .orElse("");
- if (p
- .getQualifier()
- .getClassid()
- .toLowerCase()
- .contains(ModelConstants.ORCID)) {
- if (pidProvenance
- .equals(ModelConstants.SYSIMPORT_CROSSWALK_ENTITYREGISTRY)) {
- p.getQualifier().setClassid(ModelConstants.ORCID);
- } else {
- p.getQualifier().setClassid(ModelConstants.ORCID_PENDING);
- }
- final String orcid = p
- .getValue()
- .trim()
- .toLowerCase()
- .replaceAll(ORCID_CLEANING_REGEX, "$1-$2-$3-$4");
- if (orcid.length() == ORCID_LEN) {
- p.setValue(orcid);
- } else {
- p.setValue("");
- }
- }
- return p;
- })
- .filter(p -> StringUtils.isNotBlank(p.getValue()))
- .collect(
- Collectors
- .toMap(
- p -> p.getQualifier().getClassid() + p.getValue(),
- Function.identity(),
- (p1, p2) -> p1,
- LinkedHashMap::new))
- .values()
- .stream()
- .collect(Collectors.toList()));
- }
- }
- }
- if (value instanceof Publication) {
-
- } else if (value instanceof Dataset) {
-
- } else if (value instanceof OtherResearchProduct) {
-
- } else if (value instanceof Software) {
-
- }
}
return value;
}
+ private static Author cleanupAuthor(Author author) {
+ if (StringUtils.isNotBlank(author.getFullname())) {
+ author
+ .setFullname(
+ author
+ .getFullname()
+ .replaceAll(NAME_CLEANING_REGEX, " ")
+ .replace("\"", "\\\""));
+ }
+ if (StringUtils.isNotBlank(author.getName())) {
+ author
+ .setName(
+ author
+ .getName()
+ .replaceAll(NAME_CLEANING_REGEX, " ")
+ .replace("\"", "\\\""));
+ }
+ if (StringUtils.isNotBlank(author.getSurname())) {
+ author
+ .setSurname(
+ author
+ .getSurname()
+ .replaceAll(NAME_CLEANING_REGEX, " ")
+ .replace("\"", "\\\""));
+ }
+
+ return author;
+ }
+
private static Optional<String> cleanDateField(Field<String> dateofacceptance) {
return Optional
.ofNullable(dateofacceptance)
@@ -432,7 +703,7 @@ public class GraphCleaningFunctions extends CleaningFunctions {
.filter(sp -> !PID_BLACKLIST.contains(sp.getValue().trim().toLowerCase()))
.filter(sp -> Objects.nonNull(sp.getQualifier()))
.filter(sp -> StringUtils.isNotBlank(sp.getQualifier().getClassid()))
- .map(CleaningFunctions::normalizePidValue)
+ .map(PidCleaner::normalizePidValue)
.filter(CleaningFunctions::pidFilter)
.collect(Collectors.toList());
}
@@ -461,6 +732,11 @@ public class GraphCleaningFunctions extends CleaningFunctions {
return s;
}
+ protected static Subject cleanValue(Subject s) {
+ s.setValue(s.getValue().replaceAll(CLEANING_REGEX, " "));
+ return s;
+ }
+
protected static Field<String> cleanValue(Field<String> s) {
s.setValue(s.getValue().replaceAll(CLEANING_REGEX, " "));
return s;
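
To put the reshuffled methods in context, a rough sketch of how a caller might chain them; the order fixVocabularyNames → cleanup → filter mirrors how the graph cleaning workflow typically applies them, the VocabularyGroup is assumed to be loaded elsewhere, and this is not the actual workflow code:

    import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup;
    import eu.dnetlib.dhp.schema.oaf.Oaf;
    import eu.dnetlib.dhp.schema.oaf.utils.GraphCleaningFunctions;

    public class CleaningPipelineSketch {

        // Returns the cleaned entity, or null when it does not survive the filter step.
        public static <T extends Oaf> T clean(T value, VocabularyGroup vocs) {
            T withFixedVocabs = GraphCleaningFunctions.fixVocabularyNames(value);
            T cleaned = GraphCleaningFunctions.cleanup(withFixedVocabs, vocs); // cleanup now needs the vocabularies
            return GraphCleaningFunctions.filter(cleaned) ? cleaned : null;
        }
    }
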
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/GridCleaningRule.java b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/GridCleaningRule.java
new file mode 100644
index 000000000..37ab91dd5
--- /dev/null
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/GridCleaningRule.java
@@ -0,0 +1,24 @@
+
+package eu.dnetlib.dhp.schema.oaf.utils;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class GridCleaningRule {
+
+ public static final Pattern PATTERN = Pattern.compile("(?<grid>\\d{4,6}\\.[0-9a-z]{1,2})");
+
+ public static String clean(String grid) {
+ String s = grid
+ .replaceAll("\\s", "")
+ .toLowerCase();
+
+ Matcher m = PATTERN.matcher(s);
+ if (m.find()) {
+ return "grid." + m.group("grid");
+ }
+
+ return "";
+ }
+
+}
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/ISNICleaningRule.java b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/ISNICleaningRule.java
new file mode 100644
index 000000000..bcd8279cc
--- /dev/null
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/ISNICleaningRule.java
@@ -0,0 +1,21 @@
+
+package eu.dnetlib.dhp.schema.oaf.utils;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+// https://www.wikidata.org/wiki/Property:P213
+public class ISNICleaningRule {
+
+ public static final Pattern PATTERN = Pattern.compile("([0]{4}) ?([0-9]{4}) ?([0-9]{4}) ?([0-9]{3}[0-9X])");
+
+ public static String clean(final String isni) {
+
+ Matcher m = PATTERN.matcher(isni);
+ if (m.find()) {
+ return String.join("", m.group(1), m.group(2), m.group(3), m.group(4));
+ } else {
+ return "";
+ }
+ }
+}
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/OafMapperUtils.java b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/OafMapperUtils.java
index 720fe47fb..c58096d35 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/OafMapperUtils.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/OafMapperUtils.java
@@ -3,6 +3,8 @@ package eu.dnetlib.dhp.schema.oaf.utils;
import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
+import java.sql.Array;
+import java.sql.SQLException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
@@ -12,6 +14,7 @@ import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import eu.dnetlib.dhp.schema.common.AccessRightComparator;
+import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.oaf.*;
@@ -47,6 +50,17 @@ public class OafMapperUtils {
}
public static Result mergeResults(Result left, Result right) {
+
+ final boolean leftFromDelegatedAuthority = isFromDelegatedAuthority(left);
+ final boolean rightFromDelegatedAuthority = isFromDelegatedAuthority(right);
+
+ if (leftFromDelegatedAuthority && !rightFromDelegatedAuthority) {
+ return left;
+ }
+ if (!leftFromDelegatedAuthority && rightFromDelegatedAuthority) {
+ return right;
+ }
+
if (new ResultTypeComparator().compare(left, right) < 0) {
left.mergeFrom(right);
return left;
@@ -56,6 +70,18 @@ public class OafMapperUtils {
}
}
+ private static boolean isFromDelegatedAuthority(Result r) {
+ return Optional
+ .ofNullable(r.getInstance())
+ .map(
+ instance -> instance
+ .stream()
+ .filter(i -> Objects.nonNull(i.getCollectedfrom()))
+ .map(i -> i.getCollectedfrom().getKey())
+ .anyMatch(cfId -> IdentifierFactory.delegatedAuthorityDatasourceIds().contains(cfId)))
+ .orElse(false);
+ }
+
public static KeyValue keyValue(final String k, final String v) {
final KeyValue kv = new KeyValue();
kv.setKey(k);
@@ -95,6 +121,17 @@ public class OafMapperUtils {
.collect(Collectors.toList());
}
+ public static <T> List<T> listValues(Array values) throws SQLException {
+ if (Objects.isNull(values)) {
+ return null;
+ }
+ return Arrays
+ .stream((T[]) values.getArray())
+ .filter(Objects::nonNull)
+ .distinct()
+ .collect(Collectors.toList());
+ }
+
public static <T> List<Field<T>> listFields(final DataInfo info, final List<T> values) {
return values
.stream()
@@ -105,7 +142,7 @@ public class OafMapperUtils {
}
public static Qualifier unknown(final String schemeid, final String schemename) {
- return qualifier("UNKNOWN", "Unknown", schemeid, schemename);
+ return qualifier(UNKNOWN, "Unknown", schemeid, schemename);
}
public static AccessRight accessRight(
@@ -153,6 +190,17 @@ public class OafMapperUtils {
return q;
}
+ public static Subject subject(
+ final String value,
+ final String classid,
+ final String classname,
+ final String schemeid,
+ final String schemename,
+ final DataInfo dataInfo) {
+
+ return subject(value, qualifier(classid, classname, schemeid, schemename), dataInfo);
+ }
+
public static StructuredProperty structuredProperty(
final String value,
final String classid,
@@ -164,6 +212,20 @@ public class OafMapperUtils {
return structuredProperty(value, qualifier(classid, classname, schemeid, schemename), dataInfo);
}
+ public static Subject subject(
+ final String value,
+ final Qualifier qualifier,
+ final DataInfo dataInfo) {
+ if (value == null) {
+ return null;
+ }
+ final Subject s = new Subject();
+ s.setValue(value);
+ s.setQualifier(qualifier);
+ s.setDataInfo(dataInfo);
+ return s;
+ }
+
public static StructuredProperty structuredProperty(
final String value,
final Qualifier qualifier,
@@ -368,4 +430,88 @@ public class OafMapperUtils {
}
return null;
}
+
+ public static KeyValue newKeyValueInstance(String key, String value, DataInfo dataInfo) {
+ KeyValue kv = new KeyValue();
+ kv.setDataInfo(dataInfo);
+ kv.setKey(key);
+ kv.setValue(value);
+ return kv;
+ }
+
+ public static Measure newMeasureInstance(String id, String value, String key, DataInfo dataInfo) {
+ Measure m = new Measure();
+ m.setId(id);
+ m.setUnit(Arrays.asList(newKeyValueInstance(key, value, dataInfo)));
+ return m;
+ }
+
+ public static Relation getRelation(final String source,
+ final String target,
+ final String relType,
+ final String subRelType,
+ final String relClass,
+ final OafEntity entity) {
+ return getRelation(source, target, relType, subRelType, relClass, entity, null);
+ }
+
+ public static Relation getRelation(final String source,
+ final String target,
+ final String relType,
+ final String subRelType,
+ final String relClass,
+ final OafEntity entity,
+ final String validationDate) {
+ return getRelation(
+ source, target, relType, subRelType, relClass, entity.getCollectedfrom(), entity.getDataInfo(),
+ entity.getLastupdatetimestamp(), validationDate, null);
+ }
+
+ public static Relation getRelation(final String source,
+ final String target,
+ final String relType,
+ final String subRelType,
+ final String relClass,
+ final List<KeyValue> collectedfrom,
+ final DataInfo dataInfo,
+ final Long lastupdatetimestamp) {
+ return getRelation(
+ source, target, relType, subRelType, relClass, collectedfrom, dataInfo, lastupdatetimestamp, null, null);
+ }
+
+ public static Relation getRelation(final String source,
+ final String target,
+ final String relType,
+ final String subRelType,
+ final String relClass,
+ final List<KeyValue> collectedfrom,
+ final DataInfo dataInfo,
+ final Long lastupdatetimestamp,
+ final String validationDate,
+ final List<KeyValue> properties) {
+ final Relation rel = new Relation();
+ rel.setRelType(relType);
+ rel.setSubRelType(subRelType);
+ rel.setRelClass(relClass);
+ rel.setSource(source);
+ rel.setTarget(target);
+ rel.setCollectedfrom(collectedfrom);
+ rel.setDataInfo(dataInfo);
+ rel.setLastupdatetimestamp(lastupdatetimestamp);
+ rel.setValidated(StringUtils.isNotBlank(validationDate));
+ rel.setValidationDate(StringUtils.isNotBlank(validationDate) ? validationDate : null);
+ rel.setProperties(properties);
+ return rel;
+ }
+
+ public static String getProvenance(DataInfo dataInfo) {
+ return Optional
+ .ofNullable(dataInfo)
+ .map(
+ d -> Optional
+ .ofNullable(d.getProvenanceaction())
+ .map(Qualifier::getClassid)
+ .orElse(""))
+ .orElse("");
+ }
}
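
A short usage sketch of the new getRelation factory; the ModelConstants names and the datasource key are assumed for illustration, and the delegated-authority precedence added to mergeResults is not exercised here:

    import java.util.Arrays;

    import eu.dnetlib.dhp.schema.common.ModelConstants;
    import eu.dnetlib.dhp.schema.oaf.KeyValue;
    import eu.dnetlib.dhp.schema.oaf.Relation;
    import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;

    public class RelationFactorySketch {

        public static Relation projectProducesResult(String projectId, String resultId) {
            KeyValue cf = OafMapperUtils.keyValue("10|openaire____::hypothetical", "Hypothetical datasource");
            return OafMapperUtils
                .getRelation(
                    projectId, resultId,
                    ModelConstants.RESULT_PROJECT,   // relType (assumed constant name)
                    ModelConstants.OUTCOME,          // subRelType
                    ModelConstants.PRODUCES,         // relClass
                    Arrays.asList(cf),               // collectedfrom
                    null,                            // dataInfo
                    System.currentTimeMillis());     // lastupdatetimestamp
        }
    }
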
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/PICCleaningRule.java b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/PICCleaningRule.java
new file mode 100644
index 000000000..a2213ed9f
--- /dev/null
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/PICCleaningRule.java
@@ -0,0 +1,21 @@
+
+package eu.dnetlib.dhp.schema.oaf.utils;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class PICCleaningRule {
+
+ public static final Pattern PATTERN = Pattern.compile("\\d{9}");
+
+ public static String clean(final String pic) {
+
+ Matcher m = PATTERN.matcher(pic);
+ if (m.find()) {
+ return m.group();
+ } else {
+ return "";
+ }
+ }
+
+}
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/PidCleaner.java b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/PidCleaner.java
new file mode 100644
index 000000000..114c2b3af
--- /dev/null
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/PidCleaner.java
@@ -0,0 +1,62 @@
+
+package eu.dnetlib.dhp.schema.oaf.utils;
+
+import java.util.Optional;
+
+import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
+
+public class PidCleaner {
+
+ /**
+ * Utility method that normalises PID values on a per-type basis.
+ * @param pid the PID whose value will be normalised.
+ * @return the PID containing the normalised value.
+ */
+ public static StructuredProperty normalizePidValue(StructuredProperty pid) {
+ pid
+ .setValue(
+ normalizePidValue(
+ pid.getQualifier().getClassid(),
+ pid.getValue()));
+
+ return pid;
+ }
+
+ public static String normalizePidValue(String pidType, String pidValue) {
+ String value = Optional
+ .ofNullable(pidValue)
+ .map(String::trim)
+ .orElseThrow(() -> new IllegalArgumentException("PID value cannot be empty"));
+
+ switch (pidType) {
+
+ // TODO add cleaning for more PID types as needed
+
+ // Result
+ case "doi":
+ return DoiCleaningRule.clean(value);
+ case "pmid":
+ return PmidCleaningRule.clean(value);
+ case "pmc":
+ return PmcCleaningRule.clean(value);
+ case "handle":
+ case "arXiv":
+ return value;
+
+ // Organization
+ case "GRID":
+ return GridCleaningRule.clean(value);
+ case "ISNI":
+ return ISNICleaningRule.clean(value);
+ case "ROR":
+ return RorCleaningRule.clean(value);
+ case "PIC":
+ return PICCleaningRule.clean(value);
+ case "FundRef":
+ return FundRefCleaningRule.clean(value);
+ default:
+ return value;
+ }
+ }
+
+}
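
A quick usage sketch of the per-type dispatch (the type strings match the case labels above, the values are invented):

    import eu.dnetlib.dhp.schema.oaf.utils.PidCleaner;

    public class PidCleanerSketch {
        public static void main(String[] args) {
            System.out.println(PidCleaner.normalizePidValue("doi", " DOI:10.1234/AbC "));  // 10.1234/abc, modulo prefix normalisation
            System.out.println(PidCleaner.normalizePidValue("GRID", "grid.1234.ab"));      // grid.1234.ab
            System.out.println(PidCleaner.normalizePidValue("arXiv", "2107.00001"));       // returned as-is
            // unknown types fall through to the default branch and come back trimmed,
            // while a null value makes normalizePidValue throw an IllegalArgumentException
        }
    }
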
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/PmcCleaningRule.java b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/PmcCleaningRule.java
new file mode 100644
index 000000000..903041d43
--- /dev/null
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/PmcCleaningRule.java
@@ -0,0 +1,24 @@
+
+package eu.dnetlib.dhp.schema.oaf.utils;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class PmcCleaningRule {
+
+ public static final Pattern PATTERN = Pattern.compile("PMC\\d{1,8}");
+
+ public static String clean(String pmc) {
+ String s = pmc
+ .replaceAll("\\s", "")
+ .toUpperCase();
+
+ final Matcher m = PATTERN.matcher(s);
+
+ if (m.find()) {
+ return m.group();
+ }
+ return "";
+ }
+
+}
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/PmidCleaningRule.java b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/PmidCleaningRule.java
new file mode 100644
index 000000000..d0f5a3b27
--- /dev/null
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/PmidCleaningRule.java
@@ -0,0 +1,25 @@
+
+package eu.dnetlib.dhp.schema.oaf.utils;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+// https://researchguides.stevens.edu/c.php?g=442331&p=6577176
+public class PmidCleaningRule {
+
+ public static final Pattern PATTERN = Pattern.compile("[1-9]{1,8}");
+
+ public static String clean(String pmid) {
+ String s = pmid
+ .toLowerCase()
+ .replaceAll("\\s", "");
+
+ final Matcher m = PATTERN.matcher(s);
+
+ if (m.find()) {
+ return m.group();
+ }
+ return "";
+ }
+
+}
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/RorCleaningRule.java b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/RorCleaningRule.java
new file mode 100644
index 000000000..f6685f19d
--- /dev/null
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/RorCleaningRule.java
@@ -0,0 +1,27 @@
+
+package eu.dnetlib.dhp.schema.oaf.utils;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+// https://ror.readme.io/docs/ror-identifier-pattern
+public class RorCleaningRule {
+
+ public static final String ROR_PREFIX = "https://ror.org/";
+
+ private static final Pattern PATTERN = Pattern.compile("(?<ror>0[a-hj-km-np-tv-z|0-9]{6}[0-9]{2})");
+
+ public static String clean(String ror) {
+ String s = ror
+ .replaceAll("\\s", "")
+ .toLowerCase();
+
+ Matcher m = PATTERN.matcher(s);
+
+ if (m.find()) {
+ return ROR_PREFIX + m.group("ror");
+ }
+ return "";
+ }
+
+}
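
For completeness, expected behaviour of the identifier rules on sample inputs; the values are invented and the expectations follow a plain reading of the regexes above:

    import eu.dnetlib.dhp.schema.oaf.utils.GridCleaningRule;
    import eu.dnetlib.dhp.schema.oaf.utils.ISNICleaningRule;
    import eu.dnetlib.dhp.schema.oaf.utils.PmidCleaningRule;
    import eu.dnetlib.dhp.schema.oaf.utils.RorCleaningRule;

    public class IdentifierCleaningSketch {
        public static void main(String[] args) {
            System.out.println(GridCleaningRule.clean("GRID.1234.ab"));         // grid.1234.ab
            System.out.println(ISNICleaningRule.clean("0000 0001 2150 090X"));  // 000000012150090X
            System.out.println(RorCleaningRule.clean("ror.org/05rrcem69"));     // https://ror.org/05rrcem69
            System.out.println(PmidCleaningRule.clean("not-a-pmid"));           // empty string: no digits to extract
        }
    }
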
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/SubjectProvenanceComparator.java b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/SubjectProvenanceComparator.java
new file mode 100644
index 000000000..f4e3c8841
--- /dev/null
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/schema/oaf/utils/SubjectProvenanceComparator.java
@@ -0,0 +1,46 @@
+
+package eu.dnetlib.dhp.schema.oaf.utils;
+
+import static eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils.getProvenance;
+import static org.apache.commons.lang3.StringUtils.isBlank;
+
+import java.util.Comparator;
+
+import eu.dnetlib.dhp.schema.oaf.Subject;
+
+public class SubjectProvenanceComparator implements Comparator {
+
+ @Override
+ public int compare(Subject left, Subject right) {
+
+ String lProv = getProvenance(left.getDataInfo());
+ String rProv = getProvenance(right.getDataInfo());
+
+ if (isBlank(lProv) && isBlank(rProv))
+ return 0;
+ if (isBlank(lProv))
+ return 1;
+ if (isBlank(rProv))
+ return -1;
+ if (lProv.equals(rProv))
+ return 0;
+ if (lProv.toLowerCase().contains("crosswalk"))
+ return -1;
+ if (rProv.toLowerCase().contains("crosswalk"))
+ return 1;
+ if (lProv.toLowerCase().contains("user"))
+ return -1;
+ if (rProv.toLowerCase().contains("user"))
+ return 1;
+ if (lProv.toLowerCase().contains("propagation"))
+ return -1;
+ if (rProv.toLowerCase().contains("propagation"))
+ return 1;
+ if (lProv.toLowerCase().contains("iis"))
+ return -1;
+ if (rProv.toLowerCase().contains("iis"))
+ return 1;
+
+ return 0;
+ }
+}
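
A hedged illustration of the resulting ordering, as used by the subject deduplication in GraphCleaningFunctions; the provenance class ids below are shortened stand-ins for the real vocabulary values:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    import eu.dnetlib.dhp.schema.oaf.DataInfo;
    import eu.dnetlib.dhp.schema.oaf.Qualifier;
    import eu.dnetlib.dhp.schema.oaf.Subject;
    import eu.dnetlib.dhp.schema.oaf.utils.SubjectProvenanceComparator;

    public class SubjectProvenanceSketch {

        private static Subject subject(String value, String provenanceClassid) {
            Qualifier pa = new Qualifier();
            pa.setClassid(provenanceClassid);
            DataInfo di = new DataInfo();
            di.setProvenanceaction(pa);
            Subject s = new Subject();
            s.setValue(value);
            s.setDataInfo(di);
            return s;
        }

        public static void main(String[] args) {
            List<Subject> duplicates = Arrays
                .asList(
                    subject("neural networks", "iis"),                     // inferred
                    subject("neural networks", "sysimport:crosswalk:x"));  // harvested (hypothetical classid)
            // crosswalk < user < propagation < iis, so Collections.min keeps the harvested subject
            Subject kept = Collections.min(duplicates, new SubjectProvenanceComparator());
            System.out.println(kept.getDataInfo().getProvenanceaction().getClassid());
        }
    }
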
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/DHPUtils.java b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/DHPUtils.java
index 5a59bc0df..e10d0c500 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/DHPUtils.java
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/DHPUtils.java
@@ -75,9 +75,14 @@ public class DHPUtils {
final HttpGet req = new HttpGet(url);
+ log.info("MDStoreManager request: {}", req);
+
try (final CloseableHttpClient client = HttpClients.createDefault()) {
try (final CloseableHttpResponse response = client.execute(req)) {
final String json = IOUtils.toString(response.getEntity().getContent());
+
+ log.info("MDStoreManager response: {}", json);
+
final MDStoreWithInfo[] mdstores = objectMapper.readValue(json, MDStoreWithInfo[].class);
return Arrays
.stream(mdstores)
diff --git a/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/dump/input_maketar_parameters.json b/dhp-common/src/main/resources/eu/dnetlib/dhp/common/input_maketar_parameters.json
similarity index 100%
rename from dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/dump/input_maketar_parameters.json
rename to dhp-common/src/main/resources/eu/dnetlib/dhp/common/input_maketar_parameters.json
diff --git a/dhp-common/src/main/resources/eu/dnetlib/dhp/common/name_particles.txt b/dhp-common/src/main/resources/eu/dnetlib/dhp/common/name_particles.txt
new file mode 100644
index 000000000..07cf06a98
--- /dev/null
+++ b/dhp-common/src/main/resources/eu/dnetlib/dhp/common/name_particles.txt
@@ -0,0 +1,8 @@
+van
+von
+der
+de
+dell
+sig
+mr
+mrs
\ No newline at end of file
diff --git a/dhp-workflows/dhp-dedup-openaire/src/main/resources/eu/dnetlib/dhp/oa/dedup/dispatch_entities_parameters.json b/dhp-common/src/main/resources/eu/dnetlib/dhp/oa/merge/dispatch_entities_parameters.json
similarity index 79%
rename from dhp-workflows/dhp-dedup-openaire/src/main/resources/eu/dnetlib/dhp/oa/dedup/dispatch_entities_parameters.json
rename to dhp-common/src/main/resources/eu/dnetlib/dhp/oa/merge/dispatch_entities_parameters.json
index aa8d2a7c2..60f11ac84 100644
--- a/dhp-workflows/dhp-dedup-openaire/src/main/resources/eu/dnetlib/dhp/oa/dedup/dispatch_entities_parameters.json
+++ b/dhp-common/src/main/resources/eu/dnetlib/dhp/oa/merge/dispatch_entities_parameters.json
@@ -18,9 +18,9 @@
"paramRequired": true
},
{
- "paramName": "c",
- "paramLongName": "graphTableClassName",
- "paramDescription": "the graph entity class name",
+ "paramName": "fi",
+ "paramLongName": "filterInvisible",
+ "paramDescription": "if true filters out invisible entities",
"paramRequired": true
}
]
\ No newline at end of file
diff --git a/dhp-workflows/dhp-dedup-openaire/src/main/resources/eu/dnetlib/dhp/oa/dedup/group_graph_entities_parameters.json b/dhp-common/src/main/resources/eu/dnetlib/dhp/oa/merge/group_graph_entities_parameters.json
similarity index 100%
rename from dhp-workflows/dhp-dedup-openaire/src/main/resources/eu/dnetlib/dhp/oa/dedup/group_graph_entities_parameters.json
rename to dhp-common/src/main/resources/eu/dnetlib/dhp/oa/merge/group_graph_entities_parameters.json
diff --git a/dhp-common/src/main/scala/eu/dnetlib/dhp/application/SparkScalaApplication.scala b/dhp-common/src/main/scala/eu/dnetlib/dhp/application/SparkScalaApplication.scala
new file mode 100644
index 000000000..f8afe9af4
--- /dev/null
+++ b/dhp-common/src/main/scala/eu/dnetlib/dhp/application/SparkScalaApplication.scala
@@ -0,0 +1,73 @@
+package eu.dnetlib.dhp.application
+
+import scala.io.Source
+
+/** This is the main SparkApplication interface
+ * that all Spark Scala classes should inherit from
+ */
+trait SparkScalaApplication {
+
+ /** This is the classpath location of the json
+ * that describes all the arguments needed to run
+ */
+ val propertyPath: String
+
+ /** Utility to parse the arguments using the
+ * property json in the classpath identified from
+ * the variable propertyPath
+ *
+ * @param args the list of arguments
+ */
+ def parseArguments(args: Array[String]): ArgumentApplicationParser = {
+ val parser = new ArgumentApplicationParser(
+ Source.fromInputStream(getClass.getResourceAsStream(propertyPath)).mkString
+ )
+ parser.parseArgument(args)
+ parser
+ }
+
+ /** Every Spark application runs this method,
+ * where the whole logic of the Spark node is defined
+ */
+ def run(): Unit
+}
+
+import org.apache.spark.SparkConf
+import org.apache.spark.sql.SparkSession
+import org.slf4j.Logger
+
+abstract class AbstractScalaApplication(
+ val propertyPath: String,
+ val args: Array[String],
+ log: Logger
+) extends SparkScalaApplication {
+
+ var parser: ArgumentApplicationParser = null
+
+ var spark: SparkSession = null
+
+ def initialize(): SparkScalaApplication = {
+ parser = parseArguments(args)
+ spark = createSparkSession()
+ this
+ }
+
+ /** Utility for creating a spark session starting from parser
+ *
+ * @return a spark Session
+ */
+ private def createSparkSession(): SparkSession = {
+ require(parser != null)
+
+ val conf: SparkConf = new SparkConf()
+ val master = parser.get("master")
+ log.info(s"Creating Spark session: Master: $master")
+ SparkSession
+ .builder()
+ .config(conf)
+ .appName(getClass.getSimpleName)
+ .master(master)
+ .getOrCreate()
+ }
+
+}
diff --git a/dhp-common/src/main/scala/eu/dnetlib/dhp/application/dedup/log/DedupLogModel.scala b/dhp-common/src/main/scala/eu/dnetlib/dhp/application/dedup/log/DedupLogModel.scala
new file mode 100644
index 000000000..d74ec3f69
--- /dev/null
+++ b/dhp-common/src/main/scala/eu/dnetlib/dhp/application/dedup/log/DedupLogModel.scala
@@ -0,0 +1,10 @@
+package eu.dnetlib.dhp.application.dedup.log
+
+case class DedupLogModel(
+ tag: String,
+ configuration: String,
+ entity: String,
+ startTS: Long,
+ endTS: Long,
+ totalMs: Long
+) {}
diff --git a/dhp-common/src/main/scala/eu/dnetlib/dhp/application/dedup/log/DedupLogWriter.scala b/dhp-common/src/main/scala/eu/dnetlib/dhp/application/dedup/log/DedupLogWriter.scala
new file mode 100644
index 000000000..4409c01d9
--- /dev/null
+++ b/dhp-common/src/main/scala/eu/dnetlib/dhp/application/dedup/log/DedupLogWriter.scala
@@ -0,0 +1,14 @@
+package eu.dnetlib.dhp.application.dedup.log
+
+import org.apache.spark.sql.{SaveMode, SparkSession}
+
+class DedupLogWriter(path: String) {
+
+ def appendLog(dedupLogModel: DedupLogModel, spark: SparkSession): Unit = {
+ import spark.implicits._
+ val df = spark.createDataset[DedupLogModel](data = List(dedupLogModel))
+ df.write.mode(SaveMode.Append).save(path)
+
+ }
+
+}
diff --git a/dhp-common/src/main/scala/eu/dnetlib/dhp/sx/graph/scholix/ScholixUtils.scala b/dhp-common/src/main/scala/eu/dnetlib/dhp/sx/graph/scholix/ScholixUtils.scala
new file mode 100644
index 000000000..a995016a8
--- /dev/null
+++ b/dhp-common/src/main/scala/eu/dnetlib/dhp/sx/graph/scholix/ScholixUtils.scala
@@ -0,0 +1,442 @@
+package eu.dnetlib.dhp.sx.graph.scholix
+
+import eu.dnetlib.dhp.schema.oaf.{Publication, Relation, Result, StructuredProperty}
+import eu.dnetlib.dhp.schema.sx.scholix._
+import eu.dnetlib.dhp.schema.sx.summary.{CollectedFromType, SchemeValue, ScholixSummary, Typology}
+import eu.dnetlib.dhp.utils.DHPUtils
+import org.apache.spark.sql.expressions.Aggregator
+import org.apache.spark.sql.{Encoder, Encoders}
+import org.json4s
+import org.json4s.DefaultFormats
+import org.json4s.jackson.JsonMethods.parse
+import scala.collection.JavaConverters._
+import scala.io.Source
+
+object ScholixUtils extends Serializable {
+
+ val DNET_IDENTIFIER_SCHEMA: String = "DNET Identifier"
+
+ val DATE_RELATION_KEY: String = "RelationDate"
+
+ case class RelationVocabulary(original: String, inverse: String) {}
+
+ case class RelatedEntities(id: String, relatedDataset: Long, relatedPublication: Long) {}
+
+ val relations: Map[String, RelationVocabulary] = {
+ val input = Source
+ .fromInputStream(
+ getClass.getResourceAsStream("/eu/dnetlib/scholexplorer/relation/relations.json")
+ )
+ .mkString
+ implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats
+
+ lazy val json: json4s.JValue = parse(input)
+
+ json.extract[Map[String, RelationVocabulary]]
+ }
+
+ def extractRelationDate(relation: Relation): String = {
+
+ if (relation.getProperties == null || !relation.getProperties.isEmpty)
+ null
+ else {
+ val date = relation.getProperties.asScala
+ .find(p => DATE_RELATION_KEY.equalsIgnoreCase(p.getKey))
+ .map(p => p.getValue)
+ if (date.isDefined)
+ date.get
+ else
+ null
+ }
+ }
+
+ def extractRelationDate(summary: ScholixSummary): String = {
+
+ if (summary.getDate == null || summary.getDate.isEmpty)
+ null
+ else {
+ summary.getDate.get(0)
+ }
+ }
+
+ def inverseRelationShip(rel: ScholixRelationship): ScholixRelationship = {
+ new ScholixRelationship(rel.getInverse, rel.getSchema, rel.getName)
+
+ }
+
+ def generateScholixResourceFromResult(r: Result): ScholixResource = {
+ generateScholixResourceFromSummary(ScholixUtils.resultToSummary(r))
+ }
+
+ val statsAggregator: Aggregator[(String, String, Long), RelatedEntities, RelatedEntities] =
+ new Aggregator[(String, String, Long), RelatedEntities, RelatedEntities] with Serializable {
+ override def zero: RelatedEntities = null
+
+ override def reduce(b: RelatedEntities, a: (String, String, Long)): RelatedEntities = {
+ val relatedDataset = if ("dataset".equalsIgnoreCase(a._2)) a._3 else 0
+ val relatedPublication = if ("publication".equalsIgnoreCase(a._2)) a._3 else 0
+
+ if (b == null)
+ RelatedEntities(a._1, relatedDataset, relatedPublication)
+ else
+ RelatedEntities(
+ a._1,
+ b.relatedDataset + relatedDataset,
+ b.relatedPublication + relatedPublication
+ )
+ }
+
+ override def merge(b1: RelatedEntities, b2: RelatedEntities): RelatedEntities = {
+ if (b1 != null && b2 != null)
+ RelatedEntities(
+ b1.id,
+ b1.relatedDataset + b2.relatedDataset,
+ b1.relatedPublication + b2.relatedPublication
+ )
+ else if (b1 != null)
+ b1
+ else
+ b2
+ }
+
+ override def finish(reduction: RelatedEntities): RelatedEntities = reduction
+
+ override def bufferEncoder: Encoder[RelatedEntities] = Encoders.bean(classOf[RelatedEntities])
+
+ override def outputEncoder: Encoder[RelatedEntities] = Encoders.bean(classOf[RelatedEntities])
+ }
+
+ val scholixAggregator: Aggregator[(String, Scholix), Scholix, Scholix] =
+ new Aggregator[(String, Scholix), Scholix, Scholix] with Serializable {
+ override def zero: Scholix = null
+
+ def scholix_complete(s: Scholix): Boolean = {
+ if (s == null || s.getIdentifier == null) {
+ false
+ } else if (s.getSource == null || s.getTarget == null) {
+ false
+ } else if (s.getLinkprovider == null || s.getLinkprovider.isEmpty)
+ false
+ else
+ true
+ }
+
+ override def reduce(b: Scholix, a: (String, Scholix)): Scholix = {
+ if (scholix_complete(b)) b else a._2
+ }
+
+ override def merge(b1: Scholix, b2: Scholix): Scholix = {
+ if (scholix_complete(b1)) b1 else b2
+ }
+
+ override def finish(reduction: Scholix): Scholix = reduction
+
+ override def bufferEncoder: Encoder[Scholix] = Encoders.kryo[Scholix]
+
+ override def outputEncoder: Encoder[Scholix] = Encoders.kryo[Scholix]
+ }
+
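+ /** Builds the inverse Scholix link: source and target are swapped and the relationship is inverted. */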
+ def createInverseScholixRelation(scholix: Scholix): Scholix = {
+ val s = new Scholix
+ s.setPublicationDate(scholix.getPublicationDate)
+ s.setPublisher(scholix.getPublisher)
+ s.setLinkprovider(scholix.getLinkprovider)
+ s.setRelationship(inverseRelationShip(scholix.getRelationship))
+ s.setSource(scholix.getTarget)
+ s.setTarget(scholix.getSource)
+ s.setIdentifier(
+ DHPUtils.md5(
+ s"${s.getSource.getIdentifier}::${s.getRelationship.getName}::${s.getTarget.getIdentifier}"
+ )
+ )
+ s
+
+ }
+
+ def extractCollectedFrom(summary: ScholixResource): List[ScholixEntityId] = {
+ if (summary.getCollectedFrom != null && !summary.getCollectedFrom.isEmpty) {
+ val l: List[ScholixEntityId] = summary.getCollectedFrom.asScala.map { d =>
+ new ScholixEntityId(d.getProvider.getName, d.getProvider.getIdentifiers)
+ }(collection.breakOut)
+ l
+ } else List()
+ }
+
+ def extractCollectedFrom(summary: ScholixSummary): List[ScholixEntityId] = {
+ if (summary.getDatasources != null && !summary.getDatasources.isEmpty) {
+ val l: List[ScholixEntityId] = summary.getDatasources.asScala.map { d =>
+ new ScholixEntityId(
+ d.getDatasourceName,
+ List(new ScholixIdentifier(d.getDatasourceId, "DNET Identifier", null)).asJava
+ )
+ }(collection.breakOut)
+ l
+ } else List()
+ }
+
+ def extractCollectedFrom(relation: Relation): List[ScholixEntityId] = {
+ if (relation.getCollectedfrom != null && !relation.getCollectedfrom.isEmpty) {
+
+ val l: List[ScholixEntityId] = relation.getCollectedfrom.asScala.map { c =>
+ new ScholixEntityId(
+ c.getValue,
+ List(new ScholixIdentifier(c.getKey, DNET_IDENTIFIER_SCHEMA, null)).asJava
+ )
+ }.toList
+ l
+ } else List()
+ }
+
+ def generateCompleteScholix(scholix: Scholix, target: ScholixSummary): Scholix = {
+ val s = new Scholix
+ s.setPublicationDate(scholix.getPublicationDate)
+ s.setPublisher(scholix.getPublisher)
+ s.setLinkprovider(scholix.getLinkprovider)
+ s.setRelationship(scholix.getRelationship)
+ s.setSource(scholix.getSource)
+ s.setTarget(generateScholixResourceFromSummary(target))
+ s.setIdentifier(
+ DHPUtils.md5(
+ s"${s.getSource.getIdentifier}::${s.getRelationship.getName}::${s.getTarget.getIdentifier}"
+ )
+ )
+ s
+ }
+
+ def generateCompleteScholix(scholix: Scholix, target: ScholixResource): Scholix = {
+ val s = new Scholix
+ s.setPublicationDate(scholix.getPublicationDate)
+ s.setPublisher(scholix.getPublisher)
+ s.setLinkprovider(scholix.getLinkprovider)
+ s.setRelationship(scholix.getRelationship)
+ s.setSource(scholix.getSource)
+ s.setTarget(target)
+ s.setIdentifier(
+ DHPUtils.md5(
+ s"${s.getSource.getIdentifier}::${s.getRelationship.getName}::${s.getTarget.getIdentifier}"
+ )
+ )
+ s
+ }
+
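+ /** Converts a ScholixSummary into a ScholixResource, copying identifiers, title, creators, dates, publishers and datasources. */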
+ def generateScholixResourceFromSummary(summaryObject: ScholixSummary): ScholixResource = {
+ val r = new ScholixResource
+ r.setIdentifier(summaryObject.getLocalIdentifier)
+ r.setDnetIdentifier(summaryObject.getId)
+
+ r.setObjectType(summaryObject.getTypology.toString)
+ r.setObjectSubType(summaryObject.getSubType)
+
+ if (summaryObject.getTitle != null && !summaryObject.getTitle.isEmpty)
+ r.setTitle(summaryObject.getTitle.get(0))
+
+ if (summaryObject.getAuthor != null && !summaryObject.getAuthor.isEmpty) {
+ val l: List[ScholixEntityId] =
+ summaryObject.getAuthor.asScala.map(a => new ScholixEntityId(a, null)).toList
+ if (l.nonEmpty)
+ r.setCreator(l.asJava)
+ }
+
+ if (summaryObject.getDate != null && !summaryObject.getDate.isEmpty)
+ r.setPublicationDate(summaryObject.getDate.get(0))
+ if (summaryObject.getPublisher != null && !summaryObject.getPublisher.isEmpty) {
+ val plist: List[ScholixEntityId] =
+ summaryObject.getPublisher.asScala.map(p => new ScholixEntityId(p, null)).toList
+
+ if (plist.nonEmpty)
+ r.setPublisher(plist.asJava)
+ }
+
+ if (summaryObject.getDatasources != null && !summaryObject.getDatasources.isEmpty) {
+
+ val l: List[ScholixCollectedFrom] = summaryObject.getDatasources.asScala
+ .map(c =>
+ new ScholixCollectedFrom(
+ new ScholixEntityId(
+ c.getDatasourceName,
+ List(new ScholixIdentifier(c.getDatasourceId, DNET_IDENTIFIER_SCHEMA, null)).asJava
+ ),
+ "collected",
+ "complete"
+ )
+ )
+ .toList
+
+ if (l.nonEmpty)
+ r.setCollectedFrom(l.asJava)
+
+ }
+ r
+ }
+
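+ /** Creates a Scholix link from a relation and its source resource; returns null when no link provider or no known relation semantics can be resolved. */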
+ def scholixFromSource(relation: Relation, source: ScholixResource): Scholix = {
+ if (relation == null || source == null)
+ return null
+ val s = new Scholix
+ var l: List[ScholixEntityId] = extractCollectedFrom(relation)
+ if (l.isEmpty)
+ l = extractCollectedFrom(source)
+ if (l.isEmpty)
+ return null
+ s.setLinkprovider(l.asJava)
+ var d = extractRelationDate(relation)
+ if (d == null)
+ d = source.getPublicationDate
+
+ s.setPublicationDate(d)
+
+ if (source.getPublisher != null && !source.getPublisher.isEmpty) {
+ s.setPublisher(source.getPublisher)
+ }
+
+ val semanticRelation = relations.getOrElse(relation.getRelClass.toLowerCase, null)
+ if (semanticRelation == null)
+ return null
+ s.setRelationship(
+ new ScholixRelationship(semanticRelation.original, "datacite", semanticRelation.inverse)
+ )
+ s.setSource(source)
+
+ s
+ }
+
+ def scholixFromSource(relation: Relation, source: ScholixSummary): Scholix = {
+
+ if (relation == null || source == null)
+ return null
+
+ val s = new Scholix
+
+ var l: List[ScholixEntityId] = extractCollectedFrom(relation)
+ if (l.isEmpty)
+ l = extractCollectedFrom(source)
+ if (l.isEmpty)
+ return null
+
+ s.setLinkprovider(l.asJava)
+
+ var d = extractRelationDate(relation)
+ if (d == null)
+ d = extractRelationDate(source)
+
+ s.setPublicationDate(d)
+
+ if (source.getPublisher != null && !source.getPublisher.isEmpty) {
+ val l: List[ScholixEntityId] = source.getPublisher.asScala
+ .map { p =>
+ new ScholixEntityId(p, null)
+ }(collection.breakOut)
+
+ if (l.nonEmpty)
+ s.setPublisher(l.asJava)
+ }
+
+ val semanticRelation = relations.getOrElse(relation.getRelClass.toLowerCase, null)
+ if (semanticRelation == null)
+ return null
+ s.setRelationship(
+ new ScholixRelationship(semanticRelation.original, "datacite", semanticRelation.inverse)
+ )
+ s.setSource(generateScholixResourceFromSummary(source))
+
+ s
+ }
+
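+ /** Pairs each persistent identifier with the first instance URL containing its value, if any. */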
+ def findURLForPID(
+ pidValue: List[StructuredProperty],
+ urls: List[String]
+ ): List[(StructuredProperty, String)] = {
+ pidValue.map { p =>
+ val pv = p.getValue
+
+ val r = urls.find(u => u.toLowerCase.contains(pv.toLowerCase))
+ (p, r.orNull)
+ }
+ }
+
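+ /** Extracts the typed persistent identifiers of a result, resolving each PID to one of the URLs exposed by its instances. */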
+ def extractTypedIdentifierFromInstance(r: Result): List[ScholixIdentifier] = {
+ if (r.getInstance() == null || r.getInstance().isEmpty)
+ return List()
+ r.getInstance()
+ .asScala
+ .filter(i => i.getUrl != null && !i.getUrl.isEmpty)
+ .filter(i => i.getPid != null && i.getUrl != null)
+ .flatMap(i => findURLForPID(i.getPid.asScala.toList, i.getUrl.asScala.toList))
+ .map(i => new ScholixIdentifier(i._1.getValue, i._1.getQualifier.getClassid, i._2))
+ .distinct
+ .toList
+ }
+
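+ /** Maps a Result into a ScholixSummary; returns null when no persistent identifier can be resolved from its instances. */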
+ def resultToSummary(r: Result): ScholixSummary = {
+ val s = new ScholixSummary
+ s.setId(r.getId)
+ if (r.getPid == null || r.getPid.isEmpty)
+ return null
+
+ val persistentIdentifiers: List[ScholixIdentifier] = extractTypedIdentifierFromInstance(r)
+ if (persistentIdentifiers.isEmpty)
+ return null
+ s.setLocalIdentifier(persistentIdentifiers.asJava)
+ if (r.isInstanceOf[Publication])
+ s.setTypology(Typology.publication)
+ else
+ s.setTypology(Typology.dataset)
+
+ s.setSubType(r.getInstance().get(0).getInstancetype.getClassname)
+
+ if (r.getTitle != null && r.getTitle.asScala.nonEmpty) {
+ val titles: List[String] = r.getTitle.asScala.map(t => t.getValue).toList
+ if (titles.nonEmpty)
+ s.setTitle(titles.asJava)
+ else
+ return null
+ }
+
+ if (r.getAuthor != null && !r.getAuthor.isEmpty) {
+ val authors: List[String] = r.getAuthor.asScala.map(a => a.getFullname).toList
+ if (authors.nonEmpty)
+ s.setAuthor(authors.asJava)
+ }
+ if (r.getInstance() != null) {
+ val dt: List[String] = r
+ .getInstance()
+ .asScala
+ .filter(i => i.getDateofacceptance != null)
+ .map(i => i.getDateofacceptance.getValue)
+ .toList
+ if (dt.nonEmpty)
+ s.setDate(dt.distinct.asJava)
+ }
+ if (r.getDescription != null && !r.getDescription.isEmpty) {
+ val d = r.getDescription.asScala.find(f => f != null && f.getValue != null)
+ if (d.isDefined)
+ s.setDescription(d.get.getValue)
+ }
+
+ if (r.getSubject != null && !r.getSubject.isEmpty) {
+ val subjects: List[SchemeValue] = r.getSubject.asScala
+ .map(s => new SchemeValue(s.getQualifier.getClassname, s.getValue))
+ .toList
+ if (subjects.nonEmpty)
+ s.setSubject(subjects.asJava)
+ }
+
+ if (r.getPublisher != null)
+ s.setPublisher(List(r.getPublisher.getValue).asJava)
+
+ if (r.getCollectedfrom != null && !r.getCollectedfrom.isEmpty) {
+ val cf: List[CollectedFromType] = r.getCollectedfrom.asScala
+ .map(c => new CollectedFromType(c.getValue, c.getKey, "complete"))
+ .toList
+ if (cf.nonEmpty)
+ s.setDatasources(cf.distinct.asJava)
+ }
+
+ s.setRelatedDatasets(0)
+ s.setRelatedPublications(0)
+ s.setRelatedUnknown(0)
+
+ s
+ }
+
+}
diff --git a/dhp-common/src/test/java/eu/dnetlib/dhp/common/MdStoreClientTest.java b/dhp-common/src/test/java/eu/dnetlib/dhp/common/MdStoreClientTest.java
new file mode 100644
index 000000000..f87f6e313
--- /dev/null
+++ b/dhp-common/src/test/java/eu/dnetlib/dhp/common/MdStoreClientTest.java
@@ -0,0 +1,36 @@
+
+package eu.dnetlib.dhp.common;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.List;
+
+import org.junit.jupiter.api.Test;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+public class MdStoreClientTest {
+
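+ // Disabled (@Test commented out): requires a MongoDB instance reachable at mongodb://localhost:27017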
+ // @Test
+ public void testMongoCollection() throws IOException {
+ final MdstoreClient client = new MdstoreClient("mongodb://localhost:27017", "mdstore");
+
+ final ObjectMapper mapper = new ObjectMapper();
+
+ final List infos = client.mdStoreWithTimestamp("ODF", "store", "cleaned");
+
+ infos.forEach(System.out::println);
+
+ final String s = mapper.writeValueAsString(infos);
+
+ Path fileName = Paths.get("/Users/sandro/mdstore_info.json");
+
+ // Writing into the file
+ Files.write(fileName, s.getBytes(StandardCharsets.UTF_8));
+
+ }
+}
diff --git a/dhp-common/src/test/java/eu/dnetlib/dhp/common/api/ZenodoAPIClientTest.java b/dhp-common/src/test/java/eu/dnetlib/dhp/common/api/ZenodoAPIClientTest.java
index 2ccaed3e4..92c1dcda3 100644
--- a/dhp-common/src/test/java/eu/dnetlib/dhp/common/api/ZenodoAPIClientTest.java
+++ b/dhp-common/src/test/java/eu/dnetlib/dhp/common/api/ZenodoAPIClientTest.java
@@ -33,7 +33,7 @@ class ZenodoAPIClientTest {
InputStream is = new FileInputStream(file);
- Assertions.assertEquals(200, client.uploadIS(is, "COVID-19.json.gz", file.length()));
+ Assertions.assertEquals(200, client.uploadIS(is, "COVID-19.json.gz"));
String metadata = IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/common/api/metadata.json"));
@@ -56,7 +56,7 @@ class ZenodoAPIClientTest {
InputStream is = new FileInputStream(file);
- Assertions.assertEquals(200, client.uploadIS(is, "COVID-19.json.gz", file.length()));
+ Assertions.assertEquals(200, client.uploadIS(is, "COVID-19.json.gz"));
String metadata = IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/common/api/metadata.json"));
@@ -80,7 +80,7 @@ class ZenodoAPIClientTest {
InputStream is = new FileInputStream(file);
- Assertions.assertEquals(200, client.uploadIS(is, "newVersion_deposition", file.length()));
+ Assertions.assertEquals(200, client.uploadIS(is, "newVersion_deposition"));
Assertions.assertEquals(202, client.publish());
@@ -100,7 +100,7 @@ class ZenodoAPIClientTest {
InputStream is = new FileInputStream(file);
- Assertions.assertEquals(200, client.uploadIS(is, "newVersion_deposition", file.length()));
+ Assertions.assertEquals(200, client.uploadIS(is, "newVersion_deposition"));
Assertions.assertEquals(202, client.publish());
diff --git a/dhp-common/src/test/java/eu/dnetlib/dhp/oa/merge/AuthorMergerTest.java b/dhp-common/src/test/java/eu/dnetlib/dhp/oa/merge/AuthorMergerTest.java
deleted file mode 100644
index 3a7a41a1b..000000000
--- a/dhp-common/src/test/java/eu/dnetlib/dhp/oa/merge/AuthorMergerTest.java
+++ /dev/null
@@ -1,100 +0,0 @@
-
-package eu.dnetlib.dhp.oa.merge;
-
-import java.io.BufferedReader;
-import java.io.FileReader;
-import java.io.IOException;
-import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.stream.Collectors;
-
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-
-import eu.dnetlib.dhp.schema.oaf.Author;
-import eu.dnetlib.dhp.schema.oaf.Publication;
-import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
-import eu.dnetlib.pace.util.MapDocumentUtil;
-import scala.Tuple2;
-
-class AuthorMergerTest {
-
- private String publicationsBasePath;
-
- private List<List<Author>> authors;
-
- @BeforeEach
- public void setUp() throws Exception {
-
- publicationsBasePath = Paths
- .get(AuthorMergerTest.class.getResource("/eu/dnetlib/dhp/oa/merge").toURI())
- .toFile()
- .getAbsolutePath();
-
- authors = readSample(publicationsBasePath + "/publications_with_authors.json", Publication.class)
- .stream()
- .map(p -> p._2().getAuthor())
- .collect(Collectors.toList());
-
- }
-
- @Test
- void mergeTest() { // used in the dedup: threshold set to 0.95
-
- for (List<Author> authors1 : authors) {
- System.out.println("List " + (authors.indexOf(authors1) + 1));
- for (Author author : authors1) {
- System.out.println(authorToString(author));
- }
- }
-
- List<Author> merge = AuthorMerger.merge(authors);
-
- System.out.println("Merge ");
- for (Author author : merge) {
- System.out.println(authorToString(author));
- }
-
- Assertions.assertEquals(7, merge.size());
-
- }
-
- public <T> List<Tuple2<String, T>> readSample(String path, Class<T> clazz) {
- List<Tuple2<String, T>> res = new ArrayList<>();
- BufferedReader reader;
- try {
- reader = new BufferedReader(new FileReader(path));
- String line = reader.readLine();
- while (line != null) {
- res
- .add(
- new Tuple2<>(
- MapDocumentUtil.getJPathString("$.id", line),
- new ObjectMapper().readValue(line, clazz)));
- // read next line
- line = reader.readLine();
- }
- reader.close();
- } catch (IOException e) {
- e.printStackTrace();
- }
-
- return res;
- }
-
- public String authorToString(Author a) {
-
- String print = "Fullname = ";
- print += a.getFullname() + " pid = [";
- if (a.getPid() != null)
- for (StructuredProperty sp : a.getPid()) {
- print += sp.toComparableString() + " ";
- }
- print += "]";
- return print;
- }
-}
diff --git a/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/GridCleaningRuleTest.java b/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/GridCleaningRuleTest.java
new file mode 100644
index 000000000..1b9163d46
--- /dev/null
+++ b/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/GridCleaningRuleTest.java
@@ -0,0 +1,18 @@
+
+package eu.dnetlib.dhp.schema.oaf.utils;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import org.junit.jupiter.api.Test;
+
+class GridCleaningRuleTest {
+
+ @Test
+ void testCleaning() {
+ assertEquals("grid.493784.5", GridCleaningRule.clean("grid.493784.5"));
+ assertEquals("grid.493784.5x", GridCleaningRule.clean("grid.493784.5x"));
+ assertEquals("grid.493784.5x", GridCleaningRule.clean("493784.5x"));
+ assertEquals("", GridCleaningRule.clean("493x784.5x"));
+ }
+
+}
diff --git a/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/ISNICleaningRuleTest.java b/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/ISNICleaningRuleTest.java
new file mode 100644
index 000000000..e51d1e05c
--- /dev/null
+++ b/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/ISNICleaningRuleTest.java
@@ -0,0 +1,19 @@
+
+package eu.dnetlib.dhp.schema.oaf.utils;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import org.junit.jupiter.api.Test;
+
+class ISNICleaningRuleTest {
+
+ @Test
+ void testCleaning() {
+ assertEquals("0000000463436020", ISNICleaningRule.clean("0000 0004 6343 6020"));
+ assertEquals("0000000463436020", ISNICleaningRule.clean("0000000463436020"));
+ assertEquals("", ISNICleaningRule.clean("Q30256598"));
+ assertEquals("0000000493403529", ISNICleaningRule.clean("ISNI:0000000493403529"));
+ assertEquals("000000008614884X", ISNICleaningRule.clean("0000 0000 8614 884X"));
+ }
+
+}
diff --git a/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/OafMapperUtilsTest.java b/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/OafMapperUtilsTest.java
index 4068f0abb..9111ac2df 100644
--- a/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/OafMapperUtilsTest.java
+++ b/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/OafMapperUtilsTest.java
@@ -44,105 +44,104 @@ class OafMapperUtilsTest {
@Test
void testDateValidation() {
- assertTrue(GraphCleaningFunctions.doCleanDate("2016-05-07T12:41:19.202Z ").isPresent());
- assertTrue(GraphCleaningFunctions.doCleanDate("2020-09-10 11:08:52 ").isPresent());
- assertTrue(GraphCleaningFunctions.doCleanDate(" 2016-04-05").isPresent());
+ assertNotNull(GraphCleaningFunctions.cleanDate("2016-05-07T12:41:19.202Z "));
+ assertNotNull(GraphCleaningFunctions.cleanDate("2020-09-10 11:08:52 "));
+ assertNotNull(GraphCleaningFunctions.cleanDate(" 2016-04-05"));
- assertEquals("2016-04-05", GraphCleaningFunctions.doCleanDate("2016 Apr 05").get());
+ assertEquals("2016-04-05", GraphCleaningFunctions.cleanDate("2016 Apr 05"));
- assertEquals("2009-05-08", GraphCleaningFunctions.doCleanDate("May 8, 2009 5:57:51 PM").get());
- assertEquals("1970-10-07", GraphCleaningFunctions.doCleanDate("oct 7, 1970").get());
- assertEquals("1970-10-07", GraphCleaningFunctions.doCleanDate("oct 7, '70").get());
- assertEquals("1970-10-07", GraphCleaningFunctions.doCleanDate("oct. 7, 1970").get());
- assertEquals("1970-10-07", GraphCleaningFunctions.doCleanDate("oct. 7, 70").get());
- assertEquals("2006-01-02", GraphCleaningFunctions.doCleanDate("Mon Jan 2 15:04:05 2006").get());
- assertEquals("2006-01-02", GraphCleaningFunctions.doCleanDate("Mon Jan 2 15:04:05 MST 2006").get());
- assertEquals("2006-01-02", GraphCleaningFunctions.doCleanDate("Mon Jan 02 15:04:05 -0700 2006").get());
- assertEquals("2006-01-02", GraphCleaningFunctions.doCleanDate("Monday, 02-Jan-06 15:04:05 MST").get());
- assertEquals("2006-01-02", GraphCleaningFunctions.doCleanDate("Mon, 02 Jan 2006 15:04:05 MST").get());
- assertEquals("2017-07-11", GraphCleaningFunctions.doCleanDate("Tue, 11 Jul 2017 16:28:13 +0200 (CEST)").get());
- assertEquals("2006-01-02", GraphCleaningFunctions.doCleanDate("Mon, 02 Jan 2006 15:04:05 -0700").get());
- assertEquals("2018-01-04", GraphCleaningFunctions.doCleanDate("Thu, 4 Jan 2018 17:53:36 +0000").get());
- assertEquals("2015-08-10", GraphCleaningFunctions.doCleanDate("Mon Aug 10 15:44:11 UTC+0100 2015").get());
+ assertEquals("2009-05-08", GraphCleaningFunctions.cleanDate("May 8, 2009 5:57:51 PM"));
+ assertEquals("1970-10-07", GraphCleaningFunctions.cleanDate("oct 7, 1970"));
+ assertEquals("1970-10-07", GraphCleaningFunctions.cleanDate("oct 7, '70"));
+ assertEquals("1970-10-07", GraphCleaningFunctions.cleanDate("oct. 7, 1970"));
+ assertEquals("1970-10-07", GraphCleaningFunctions.cleanDate("oct. 7, 70"));
+ assertEquals("2006-01-02", GraphCleaningFunctions.cleanDate("Mon Jan 2 15:04:05 2006"));
+ assertEquals("2006-01-02", GraphCleaningFunctions.cleanDate("Mon Jan 2 15:04:05 MST 2006"));
+ assertEquals("2006-01-02", GraphCleaningFunctions.cleanDate("Mon Jan 02 15:04:05 -0700 2006"));
+ assertEquals("2006-01-02", GraphCleaningFunctions.cleanDate("Monday, 02-Jan-06 15:04:05 MST"));
+ assertEquals("2006-01-02", GraphCleaningFunctions.cleanDate("Mon, 02 Jan 2006 15:04:05 MST"));
+ assertEquals("2017-07-11", GraphCleaningFunctions.cleanDate("Tue, 11 Jul 2017 16:28:13 +0200 (CEST)"));
+ assertEquals("2006-01-02", GraphCleaningFunctions.cleanDate("Mon, 02 Jan 2006 15:04:05 -0700"));
+ assertEquals("2018-01-04", GraphCleaningFunctions.cleanDate("Thu, 4 Jan 2018 17:53:36 +0000"));
+ assertEquals("2015-08-10", GraphCleaningFunctions.cleanDate("Mon Aug 10 15:44:11 UTC+0100 2015"));
assertEquals(
"2015-07-03",
- GraphCleaningFunctions.doCleanDate("Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time)").get());
- assertEquals("2012-09-17", GraphCleaningFunctions.doCleanDate("September 17, 2012 10:09am").get());
- assertEquals("2012-09-17", GraphCleaningFunctions.doCleanDate("September 17, 2012 at 10:09am PST-08").get());
- assertEquals("2012-09-17", GraphCleaningFunctions.doCleanDate("September 17, 2012, 10:10:09").get());
- assertEquals("1970-10-07", GraphCleaningFunctions.doCleanDate("October 7, 1970").get());
- assertEquals("1970-10-07", GraphCleaningFunctions.doCleanDate("October 7th, 1970").get());
- assertEquals("2006-02-12", GraphCleaningFunctions.doCleanDate("12 Feb 2006, 19:17").get());
- assertEquals("2006-02-12", GraphCleaningFunctions.doCleanDate("12 Feb 2006 19:17").get());
- assertEquals("1970-10-07", GraphCleaningFunctions.doCleanDate("7 oct 70").get());
- assertEquals("1970-10-07", GraphCleaningFunctions.doCleanDate("7 oct 1970").get());
- assertEquals("2013-02-03", GraphCleaningFunctions.doCleanDate("03 February 2013").get());
- assertEquals("2013-07-01", GraphCleaningFunctions.doCleanDate("1 July 2013").get());
- assertEquals("2013-02-03", GraphCleaningFunctions.doCleanDate("2013-Feb-03").get());
- assertEquals("2014-03-31", GraphCleaningFunctions.doCleanDate("3/31/2014").get());
- assertEquals("2014-03-31", GraphCleaningFunctions.doCleanDate("03/31/2014").get());
- assertEquals("1971-08-21", GraphCleaningFunctions.doCleanDate("08/21/71").get());
- assertEquals("1971-01-08", GraphCleaningFunctions.doCleanDate("8/1/71").get());
- assertEquals("2014-08-04", GraphCleaningFunctions.doCleanDate("4/8/2014 22:05").get());
- assertEquals("2014-08-04", GraphCleaningFunctions.doCleanDate("04/08/2014 22:05").get());
- assertEquals("2014-08-04", GraphCleaningFunctions.doCleanDate("4/8/14 22:05").get());
- assertEquals("2014-02-04", GraphCleaningFunctions.doCleanDate("04/2/2014 03:00:51").get());
- assertEquals("1965-08-08", GraphCleaningFunctions.doCleanDate("8/8/1965 12:00:00 AM").get());
- assertEquals("1965-08-08", GraphCleaningFunctions.doCleanDate("8/8/1965 01:00:01 PM").get());
- assertEquals("1965-08-08", GraphCleaningFunctions.doCleanDate("8/8/1965 01:00 PM").get());
- assertEquals("1965-08-08", GraphCleaningFunctions.doCleanDate("8/8/1965 1:00 PM").get());
- assertEquals("1965-08-08", GraphCleaningFunctions.doCleanDate("8/8/1965 12:00 AM").get());
- assertEquals("2014-02-04", GraphCleaningFunctions.doCleanDate("4/02/2014 03:00:51").get());
- assertEquals("2012-03-19", GraphCleaningFunctions.doCleanDate("03/19/2012 10:11:59").get());
- assertEquals("2012-03-19", GraphCleaningFunctions.doCleanDate("03/19/2012 10:11:59.3186369").get());
- assertEquals("2014-03-31", GraphCleaningFunctions.doCleanDate("2014/3/31").get());
- assertEquals("2014-03-31", GraphCleaningFunctions.doCleanDate("2014/03/31").get());
- assertEquals("2014-04-08", GraphCleaningFunctions.doCleanDate("2014/4/8 22:05").get());
- assertEquals("2014-04-08", GraphCleaningFunctions.doCleanDate("2014/04/08 22:05").get());
- assertEquals("2014-04-02", GraphCleaningFunctions.doCleanDate("2014/04/2 03:00:51").get());
- assertEquals("2014-04-02", GraphCleaningFunctions.doCleanDate("2014/4/02 03:00:51").get());
- assertEquals("2012-03-19", GraphCleaningFunctions.doCleanDate("2012/03/19 10:11:59").get());
- assertEquals("2012-03-19", GraphCleaningFunctions.doCleanDate("2012/03/19 10:11:59.3186369").get());
- assertEquals("2014-04-08", GraphCleaningFunctions.doCleanDate("2014年04月08日").get());
- assertEquals("2006-01-02", GraphCleaningFunctions.doCleanDate("2006-01-02T15:04:05+0000").get());
- assertEquals("2009-08-13", GraphCleaningFunctions.doCleanDate("2009-08-12T22:15:09-07:00").get());
- assertEquals("2009-08-12", GraphCleaningFunctions.doCleanDate("2009-08-12T22:15:09").get());
- assertEquals("2009-08-12", GraphCleaningFunctions.doCleanDate("2009-08-12T22:15:09Z").get());
- assertEquals("2014-04-26", GraphCleaningFunctions.doCleanDate("2014-04-26 17:24:37.3186369").get());
- assertEquals("2012-08-03", GraphCleaningFunctions.doCleanDate("2012-08-03 18:31:59.257000000").get());
- assertEquals("2014-04-26", GraphCleaningFunctions.doCleanDate("2014-04-26 17:24:37.123").get());
- assertEquals("2013-04-01", GraphCleaningFunctions.doCleanDate("2013-04-01 22:43").get());
- assertEquals("2013-04-01", GraphCleaningFunctions.doCleanDate("2013-04-01 22:43:22").get());
- assertEquals("2014-12-16", GraphCleaningFunctions.doCleanDate("2014-12-16 06:20:00 UTC").get());
- assertEquals("2014-12-16", GraphCleaningFunctions.doCleanDate("2014-12-16 06:20:00 GMT").get());
- assertEquals("2014-04-26", GraphCleaningFunctions.doCleanDate("2014-04-26 05:24:37 PM").get());
- assertEquals("2014-04-26", GraphCleaningFunctions.doCleanDate("2014-04-26 13:13:43 +0800").get());
- assertEquals("2014-04-26", GraphCleaningFunctions.doCleanDate("2014-04-26 13:13:43 +0800 +08").get());
- assertEquals("2014-04-26", GraphCleaningFunctions.doCleanDate("2014-04-26 13:13:44 +09:00").get());
- assertEquals("2012-08-03", GraphCleaningFunctions.doCleanDate("2012-08-03 18:31:59.257000000 +0000 UTC").get());
- assertEquals("2015-09-30", GraphCleaningFunctions.doCleanDate("2015-09-30 18:48:56.35272715 +0000 UTC").get());
- assertEquals("2015-02-18", GraphCleaningFunctions.doCleanDate("2015-02-18 00:12:00 +0000 GMT").get());
- assertEquals("2015-02-18", GraphCleaningFunctions.doCleanDate("2015-02-18 00:12:00 +0000 UTC").get());
+ GraphCleaningFunctions.cleanDate("Fri Jul 03 2015 18:04:07 GMT+0100 (GMT Daylight Time)"));
+ assertEquals("2012-09-17", GraphCleaningFunctions.cleanDate("September 17, 2012 10:09am"));
+ assertEquals("2012-09-17", GraphCleaningFunctions.cleanDate("September 17, 2012 at 10:09am PST-08"));
+ assertEquals("2012-09-17", GraphCleaningFunctions.cleanDate("September 17, 2012, 10:10:09"));
+ assertEquals("1970-10-07", GraphCleaningFunctions.cleanDate("October 7, 1970"));
+ assertEquals("1970-10-07", GraphCleaningFunctions.cleanDate("October 7th, 1970"));
+ assertEquals("2006-02-12", GraphCleaningFunctions.cleanDate("12 Feb 2006, 19:17"));
+ assertEquals("2006-02-12", GraphCleaningFunctions.cleanDate("12 Feb 2006 19:17"));
+ assertEquals("1970-10-07", GraphCleaningFunctions.cleanDate("7 oct 70"));
+ assertEquals("1970-10-07", GraphCleaningFunctions.cleanDate("7 oct 1970"));
+ assertEquals("2013-02-03", GraphCleaningFunctions.cleanDate("03 February 2013"));
+ assertEquals("2013-07-01", GraphCleaningFunctions.cleanDate("1 July 2013"));
+ assertEquals("2013-02-03", GraphCleaningFunctions.cleanDate("2013-Feb-03"));
+ assertEquals("2014-03-31", GraphCleaningFunctions.cleanDate("3/31/2014"));
+ assertEquals("2014-03-31", GraphCleaningFunctions.cleanDate("03/31/2014"));
+ assertEquals("1971-08-21", GraphCleaningFunctions.cleanDate("08/21/71"));
+ assertEquals("1971-01-08", GraphCleaningFunctions.cleanDate("8/1/71"));
+ assertEquals("2014-08-04", GraphCleaningFunctions.cleanDate("4/8/2014 22:05"));
+ assertEquals("2014-08-04", GraphCleaningFunctions.cleanDate("04/08/2014 22:05"));
+ assertEquals("2014-08-04", GraphCleaningFunctions.cleanDate("4/8/14 22:05"));
+ assertEquals("2014-02-04", GraphCleaningFunctions.cleanDate("04/2/2014 03:00:51"));
+ assertEquals("1965-08-08", GraphCleaningFunctions.cleanDate("8/8/1965 12:00:00 AM"));
+ assertEquals("1965-08-08", GraphCleaningFunctions.cleanDate("8/8/1965 01:00:01 PM"));
+ assertEquals("1965-08-08", GraphCleaningFunctions.cleanDate("8/8/1965 01:00 PM"));
+ assertEquals("1965-08-08", GraphCleaningFunctions.cleanDate("8/8/1965 1:00 PM"));
+ assertEquals("1965-08-08", GraphCleaningFunctions.cleanDate("8/8/1965 12:00 AM"));
+ assertEquals("2014-02-04", GraphCleaningFunctions.cleanDate("4/02/2014 03:00:51"));
+ assertEquals("2012-03-19", GraphCleaningFunctions.cleanDate("03/19/2012 10:11:59"));
+ assertEquals("2012-03-19", GraphCleaningFunctions.cleanDate("03/19/2012 10:11:59.3186369"));
+ assertEquals("2014-03-31", GraphCleaningFunctions.cleanDate("2014/3/31"));
+ assertEquals("2014-03-31", GraphCleaningFunctions.cleanDate("2014/03/31"));
+ assertEquals("2014-04-08", GraphCleaningFunctions.cleanDate("2014/4/8 22:05"));
+ assertEquals("2014-04-08", GraphCleaningFunctions.cleanDate("2014/04/08 22:05"));
+ assertEquals("2014-04-02", GraphCleaningFunctions.cleanDate("2014/04/2 03:00:51"));
+ assertEquals("2014-04-02", GraphCleaningFunctions.cleanDate("2014/4/02 03:00:51"));
+ assertEquals("2012-03-19", GraphCleaningFunctions.cleanDate("2012/03/19 10:11:59"));
+ assertEquals("2012-03-19", GraphCleaningFunctions.cleanDate("2012/03/19 10:11:59.3186369"));
+ assertEquals("2014-04-08", GraphCleaningFunctions.cleanDate("2014年04月08日"));
+ assertEquals("2006-01-02", GraphCleaningFunctions.cleanDate("2006-01-02T15:04:05+0000"));
+ assertEquals("2009-08-13", GraphCleaningFunctions.cleanDate("2009-08-12T22:15:09-07:00"));
+ assertEquals("2009-08-12", GraphCleaningFunctions.cleanDate("2009-08-12T22:15:09"));
+ assertEquals("2014-04-26", GraphCleaningFunctions.cleanDate("2014-04-26 17:24:37.3186369"));
+ assertEquals("2012-08-03", GraphCleaningFunctions.cleanDate("2012-08-03 18:31:59.257000000"));
+ assertEquals("2014-04-26", GraphCleaningFunctions.cleanDate("2014-04-26 17:24:37.123"));
+ assertEquals("2013-04-01", GraphCleaningFunctions.cleanDate("2013-04-01 22:43"));
+ assertEquals("2013-04-01", GraphCleaningFunctions.cleanDate("2013-04-01 22:43:22"));
+ assertEquals("2014-12-16", GraphCleaningFunctions.cleanDate("2014-12-16 06:20:00 UTC"));
+ assertEquals("2014-12-16", GraphCleaningFunctions.cleanDate("2014-12-16 06:20:00 GMT"));
+ assertEquals("2014-04-26", GraphCleaningFunctions.cleanDate("2014-04-26 05:24:37 PM"));
+ assertEquals("2014-04-26", GraphCleaningFunctions.cleanDate("2014-04-26 13:13:43 +0800"));
+ assertEquals("2014-04-26", GraphCleaningFunctions.cleanDate("2014-04-26 13:13:43 +0800 +08"));
+ assertEquals("2014-04-26", GraphCleaningFunctions.cleanDate("2014-04-26 13:13:44 +09:00"));
+ assertEquals("2012-08-03", GraphCleaningFunctions.cleanDate("2012-08-03 18:31:59.257000000 +0000 UTC"));
+ assertEquals("2015-09-30", GraphCleaningFunctions.cleanDate("2015-09-30 18:48:56.35272715 +0000 UTC"));
+ assertEquals("2015-02-18", GraphCleaningFunctions.cleanDate("2015-02-18 00:12:00 +0000 GMT"));
+ assertEquals("2015-02-18", GraphCleaningFunctions.cleanDate("2015-02-18 00:12:00 +0000 UTC"));
assertEquals(
- "2015-02-08", GraphCleaningFunctions.doCleanDate("2015-02-08 03:02:00 +0300 MSK m=+0.000000001").get());
+ "2015-02-08", GraphCleaningFunctions.cleanDate("2015-02-08 03:02:00 +0300 MSK m=+0.000000001"));
assertEquals(
- "2015-02-08", GraphCleaningFunctions.doCleanDate("2015-02-08 03:02:00.001 +0300 MSK m=+0.000000001").get());
- assertEquals("2017-07-19", GraphCleaningFunctions.doCleanDate("2017-07-19 03:21:51+00:00").get());
- assertEquals("2014-04-26", GraphCleaningFunctions.doCleanDate("2014-04-26").get());
- assertEquals("2014-04-01", GraphCleaningFunctions.doCleanDate("2014-04").get());
- assertEquals("2014-01-01", GraphCleaningFunctions.doCleanDate("2014").get());
- assertEquals("2014-05-11", GraphCleaningFunctions.doCleanDate("2014-05-11 08:20:13,787").get());
- assertEquals("2014-03-31", GraphCleaningFunctions.doCleanDate("3.31.2014").get());
- assertEquals("2014-03-31", GraphCleaningFunctions.doCleanDate("03.31.2014").get());
- assertEquals("1971-08-21", GraphCleaningFunctions.doCleanDate("08.21.71").get());
- assertEquals("2014-03-01", GraphCleaningFunctions.doCleanDate("2014.03").get());
- assertEquals("2014-03-30", GraphCleaningFunctions.doCleanDate("2014.03.30").get());
- assertEquals("2014-06-01", GraphCleaningFunctions.doCleanDate("20140601").get());
- assertEquals("2014-07-22", GraphCleaningFunctions.doCleanDate("20140722105203").get());
- assertEquals("2012-03-19", GraphCleaningFunctions.doCleanDate("1332151919").get());
- assertEquals("2013-11-12", GraphCleaningFunctions.doCleanDate("1384216367189").get());
- assertEquals("2013-11-12", GraphCleaningFunctions.doCleanDate("1384216367111222").get());
- assertEquals("2013-11-12", GraphCleaningFunctions.doCleanDate("1384216367111222333").get());
+ "2015-02-08", GraphCleaningFunctions.cleanDate("2015-02-08 03:02:00.001 +0300 MSK m=+0.000000001"));
+ assertEquals("2017-07-19", GraphCleaningFunctions.cleanDate("2017-07-19 03:21:51+00:00"));
+ assertEquals("2014-04-26", GraphCleaningFunctions.cleanDate("2014-04-26"));
+ assertEquals("2014-04-01", GraphCleaningFunctions.cleanDate("2014-04"));
+ assertEquals("2014-01-01", GraphCleaningFunctions.cleanDate("2014"));
+ assertEquals("2014-05-11", GraphCleaningFunctions.cleanDate("2014-05-11 08:20:13,787"));
+ assertEquals("2014-03-31", GraphCleaningFunctions.cleanDate("3.31.2014"));
+ assertEquals("2014-03-31", GraphCleaningFunctions.cleanDate("03.31.2014"));
+ assertEquals("1971-08-21", GraphCleaningFunctions.cleanDate("08.21.71"));
+ assertEquals("2014-03-01", GraphCleaningFunctions.cleanDate("2014.03"));
+ assertEquals("2014-03-30", GraphCleaningFunctions.cleanDate("2014.03.30"));
+ assertEquals("2014-06-01", GraphCleaningFunctions.cleanDate("20140601"));
+ assertEquals("2014-07-22", GraphCleaningFunctions.cleanDate("20140722105203"));
+ assertEquals("2012-03-19", GraphCleaningFunctions.cleanDate("1332151919"));
+ assertEquals("2013-11-12", GraphCleaningFunctions.cleanDate("1384216367189"));
+ assertEquals("2013-11-12", GraphCleaningFunctions.cleanDate("1384216367111222"));
+ assertEquals("2013-11-12", GraphCleaningFunctions.cleanDate("1384216367111222333"));
}
@@ -185,6 +184,22 @@ class OafMapperUtilsTest {
.getClassid());
}
+ @Test
+ void testDelegatedAuthority() throws IOException {
+ Dataset d1 = read("dataset_2.json", Dataset.class);
+ Dataset d2 = read("dataset_delegated.json", Dataset.class);
+
+ assertEquals(1, d2.getCollectedfrom().size());
+ assertTrue(cfId(d2.getCollectedfrom()).contains(ModelConstants.ZENODO_OD_ID));
+
+ Result res = OafMapperUtils.mergeResults(d1, d2);
+
+ assertEquals(d2, res);
+
+ System.out.println(OBJECT_MAPPER.writeValueAsString(res));
+
+ }
+
 protected HashSet<String> cfId(List<KeyValue> collectedfrom) {
return collectedfrom.stream().map(KeyValue::getKey).collect(Collectors.toCollection(HashSet::new));
}
diff --git a/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/PICCleaningRuleTest.java b/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/PICCleaningRuleTest.java
new file mode 100644
index 000000000..3736033c3
--- /dev/null
+++ b/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/PICCleaningRuleTest.java
@@ -0,0 +1,19 @@
+
+package eu.dnetlib.dhp.schema.oaf.utils;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import org.junit.jupiter.api.Test;
+
+class PICCleaningRuleTest {
+
+ @Test
+ void testCleaning() {
+ assertEquals("887624982", PICCleaningRule.clean("887624982"));
+ assertEquals("", PICCleaningRule.clean("887 624982"));
+ assertEquals("887624982", PICCleaningRule.clean(" 887624982 "));
+ assertEquals("887624982", PICCleaningRule.clean(" 887624982x "));
+ assertEquals("887624982", PICCleaningRule.clean(" 88762498200 "));
+ }
+
+}
diff --git a/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/PmcCleaningRuleTest.java b/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/PmcCleaningRuleTest.java
new file mode 100644
index 000000000..e53ebae89
--- /dev/null
+++ b/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/PmcCleaningRuleTest.java
@@ -0,0 +1,19 @@
+
+package eu.dnetlib.dhp.schema.oaf.utils;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import org.junit.jupiter.api.Test;
+
+class PmcCleaningRuleTest {
+
+ @Test
+ void testCleaning() {
+ assertEquals("PMC1234", PmcCleaningRule.clean("PMC1234"));
+ assertEquals("PMC1234", PmcCleaningRule.clean(" PMC1234"));
+ assertEquals("PMC12345678", PmcCleaningRule.clean("PMC12345678"));
+ assertEquals("PMC12345678", PmcCleaningRule.clean("PMC123456789"));
+ assertEquals("PMC12345678", PmcCleaningRule.clean("PMC 12345678"));
+ }
+
+}
diff --git a/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/PmidCleaningRuleTest.java b/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/PmidCleaningRuleTest.java
new file mode 100644
index 000000000..9562adf7e
--- /dev/null
+++ b/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/PmidCleaningRuleTest.java
@@ -0,0 +1,18 @@
+
+package eu.dnetlib.dhp.schema.oaf.utils;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import org.junit.jupiter.api.Test;
+
+class PmidCleaningRuleTest {
+
+ @Test
+ void testCleaning() {
+ assertEquals("1234", PmidCleaningRule.clean("01234"));
+ assertEquals("1234567", PmidCleaningRule.clean("0123 4567"));
+ assertEquals("123", PmidCleaningRule.clean("0123x4567"));
+ assertEquals("", PmidCleaningRule.clean("abc"));
+ }
+
+}
diff --git a/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/RorCleaningRuleTest.java b/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/RorCleaningRuleTest.java
new file mode 100644
index 000000000..5d5c03959
--- /dev/null
+++ b/dhp-common/src/test/java/eu/dnetlib/dhp/schema/oaf/utils/RorCleaningRuleTest.java
@@ -0,0 +1,17 @@
+
+package eu.dnetlib.dhp.schema.oaf.utils;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import org.junit.jupiter.api.Test;
+
+class RorCleaningRuleTest {
+
+ @Test
+ void testCleaning() {
+ assertEquals("https://ror.org/05rpz9w55", RorCleaningRule.clean("https://ror.org/05rpz9w55"));
+ assertEquals("https://ror.org/05rpz9w55", RorCleaningRule.clean("05rpz9w55"));
+ assertEquals("", RorCleaningRule.clean("05rpz9w_55"));
+ }
+
+}
diff --git a/dhp-common/src/test/resources/eu/dnetlib/dhp/oa/merge/publications_with_authors.json b/dhp-common/src/test/resources/eu/dnetlib/dhp/oa/merge/publications_with_authors.json
deleted file mode 100644
index 600181ba5..000000000
--- a/dhp-common/src/test/resources/eu/dnetlib/dhp/oa/merge/publications_with_authors.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{ "journal":{ "dataInfo":null, "conferenceplace":null, "issnPrinted":"0009-9260", "issnOnline":null, "issnLinking":null, "ep":"636", "iss":null, "sp":"632", "vol":"55", "edition":null, "conferencedate":null, "name":"Clinical Radiology" }, "measures":null, "author":[ { "rank":null, "fullname":"KARL TURETSCHEK", "affiliation":null, "pid":null, "surname":"TURETSCHEK", "name":"KARL" }, { "rank":null, "fullname":"WOLFGANG EBNER", "affiliation":null, "pid":null, "surname":"EBNER", "name":"WOLFGANG" }, { "rank":null, "fullname":"DOMINIK FLEISCHMANN", "affiliation":null, "pid":null, "surname":"FLEISCHMANN", "name":"DOMINIK" }, { "rank":null, "fullname":"PATRICK WUNDERBALDINGER", "affiliation":null, "pid":null, "surname":"WUNDERBALDINGER", "name":"PATRICK" }, { "rank":null, "fullname":"LUDWIG ERLACHER", "affiliation":null, "pid":null, "surname":"ERLACHER", "name":"LUDWIG" }, { "rank":null, "fullname":"THOMAS ZONTSICH", "affiliation":null, "pid":null, "surname":"ZONTSICH", "name":"THOMAS" }, { "rank":null, "fullname":"ALEXANDER A. BANKIER", "affiliation":null, "pid":null, "surname":"BANKIER", "name":"ALEXANDER A." } ], "resulttype":{ "classid":"publication", "schemeid":"dnet:result_typologies", "schemename":"dnet:result_typologies", "classname":"publication"}, "title":[ { "qualifier":{ "classid":"main title", "schemeid":"dnet:dataCite_title", "schemename":"dnet:dataCite_title", "classname":"main title" }, "dataInfo":null, "value":"Early Pulmonary Involvement in Ankylosing Spondylitis: Assessment With Thin-section CT" } ], "relevantdate":[ { "qualifier":{ "classid":"created", "schemeid":"dnet:dataCite_date", "schemename":"dnet:dataCite_date", "classname":"created" }, "dataInfo":null, "value":"2002-09-19T13:54:50Z" } ], "dateofacceptance":{ "dataInfo":null, "value":"2002-09-19T13:54:50Z" }, "publisher":{ "dataInfo":null, "value":"Elsevier BV" }, "embargoenddate":null, "fulltext":null, "contributor":null, "resourcetype":{ "classid":"0001", "schemeid":"dnet:dataCite_resource", "schemename":"dnet:dataCite_resource", "classname":"0001"}, "coverage":null, "bestaccessright":null, "externalReference":null, "format":null, "description":[ ], "source":[ { "dataInfo":null, "value":"Crossref" } ], "subject":[ { "qualifier":{ "classid":"keywords", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"keywords" }, "dataInfo":null, "value":"Radiology Nuclear Medicine and imaging" }, { "qualifier":{ "classid":"keywords", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"keywords" }, "dataInfo":null, "value":"General Medicine" } ], "language":null, "instance":[ { "processingchargecurrency":null, "refereed":null, "instancetype":{ "classid":"0001", "schemeid":"dnet:publication_resource", "schemename":"dnet:publication_resource", "classname":"Article" }, "hostedby":null, "distributionlocation":null, "processingchargeamount":null, "license":{ "dataInfo":null, "value":"https://www.elsevier.com/tdm/userlicense/1.0/" }, "accessright":{ "classid":"RESTRICTED", "schemeid":"dnet:access_modes", "schemename":"dnet:access_modes", "classname":"Restricted" }, "dateofacceptance":{ "dataInfo":null, "value":"2002-09-19T13:54:50Z" }, "collectedfrom":{ "dataInfo":null, "value":"Crossref", "key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2" }, "url":[ "https://api.elsevier.com/content/article/PII:S0009926000904987?httpAccept=text/xml", 
"https://api.elsevier.com/content/article/PII:S0009926000904987?httpAccept=text/plain", "http://dx.doi.org/10.1053/crad.2000.0498" ] } ], "context":null, "country":null, "originalId":[ "S0009926000904987", "10.1053/crad.2000.0498" ], "pid":[ { "qualifier":{ "classid":"doi", "schemeid":"dnet:pid_types", "schemename":"dnet:pid_types", "classname":"doi" }, "dataInfo":null, "value":"10.1053/crad.2000.0498" } ], "dateofcollection":"2020-02-06T20:40:22Z", "dateoftransformation":null, "oaiprovenance":null, "extraInfo":null, "id":"50|doiboost____::994b7e47b9e225ab6d5e14841cb45a7f", "collectedfrom":[ { "dataInfo":null, "value":"Crossref", "key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2" } ], "dataInfo":{ "trust":"0.9", "invisible":false, "inferred":false, "deletedbyinference":false, "inferenceprovenance":null, "provenanceaction":{ "classid":"sysimport:actionset", "schemeid":"dnet:provenanceActions", "schemename":"dnet:provenanceActions", "classname":"sysimport:actionset" } }, "lastupdatetimestamp":1581021622595 }
-{ "journal":null, "measures":null, "author":[ { "rank":null, "fullname":"Dominik Fleischmann", "affiliation":null, "pid":[ { "qualifier":{ "classid":"ORCID", "schemeid":"dnet:pid_types", "schemename":"dnet:pid_types", "classname":"ORCID" }, "dataInfo":{ "trust":"0.91", "invisible":false, "inferred":false, "deletedbyinference":false, "inferenceprovenance":null, "provenanceaction":{ "classid":"sysimport:crosswalk:entityregistry", "schemeid":"dnet:provenanceActions", "schemename":"dnet:provenanceActions", "classname":"Harvested"} }, "value":"0000-0003-0715-0952" } ], "surname":"Fleischmann", "name":"Dominik" } ], "resulttype":{ "classid":"publication", "schemeid":"dnet:result_typologies", "schemename":"dnet:result_typologies", "classname":"publication"}, "title":[ ], "relevantdate":[ ], "dateofacceptance":null, "publisher":null, "embargoenddate":null, "fulltext":[ ], "contributor":[ ], "resourcetype":null, "coverage":[ ], "bestaccessright":null, "externalReference":[ ], "format":[ ], "description":null, "source":[ ], "subject":[ ], "language":null, "instance":[ ], "context":[ ], "country":[ ], "originalId":[ ], "pid":[ { "qualifier":{ "classid":"doi", "schemeid":"dnet:pid_types", "schemename":"dnet:pid_types", "classname":"doi"}, "dataInfo":null, "value":"10.1053/crad.2000.0498" } ], "dateofcollection":null, "dateoftransformation":null, "oaiprovenance":null, "extraInfo":[ ], "id":"50|doiboost____::994b7e47b9e225ab6d5e14841cb45a7f", "collectedfrom":[ { "dataInfo":null, "value":"ORCID", "key":"10|openaire____::806360c771262b4d6770e7cdf04b5c5a" } ], "dataInfo":{ "trust":"0.9", "invisible":false, "inferred":false, "deletedbyinference":false, "inferenceprovenance":null, "provenanceaction":{ "classid":"sysimport:actionset", "schemeid":"dnet:provenanceActions", "schemename":"dnet:provenanceActions", "classname":"sysimport:actionset" } }, "lastupdatetimestamp":null }
-{ "journal":{ "dataInfo":null, "conferenceplace":null, "issnPrinted":"0009-9260", "issnOnline":null, "issnLinking":null, "ep":"636", "iss":"8", "sp":"632", "vol":"55", "edition":null, "conferencedate":null, "name":"Clinical Radiology" }, "measures":null, "author":[ { "rank":null, "fullname":"T. Zontsich", "affiliation":[ { "dataInfo":null, "value":"University of Vienna" } ], "pid":[ { "qualifier":{ "classid":"URL", "schemeid":"dnet:pid_types", "schemename":"dnet:pid_types", "classname":"URL"}, "dataInfo":null, "value":"https://academic.microsoft.com/#/detail/1966908432" } ], "surname":null, "name":null }, { "rank":null, "fullname":"L Erlacher", "affiliation":[ { "dataInfo":null, "value":"University of Vienna" } ], "pid":[ { "qualifier":{ "classid":"URL", "schemeid":"dnet:pid_types", "schemename":"dnet:pid_types", "classname":"URL"}, "dataInfo":null, "value":"https://academic.microsoft.com/#/detail/687931320" } ], "surname":null, "name":null }, { "rank":null, "fullname":"Dominik Fleischmann", "affiliation":[ { "dataInfo":null, "value":"University of Vienna" } ], "pid":[ { "qualifier":{ "classid":"URL", "schemeid":"dnet:pid_types", "schemename":"dnet:pid_types", "classname":"URL"}, "dataInfo":null, "value":"https://academic.microsoft.com/#/detail/2156559961" } ], "surname":null, "name":null }, { "rank":null, "fullname":"Alexander A. Bankier", "affiliation":[ { "dataInfo":null, "value":"University of Vienna" } ], "pid":[ { "qualifier":{ "classid":"URL", "schemeid":"dnet:pid_types", "schemename":"dnet:pid_types", "classname":"URL"}, "dataInfo":null, "value":"https://academic.microsoft.com/#/detail/1107971609" } ], "surname":null, "name":null }, { "rank":null, "fullname":"Patrick Wunderbaldinger", "affiliation":[ { "dataInfo":null, "value":"University of Vienna" } ], "pid":[ { "qualifier":{ "classid":"URL", "schemeid":"dnet:pid_types", "schemename":"dnet:pid_types", "classname":"URL" }, "dataInfo":null, "value":"https://academic.microsoft.com/#/detail/2422340537" } ], "surname":null, "name":null }, { "rank":null, "fullname":"Wolfgang Ebner", "affiliation":null, "pid":[ { "qualifier":{ "classid":"URL", "schemeid":"dnet:pid_types", "schemename":"dnet:pid_types", "classname":"URL" }, "dataInfo":null, "value":"https://academic.microsoft.com/#/detail/2186462571" } ], "surname":null, "name":null }, { "rank":null, "fullname":"K. Turetschek", "affiliation":[ { "dataInfo":null, "value":"University of Vienna" } ], "pid":[ { "qualifier":{ "classid":"URL", "schemeid":"dnet:pid_types", "schemename":"dnet:pid_types", "classname":"URL" }, "dataInfo":null, "value":"https://academic.microsoft.com/#/detail/321765676" } ], "surname":null, "name":null } ], "resulttype":{ "classid":"publication", "schemeid":"dnet:result_typologies", "schemename":"dnet:result_typologies", "classname":"publication" }, "title":[ { "qualifier":{ "classid":"main title", "schemeid":"dnet:dataCite_title", "schemename":"dnet:dataCite_title", "classname":"main title" }, "dataInfo":null, "value":"early pulmonary involvement in ankylosing spondylitis assessment with thin section ct" }, { "qualifier":{ "classid":"alternative title", "schemeid":"dnet:dataCite_title", "schemename":"dnet:dataCite_title", "classname":"alternative title" }, "dataInfo":null, "value":"Early pulmonary involvement in ankylosing spondylitis: assessment with thin-section CT." 
} ], "relevantdate":null, "dateofacceptance":{ "dataInfo":null, "value":"2000-08-01" }, "publisher":{ "dataInfo":null, "value":"Elsevier" }, "embargoenddate":null, "fulltext":null, "contributor":null, "resourcetype":null, "coverage":null, "bestaccessright":null, "externalReference":null, "format":null, "description":[ { "dataInfo":null, "value":"Abstract AIM: To determine the frequency and the distribution of early pulmonary lesions in patients with ankylosing spondylitis (AS) and a normal chest X-ray on thin-section CT and to correlate the CT findings with the results of pulmonary function tests and clinical data. MATERIALS AND METHODS: Twenty-five patients with clinically proven AS and no history of smoking underwent clinical examinations, pulmonary function tests (PFT), chest radiography, and thin-section CT. Four of 25 patients (16%), who had obvious signs on plain films suggestive of pre-existing disorders unrelated to AS were excluded. RESULTS: Fifteen of 21 patients (71%) had abnormalities on thin-section CT. The most frequent abnormalities were thickening of the interlobular septa in seven of 21 patients (33%), mild bronchial wall thickening in (6/21, 29%), pleural thickening and pleuropulmonary irregularities (both 29%) and linear septal thickening (6/21, 29%). In six patients there were no signs of pleuropulmonary involvement. Eight of 15 patients (53%) with abnormal and four of six patients (67%) with normal CT findings revealed mild restrictive lung function impairment. CONCLUSION: Patients with AS but a normal chest radiograph frequently have abnormalities on thin-section CT. As these abnormalities are usually subtle and their extent does not correlate with functional and clinical data, the overall routine impact of thin-section CT in the diagnosis of AS is limited. Turetschek, K , (2000) Clinical Radiology53, 632–636." 
} ], "source":[ { "dataInfo":null, "value":null } ], "subject":[ { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":null, "value":"Complication" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":null, "value":"Chest radiograph" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":{ "trust":"0.580897", "invisible":false, "inferred":false, "deletedbyinference":false, "inferenceprovenance":null, "provenanceaction":{ "classid":"sysimport:actionset", "schemeid":"dnet:provenanceActions", "schemename":"dnet:provenanceActions", "classname":"sysimport:actionset" } }, "value":"medicine.diagnostic_test" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":{ "trust":"0.580897", "invisible":false, "inferred":false, "deletedbyinference":false, "inferenceprovenance":null, "provenanceaction":{ "classid":"sysimport:actionset", "schemeid":"dnet:provenanceActions", "schemename":"dnet:provenanceActions", "classname":"sysimport:actionset" } }, "value":"medicine" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":null, "value":"In patient" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":null, "value":"Radiography" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":{ "trust":"0.4582326", "invisible":false, "inferred":false, "deletedbyinference":false, "inferenceprovenance":null, "provenanceaction":{ "classid":"sysimport:actionset", "schemeid":"dnet:provenanceActions", "schemename":"dnet:provenanceActions", "classname":"sysimport:actionset" } }, "value":"business.industry" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":{ "trust":"0.4582326", "invisible":false, "inferred":false, "deletedbyinference":false, "inferenceprovenance":null, "provenanceaction":{ "classid":"sysimport:actionset", "schemeid":"dnet:provenanceActions", "schemename":"dnet:provenanceActions", "classname":"sysimport:actionset" } }, "value":"business" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":null, "value":"Thin section ct" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph 
classification" }, "dataInfo":null, "value":"Respiratory disease" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":{ "trust":"0.49358836", "invisible":false, "inferred":false, "deletedbyinference":false, "inferenceprovenance":null, "provenanceaction":{ "classid":"sysimport:actionset", "schemeid":"dnet:provenanceActions", "schemename":"dnet:provenanceActions", "classname":"sysimport:actionset" } }, "value":"medicine.disease" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":{ "trust":"0.49358836", "invisible":false, "inferred":false, "deletedbyinference":false, "inferenceprovenance":null, "provenanceaction":{ "classid":"sysimport:actionset", "schemeid":"dnet:provenanceActions", "schemename":"dnet:provenanceActions", "classname":"sysimport:actionset" } }, "value":"medicine" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":null, "value":"Ankylosing spondylitis" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":{ "trust":"0.49937168", "invisible":false, "inferred":false, "deletedbyinference":false, "inferenceprovenance":null, "provenanceaction":{ "classid":"sysimport:actionset", "schemeid":"dnet:provenanceActions", "schemename":"dnet:provenanceActions", "classname":"sysimport:actionset" } }, "value":"medicine.disease" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":{ "trust":"0.49937168", "invisible":false, "inferred":false, "deletedbyinference":false, "inferenceprovenance":null, "provenanceaction":{ "classid":"sysimport:actionset", "schemeid":"dnet:provenanceActions", "schemename":"dnet:provenanceActions", "classname":"sysimport:actionset" } }, "value":"medicine" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":null, "value":"Radiology" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":{ "trust":"0.4573571", "invisible":false, "inferred":false, "deletedbyinference":false, "inferenceprovenance":null, "provenanceaction":{ "classid":"sysimport:actionset", "schemeid":"dnet:provenanceActions", "schemename":"dnet:provenanceActions", "classname":"sysimport:actionset" } }, "value":"medicine.medical_specialty" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":{ "trust":"0.4573571", "invisible":false, "inferred":false, "deletedbyinference":false, "inferenceprovenance":null, "provenanceaction":{ "classid":"sysimport:actionset", 
"schemeid":"dnet:provenanceActions", "schemename":"dnet:provenanceActions", "classname":"sysimport:actionset" } }, "value":"medicine" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":null, "value":"Medicine" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":{ "trust":"0.40295774", "invisible":false, "inferred":false, "deletedbyinference":false, "inferenceprovenance":null, "provenanceaction":{ "classid":"sysimport:actionset", "schemeid":"dnet:provenanceActions", "schemename":"dnet:provenanceActions", "classname":"sysimport:actionset" } }, "value":"business.industry" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":{ "trust":"0.40295774", "invisible":false, "inferred":false, "deletedbyinference":false, "inferenceprovenance":null, "provenanceaction":{ "classid":"sysimport:actionset", "schemeid":"dnet:provenanceActions", "schemename":"dnet:provenanceActions", "classname":"sysimport:actionset" } }, "value":"business" }, { "qualifier":{ "classid":"MAG", "schemeid":"dnet:subject_classification_typologies", "schemename":"dnet:subject_classification_typologies", "classname":"Microsoft Academic Graph classification" }, "dataInfo":null, "value":"Pulmonary function testing" } ], "language":null, "instance":[ { "processingchargecurrency":null, "refereed":null, "instancetype":null, "hostedby":null, "distributionlocation":null, "processingchargeamount":null, "license":null, "accessright":null, "dateofacceptance":null, "collectedfrom":{ "dataInfo":null, "value":"Microsoft Academic Graph", "key":"10|openaire____::5f532a3fc4f1ea403f37070f59a7a53a" }, "url":[ "https://www.ncbi.nlm.nih.gov/pubmed/10964736", "https://www.sciencedirect.com/science/article/pii/S0009926000904987", "https://academic.microsoft.com/#/detail/1990704599" ] } ], "context":null, "country":null, "originalId":[ "1990704599", "10.1053/crad.2000.0498" ], "pid":[ { "qualifier":{ "classid":"doi", "schemeid":"dnet:pid_types", "schemename":"dnet:pid_types", "classname":"doi" }, "dataInfo":null, "value":"10.1053/crad.2000.0498" } ], "dateofcollection":null, "dateoftransformation":null, "oaiprovenance":null, "extraInfo":null, "id":"50|doiboost____::994b7e47b9e225ab6d5e14841cb45a7f", "collectedfrom":[ { "dataInfo":null, "value":"Microsoft Academic Graph", "key":"10|openaire____::5f532a3fc4f1ea403f37070f59a7a53a" } ], "dataInfo":{ "trust":"0.9", "invisible":false, "inferred":false, "deletedbyinference":false, "inferenceprovenance":null, "provenanceaction":{ "classid":"sysimport:actionset", "schemeid":"dnet:provenanceActions", "schemename":"dnet:provenanceActions", "classname":"sysimport:actionset"} }, "lastupdatetimestamp":null }
\ No newline at end of file
diff --git a/dhp-common/src/test/resources/eu/dnetlib/dhp/schema/oaf/utils/dataset_2.json b/dhp-common/src/test/resources/eu/dnetlib/dhp/schema/oaf/utils/dataset_2.json
index 52e4e126a..c880edb7d 100644
--- a/dhp-common/src/test/resources/eu/dnetlib/dhp/schema/oaf/utils/dataset_2.json
+++ b/dhp-common/src/test/resources/eu/dnetlib/dhp/schema/oaf/utils/dataset_2.json
@@ -1 +1,140 @@
-{"id":"50|DansKnawCris::0829b5191605bdbea36d6502b8c1ce1g", "resuttype" : { "classid" : "dataset" }, "pid":[{"qualifier":{"classid":"doi"},"value":"10.1016/j.cmet.2011.03.013"},{"qualifier":{"classid":"urn"},"value":"urn:nbn:nl:ui:29-f3ed5f9e-edf6-457e-8848-61b58a4075e2"},{"qualifier":{"classid":"scp-number"},"value":"79953761260"},{"qualifier":{"classid":"pmc"},"value":"21459329"}], "collectedfrom" : [ { "key" : "10|openaire____::081b82f96300b6a6e3d282bad31cb6e3", "value" : "Repository B"} ]}
\ No newline at end of file
+{
+ "id": "50|DansKnawCris::0829b5191605bdbea36d6502b8c1ce1g",
+ "resuttype": {"classid": "dataset"},
+ "pid": [
+ {
+ "qualifier": {"classid": "doi"},
+ "value": "10.1016/j.cmet.2011.03.013"
+ },
+ {
+ "qualifier": {"classid": "urn"},
+ "value": "urn:nbn:nl:ui:29-f3ed5f9e-edf6-457e-8848-61b58a4075e2"
+ },
+ {
+ "qualifier": {"classid": "scp-number"},
+ "value": "79953761260"
+ },
+ {
+ "qualifier": {"classid": "pmc"},
+ "value": "21459329"
+ }
+ ],
+ "collectedfrom": [
+ {
+ "key": "10|openaire____::081b82f96300b6a6e3d282bad31cb6e3",
+ "value": "Repository B"
+ }
+ ],
+ "instance": [
+ {
+ "refereed": {
+ "classid": "0000",
+ "classname": "UNKNOWN",
+ "schemeid": "dnet:review_levels",
+ "schemename": "dnet:review_levels"
+ },
+ "hostedby": {
+ "key": "10|opendoar____::358aee4cc897452c00244351e4d91f69",
+ "value": "Zenodo"
+ },
+ "accessright": {
+ "classid": "OPEN",
+ "classname": "Open Access",
+ "schemeid": "dnet:access_modes",
+ "schemename": "dnet:access_modes"
+ },
+ "processingchargecurrency": {
+ "dataInfo": {
+ "provenanceaction": {
+ "classid": "sysimport:crosswalk:datasetarchive",
+ "classname": "Harvested",
+ "schemeid": "dnet:provenanceActions",
+ "schemename": "dnet:provenanceActions"
+ },
+ "deletedbyinference": false,
+ "inferred": false,
+ "inferenceprovenance": "",
+ "invisible": true,
+ "trust": "0.9"
+ },
+ "value": "EUR"
+ },
+ "pid": [
+ {
+ "dataInfo": {
+ "provenanceaction": {
+ "classid": "sysimport:crosswalk:datasetarchive",
+ "classname": "Harvested",
+ "schemeid": "dnet:provenanceActions",
+ "schemename": "dnet:provenanceActions"
+ },
+ "deletedbyinference": false,
+ "inferred": false,
+ "inferenceprovenance": "",
+ "invisible": true,
+ "trust": "0.9"
+ },
+ "qualifier": {
+ "classid": "doi",
+ "classname": "Digital Object Identifier",
+ "schemeid": "dnet:pid_types",
+ "schemename": "dnet:pid_types"
+ },
+ "value": "10.1371/journal.pone.0085605"
+ }
+ ],
+ "distributionlocation": "",
+ "url": ["https://doi.org/10.1371/journal.pone.0085605"],
+ "alternateIdentifier": [
+ {
+ "dataInfo": {
+ "provenanceaction": {
+ "classid": "sysimport:crosswalk:datasetarchive",
+ "classname": "Harvested",
+ "schemeid": "dnet:provenanceActions",
+ "schemename": "dnet:provenanceActions"
+ },
+ "deletedbyinference": false,
+ "inferred": false,
+ "inferenceprovenance": "",
+ "invisible": true,
+ "trust": "0.9"
+ },
+ "qualifier": {
+ "classid": "pmid",
+ "classname": "PubMed ID",
+ "schemeid": "dnet:pid_types",
+ "schemename": "dnet:pid_types"
+ },
+ "value": "24454899.0"
+ }
+ ],
+ "collectedfrom": {
+ "key": "10|openaire____::081b82f96300b6a6e3d282bad31cb6e3",
+ "value": "Repository B"
+ },
+ "processingchargeamount": {
+ "dataInfo": {
+ "provenanceaction": {
+ "classid": "sysimport:crosswalk:datasetarchive",
+ "classname": "Harvested",
+ "schemeid": "dnet:provenanceActions",
+ "schemename": "dnet:provenanceActions"
+ },
+ "deletedbyinference": false,
+ "inferred": false,
+ "inferenceprovenance": "",
+ "invisible": true,
+ "trust": "0.9"
+ },
+ "value": "1022.02"
+ },
+ "instancetype": {
+ "classid": "0004",
+ "classname": "Conference object",
+ "schemeid": "dnet:publication_resource",
+ "schemename": "dnet:publication_resource"
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/dhp-common/src/test/resources/eu/dnetlib/dhp/schema/oaf/utils/dataset_delegated.json b/dhp-common/src/test/resources/eu/dnetlib/dhp/schema/oaf/utils/dataset_delegated.json
new file mode 100644
index 000000000..967c1181b
--- /dev/null
+++ b/dhp-common/src/test/resources/eu/dnetlib/dhp/schema/oaf/utils/dataset_delegated.json
@@ -0,0 +1,140 @@
+{
+ "id": "50|DansKnawCris::0829b5191605bdbea36d6502b8c1ce1g",
+ "resuttype": {"classid": "dataset"},
+ "pid": [
+ {
+ "qualifier": {"classid": "doi"},
+ "value": "10.1016/j.cmet.2011.03.013"
+ },
+ {
+ "qualifier": {"classid": "urn"},
+ "value": "urn:nbn:nl:ui:29-f3ed5f9e-edf6-457e-8848-61b58a4075e2"
+ },
+ {
+ "qualifier": {"classid": "scp-number"},
+ "value": "79953761260"
+ },
+ {
+ "qualifier": {"classid": "pmc"},
+ "value": "21459329"
+ }
+ ],
+ "collectedfrom": [
+ {
+ "key": "10|opendoar____::358aee4cc897452c00244351e4d91f69",
+ "value": "Zenodo"
+ }
+ ],
+ "instance": [
+ {
+ "refereed": {
+ "classid": "0000",
+ "classname": "UNKNOWN",
+ "schemeid": "dnet:review_levels",
+ "schemename": "dnet:review_levels"
+ },
+ "hostedby": {
+ "key": "10|opendoar____::358aee4cc897452c00244351e4d91f69",
+ "value": "Zenodo"
+ },
+ "accessright": {
+ "classid": "OPEN",
+ "classname": "Open Access",
+ "schemeid": "dnet:access_modes",
+ "schemename": "dnet:access_modes"
+ },
+ "processingchargecurrency": {
+ "dataInfo": {
+ "provenanceaction": {
+ "classid": "sysimport:crosswalk:datasetarchive",
+ "classname": "Harvested",
+ "schemeid": "dnet:provenanceActions",
+ "schemename": "dnet:provenanceActions"
+ },
+ "deletedbyinference": false,
+ "inferred": false,
+ "inferenceprovenance": "",
+ "invisible": true,
+ "trust": "0.9"
+ },
+ "value": "EUR"
+ },
+ "pid": [
+ {
+ "dataInfo": {
+ "provenanceaction": {
+ "classid": "sysimport:crosswalk:datasetarchive",
+ "classname": "Harvested",
+ "schemeid": "dnet:provenanceActions",
+ "schemename": "dnet:provenanceActions"
+ },
+ "deletedbyinference": false,
+ "inferred": false,
+ "inferenceprovenance": "",
+ "invisible": true,
+ "trust": "0.9"
+ },
+ "qualifier": {
+ "classid": "doi",
+ "classname": "Digital Object Identifier",
+ "schemeid": "dnet:pid_types",
+ "schemename": "dnet:pid_types"
+ },
+ "value": "10.1371/journal.pone.0085605"
+ }
+ ],
+ "distributionlocation": "",
+ "url": ["https://doi.org/10.1371/journal.pone.0085605"],
+ "alternateIdentifier": [
+ {
+ "dataInfo": {
+ "provenanceaction": {
+ "classid": "sysimport:crosswalk:datasetarchive",
+ "classname": "Harvested",
+ "schemeid": "dnet:provenanceActions",
+ "schemename": "dnet:provenanceActions"
+ },
+ "deletedbyinference": false,
+ "inferred": false,
+ "inferenceprovenance": "",
+ "invisible": true,
+ "trust": "0.9"
+ },
+ "qualifier": {
+ "classid": "pmid",
+ "classname": "PubMed ID",
+ "schemeid": "dnet:pid_types",
+ "schemename": "dnet:pid_types"
+ },
+ "value": "24454899.0"
+ }
+ ],
+ "collectedfrom": {
+ "key": "10|opendoar____::358aee4cc897452c00244351e4d91f69",
+ "value": "Zenodo"
+ },
+ "processingchargeamount": {
+ "dataInfo": {
+ "provenanceaction": {
+ "classid": "sysimport:crosswalk:datasetarchive",
+ "classname": "Harvested",
+ "schemeid": "dnet:provenanceActions",
+ "schemename": "dnet:provenanceActions"
+ },
+ "deletedbyinference": false,
+ "inferred": false,
+ "inferenceprovenance": "",
+ "invisible": true,
+ "trust": "0.9"
+ },
+ "value": "1022.02"
+ },
+ "instancetype": {
+ "classid": "0004",
+ "classname": "Conference object",
+ "schemeid": "dnet:publication_resource",
+ "schemename": "dnet:publication_resource"
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/dhp-pace-core/pom.xml b/dhp-pace-core/pom.xml
new file mode 100644
index 000000000..fd7f44fc9
--- /dev/null
+++ b/dhp-pace-core/pom.xml
@@ -0,0 +1,110 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+
+ <parent>
+ <groupId>eu.dnetlib.dhp</groupId>
+ <artifactId>dhp</artifactId>
+ <version>1.2.5-SNAPSHOT</version>
+ <relativePath>../pom.xml</relativePath>
+ </parent>
+
+ <groupId>eu.dnetlib.dhp</groupId>
+ <artifactId>dhp-pace-core</artifactId>
+ <version>1.2.5-SNAPSHOT</version>
+ <packaging>jar</packaging>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>net.alchim31.maven</groupId>
+ <artifactId>scala-maven-plugin</artifactId>
+ <version>${net.alchim31.maven.version}</version>
+ <executions>
+ <execution>
+ <id>scala-compile-first</id>
+ <phase>initialize</phase>
+ <goals>
+ <goal>add-source</goal>
+ <goal>compile</goal>
+ </goals>
+ </execution>
+ <execution>
+ <id>scala-test-compile</id>
+ <phase>process-test-resources</phase>
+ <goals>
+ <goal>testCompile</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <failOnMultipleScalaVersions>true</failOnMultipleScalaVersions>
+ <scalaCompatVersion>${scala.binary.version}</scalaCompatVersion>
+ <scalaVersion>${scala.version}</scalaVersion>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+ <dependencies>
+
+ <dependency>
+ <groupId>edu.cmu</groupId>
+ <artifactId>secondstring</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>com.google.code.gson</groupId>
+ <artifactId>gson</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-lang3</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.antlr</groupId>
+ <artifactId>stringtemplate</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.reflections</groupId>
+ <artifactId>reflections</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-databind</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-math3</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>com.jayway.jsonpath</groupId>
+ <artifactId>json-path</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>com.ibm.icu</groupId>
+ <artifactId>icu4j</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-core_${scala.binary.version}</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.spark</groupId>
+ <artifactId>spark-sql_${scala.binary.version}</artifactId>
+ </dependency>
+
+ </dependencies>
+</project>
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/AbstractClusteringFunction.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/AbstractClusteringFunction.java
new file mode 100644
index 000000000..3da8eb490
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/AbstractClusteringFunction.java
@@ -0,0 +1,46 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import org.apache.commons.lang3.StringUtils;
+
+import eu.dnetlib.pace.common.AbstractPaceFunctions;
+import eu.dnetlib.pace.config.Config;
+
+public abstract class AbstractClusteringFunction extends AbstractPaceFunctions implements ClusteringFunction {
+
+ protected Map<String, Integer> params;
+
+ public AbstractClusteringFunction(final Map<String, Integer> params) {
+ this.params = params;
+ }
+
+ protected abstract Collection<String> doApply(Config conf, String s);
+
+ @Override
+ public Collection<String> apply(Config conf, List<String> fields) {
+ return fields
+ .stream()
+ .filter(f -> !f.isEmpty())
+ .map(this::normalize)
+ .map(s -> filterAllStopWords(s))
+ .map(s -> doApply(conf, s))
+ .map(c -> filterBlacklisted(c, ngramBlacklist))
+ .flatMap(c -> c.stream())
+ .filter(StringUtils::isNotBlank)
+ .collect(Collectors.toCollection(HashSet::new));
+ }
+
+ public Map<String, Integer> getParams() {
+ return params;
+ }
+
+ protected Integer param(String name) {
+ return params.get(name);
+ }
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/Acronyms.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/Acronyms.java
new file mode 100644
index 000000000..9072fbb4b
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/Acronyms.java
@@ -0,0 +1,51 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.util.Collection;
+import java.util.Map;
+import java.util.Set;
+import java.util.StringTokenizer;
+
+import com.google.common.collect.Sets;
+
+import eu.dnetlib.pace.config.Config;
+
+@ClusteringClass("acronyms")
+public class Acronyms extends AbstractClusteringFunction {
+
+ public Acronyms(Map<String, Integer> params) {
+ super(params);
+ }
+
+ @Override
+ protected Collection<String> doApply(Config conf, String s) {
+ return extractAcronyms(s, param("max"), param("minLen"), param("maxLen"));
+ }
+
+ private Set<String> extractAcronyms(final String s, int maxAcronyms, int minLen, int maxLen) {
+
+ final Set<String> acronyms = Sets.newLinkedHashSet();
+
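+ // pass i builds one candidate acronym by concatenating the i-th character of every multi-char token, stopping once the buffer exceeds maxLen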
+ for (int i = 0; i < maxAcronyms; i++) {
+
+ final StringTokenizer st = new StringTokenizer(s);
+ final StringBuilder sb = new StringBuilder();
+
+ while (st.hasMoreTokens()) {
+ final String token = st.nextToken();
+ if (sb.length() > maxLen) {
+ break;
+ }
+ if (token.length() > 1 && i < token.length()) {
+ sb.append(token.charAt(i));
+ }
+ }
+ String acronym = sb.toString();
+ if (acronym.length() > minLen) {
+ acronyms.add(acronym);
+ }
+ }
+ return acronyms;
+ }
+
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/ClusteringClass.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/ClusteringClass.java
new file mode 100644
index 000000000..3bb845b15
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/ClusteringClass.java
@@ -0,0 +1,14 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface ClusteringClass {
+
+ public String value();
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/ClusteringFunction.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/ClusteringFunction.java
new file mode 100644
index 000000000..8b7852418
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/ClusteringFunction.java
@@ -0,0 +1,16 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+import eu.dnetlib.pace.config.Config;
+
+public interface ClusteringFunction {
+
+ public Collection<String> apply(Config config, List<String> fields);
+
+ public Map<String, Integer> getParams();
+
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/ImmutableFieldValue.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/ImmutableFieldValue.java
new file mode 100644
index 000000000..bc8844aee
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/ImmutableFieldValue.java
@@ -0,0 +1,28 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+import com.google.common.collect.Lists;
+
+import eu.dnetlib.pace.config.Config;
+
+@ClusteringClass("immutablefieldvalue")
+public class ImmutableFieldValue extends AbstractClusteringFunction {
+
+ public ImmutableFieldValue(final Map<String, Integer> params) {
+ super(params);
+ }
+
+ @Override
+ protected Collection<String> doApply(final Config conf, final String s) {
+ final List<String> res = Lists.newArrayList();
+
+ res.add(s);
+
+ return res;
+ }
+
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/KeywordsClustering.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/KeywordsClustering.java
new file mode 100644
index 000000000..38299adb4
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/KeywordsClustering.java
@@ -0,0 +1,54 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.util.*;
+import java.util.stream.Collectors;
+
+import org.apache.commons.lang3.StringUtils;
+
+import eu.dnetlib.pace.config.Config;
+
+@ClusteringClass("keywordsclustering")
+public class KeywordsClustering extends AbstractClusteringFunction {
+
+ public KeywordsClustering(Map<String, Integer> params) {
+ super(params);
+ }
+
+ @Override
+ protected Collection<String> doApply(final Config conf, String s) {
+
+ // takes city codes and keyword codes, without duplicates
+ Set<String> keywords = getKeywords(s, conf.translationMap(), params.getOrDefault("windowSize", 4));
+ Set<String> cities = getCities(s, params.getOrDefault("windowSize", 4));
+
+ // list of combinations to return as result
+ final Collection<String> combinations = new LinkedHashSet<>();
+
+ for (String keyword : keywordsToCodes(keywords, conf.translationMap())) {
+ for (String city : citiesToCodes(cities)) {
+ combinations.add(keyword + "-" + city);
+ if (combinations.size() >= params.getOrDefault("max", 2)) {
+ return combinations;
+ }
+ }
+ }
+
+ return combinations;
+ }
+
+ @Override
+ public Collection<String> apply(final Config conf, List<String> fields) {
+ return fields
+ .stream()
+ .filter(f -> !f.isEmpty())
+ .map(this::cleanup)
+ .map(this::normalize)
+ .map(s -> filterAllStopWords(s))
+ .map(s -> doApply(conf, s))
+ .map(c -> filterBlacklisted(c, ngramBlacklist))
+ .flatMap(c -> c.stream())
+ .filter(StringUtils::isNotBlank)
+ .collect(Collectors.toCollection(HashSet::new));
+ }
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/LastNameFirstInitial.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/LastNameFirstInitial.java
new file mode 100644
index 000000000..5a385961a
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/LastNameFirstInitial.java
@@ -0,0 +1,79 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.util.*;
+import java.util.stream.Collectors;
+
+import org.apache.commons.lang3.StringUtils;
+
+import com.google.common.collect.Lists;
+
+import eu.dnetlib.pace.config.Config;
+import eu.dnetlib.pace.model.Person;
+
+@ClusteringClass("lnfi")
+public class LastNameFirstInitial extends AbstractClusteringFunction {
+
+ private boolean DEFAULT_AGGRESSIVE = true;
+
+ public LastNameFirstInitial(final Map<String, Integer> params) {
+ super(params);
+ }
+
+ @Override
+ public Collection<String> apply(Config conf, List<String> fields) {
+ return fields
+ .stream()
+ .filter(f -> !f.isEmpty())
+ .map(this::normalize)
+ .map(s -> doApply(conf, s))
+ .map(c -> filterBlacklisted(c, ngramBlacklist))
+ .flatMap(c -> c.stream())
+ .filter(StringUtils::isNotBlank)
+ .collect(Collectors.toCollection(HashSet::new));
+ }
+
+ @Override
+ protected String normalize(final String s) {
+ return fixAliases(transliterate(nfd(unicodeNormalization(s))))
+ // do not compact the regexes in a single expression, would cause StackOverflowError in case of large input
+ // strings
+ .replaceAll("[^ \\w]+", "")
+ .replaceAll("(\\p{InCombiningDiacriticalMarks})+", "")
+ .replaceAll("(\\p{Punct})+", " ")
+ .replaceAll("(\\d)+", " ")
+ .replaceAll("(\\n)+", " ")
+ .trim();
+ }
+
+ @Override
+ protected Collection<String> doApply(final Config conf, final String s) {
+
+ final List<String> res = Lists.newArrayList();
+
+ final boolean aggressive = (Boolean) (getParams().containsKey("aggressive") ? getParams().get("aggressive")
+ : DEFAULT_AGGRESSIVE);
+
+ Person p = new Person(s, aggressive);
+
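+ // when the parser reliably splits name and surname, the key is the lowercased first-name initial followed by the surname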
+ if (p.isAccurate()) {
+ String lastName = p.getNormalisedSurname().toLowerCase();
+ String firstInitial = p.getNormalisedFirstName().toLowerCase().substring(0, 1);
+
+ res.add(firstInitial.concat(lastName));
+ } else { // is not accurate, meaning it has no defined name and surname
+ List<String> fullname = Arrays.asList(p.getNormalisedFullname().split(" "));
+ if (fullname.size() == 1) {
+ res.add(p.getNormalisedFullname().toLowerCase());
+ } else if (fullname.size() == 2) {
+ res.add(fullname.get(0).substring(0, 1).concat(fullname.get(1)).toLowerCase());
+ res.add(fullname.get(1).substring(0, 1).concat(fullname.get(0)).toLowerCase());
+ } else {
+ res.add(fullname.get(0).substring(0, 1).concat(fullname.get(fullname.size() - 1)).toLowerCase());
+ res.add(fullname.get(fullname.size() - 1).substring(0, 1).concat(fullname.get(0)).toLowerCase());
+ }
+ }
+
+ return res;
+ }
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/LowercaseClustering.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/LowercaseClustering.java
new file mode 100644
index 000000000..a3a6c4881
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/LowercaseClustering.java
@@ -0,0 +1,38 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang3.StringUtils;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
+import eu.dnetlib.pace.config.Config;
+
+@ClusteringClass("lowercase")
+public class LowercaseClustering extends AbstractClusteringFunction {
+
+ public LowercaseClustering(final Map<String, Integer> params) {
+ super(params);
+ }
+
+ @Override
+ public Collection<String> apply(Config conf, List<String> fields) {
+ Collection<String> c = Sets.newLinkedHashSet();
+ for (String f : fields) {
+ c.addAll(doApply(conf, f));
+ }
+ return c;
+ }
+
+ @Override
+ protected Collection<String> doApply(final Config conf, final String s) {
+ if (StringUtils.isBlank(s)) {
+ return Lists.newArrayList();
+ }
+ return Lists.newArrayList(s.toLowerCase().trim());
+ }
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/NGramUtils.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/NGramUtils.java
new file mode 100644
index 000000000..6ee80b86e
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/NGramUtils.java
@@ -0,0 +1,24 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.util.Set;
+
+import org.apache.commons.lang3.StringUtils;
+
+import eu.dnetlib.pace.common.AbstractPaceFunctions;
+
+public class NGramUtils extends AbstractPaceFunctions {
+ static private final NGramUtils NGRAMUTILS = new NGramUtils();
+
+ private static final int SIZE = 100;
+
+ private static final Set<String> stopwords = AbstractPaceFunctions
+ .loadFromClasspath("/eu/dnetlib/pace/config/stopwords_en.txt");
+
+ public static String cleanupForOrdering(String s) {
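+ // normalize, drop English stopwords, pad/truncate to SIZE characters, then strip spaces to obtain a compact ordering key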
+ return (NGRAMUTILS.filterStopWords(NGRAMUTILS.normalize(s), stopwords) + StringUtils.repeat(" ", SIZE))
+ .substring(0, SIZE)
+ .replaceAll(" ", "");
+ }
+
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/NgramPairs.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/NgramPairs.java
new file mode 100644
index 000000000..aa06aa408
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/NgramPairs.java
@@ -0,0 +1,41 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+import com.google.common.collect.Lists;
+
+import eu.dnetlib.pace.config.Config;
+
+@ClusteringClass("ngrampairs")
+public class NgramPairs extends Ngrams {
+
+ public NgramPairs(Map<String, Integer> params) {
+ super(params, false);
+ }
+
+ public NgramPairs(Map<String, Integer> params, boolean sorted) {
+ super(params, sorted);
+ }
+
+ @Override
+ protected Collection<String> doApply(Config conf, String s) {
+ return ngramPairs(Lists.newArrayList(getNgrams(s, param("ngramLen"), param("max") * 2, 1, 2)), param("max"));
+ }
+
+ protected Collection<String> ngramPairs(final List<String> ngrams, int maxNgrams) {
+ Collection<String> res = Lists.newArrayList();
+ int j = 0;
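+ // concatenate each ngram with its immediate successor, producing at most maxNgrams pairs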
+ for (int i = 0; i < ngrams.size() && res.size() < maxNgrams; i++) {
+ if (++j >= ngrams.size()) {
+ break;
+ }
+ res.add(ngrams.get(i) + ngrams.get(j));
+ // System.out.println("-- " + concatNgrams);
+ }
+ return res;
+ }
+
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/Ngrams.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/Ngrams.java
new file mode 100644
index 000000000..96c305a16
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/Ngrams.java
@@ -0,0 +1,52 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.util.*;
+
+import eu.dnetlib.pace.config.Config;
+
+@ClusteringClass("ngrams")
+public class Ngrams extends AbstractClusteringFunction {
+
+ private final boolean sorted;
+
+ public Ngrams(Map<String, Integer> params) {
+ this(params, false);
+ }
+
+ public Ngrams(Map<String, Integer> params, boolean sorted) {
+ super(params);
+ this.sorted = sorted;
+ }
+
+ @Override
+ protected Collection<String> doApply(Config conf, String s) {
+ return getNgrams(s, param("ngramLen"), param("max"), param("maxPerToken"), param("minNgramLen"));
+ }
+
+ protected Collection<String> getNgrams(String s, int ngramLen, int max, int maxPerToken, int minNgramLen) {
+
+ final Collection<String> ngrams = sorted ? new TreeSet<>() : new LinkedHashSet<>();
+ final StringTokenizer st = new StringTokenizer(s);
+
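+ // for each token, emit up to maxPerToken ngrams of length ngramLen at successive offsets, stopping at max ngrams overall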
+ while (st.hasMoreTokens()) {
+ final String token = st.nextToken();
+ if (!token.isEmpty()) {
+ for (int i = 0; i < maxPerToken && ngramLen + i <= token.length(); i++) {
+ String ngram = token.substring(i, Math.min(ngramLen + i, token.length())).trim();
+
+ if (ngram.length() >= minNgramLen) {
+ ngrams.add(ngram);
+
+ if (ngrams.size() >= max) {
+ return ngrams;
+ }
+ }
+ }
+ }
+ }
+ // System.out.println(ngrams + " n: " + ngrams.size());
+ return ngrams;
+ }
+
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/PersonClustering.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/PersonClustering.java
new file mode 100644
index 000000000..b4a04ce65
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/PersonClustering.java
@@ -0,0 +1,84 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.lang3.StringUtils;
+
+import com.google.common.collect.Sets;
+
+import eu.dnetlib.pace.common.AbstractPaceFunctions;
+import eu.dnetlib.pace.config.Config;
+import eu.dnetlib.pace.model.Person;
+
+@ClusteringClass("personClustering")
+public class PersonClustering extends AbstractPaceFunctions implements ClusteringFunction {
+
+ private Map<String, Integer> params;
+
+ private static final int MAX_TOKENS = 5;
+
+ public PersonClustering(final Map<String, Integer> params) {
+ this.params = params;
+ }
+
+ @Override
+ public Collection<String> apply(final Config conf, final List<String> fields) {
+ final Set<String> hashes = Sets.newHashSet();
+
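+ // when the name parses cleanly the key is first-name initial + lowercased surname; otherwise every ordered pair of the first MAX_TOKENS tokens is combined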
+ for (final String f : fields) {
+
+ final Person person = new Person(f, false);
+
+ if (StringUtils.isNotBlank(person.getNormalisedFirstName())
+ && StringUtils.isNotBlank(person.getNormalisedSurname())) {
+ hashes.add(firstLC(person.getNormalisedFirstName()) + person.getNormalisedSurname().toLowerCase());
+ } else {
+ for (final String token1 : tokens(f, MAX_TOKENS)) {
+ for (final String token2 : tokens(f, MAX_TOKENS)) {
+ if (!token1.equals(token2)) {
+ hashes.add(firstLC(token1) + token2);
+ }
+ }
+ }
+ }
+ }
+
+ return hashes;
+ }
+
+// @Override
+// public Collection apply(final List fields) {
+// final Set hashes = Sets.newHashSet();
+//
+// for (final Field f : fields) {
+//
+// final GTAuthor gta = GTAuthor.fromOafJson(f.stringValue());
+//
+// final Author a = gta.getAuthor();
+//
+// if (StringUtils.isNotBlank(a.getFirstname()) && StringUtils.isNotBlank(a.getSecondnames())) {
+// hashes.add(firstLC(a.getFirstname()) + a.getSecondnames().toLowerCase());
+// } else {
+// for (final String token1 : tokens(f.stringValue(), MAX_TOKENS)) {
+// for (final String token2 : tokens(f.stringValue(), MAX_TOKENS)) {
+// if (!token1.equals(token2)) {
+// hashes.add(firstLC(token1) + token2);
+// }
+// }
+// }
+// }
+// }
+//
+// return hashes;
+// }
+
+ @Override
+ public Map<String, Integer> getParams() {
+ return params;
+ }
+
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/PersonHash.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/PersonHash.java
new file mode 100644
index 000000000..a3d58a9be
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/PersonHash.java
@@ -0,0 +1,34 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+import com.google.common.collect.Lists;
+
+import eu.dnetlib.pace.config.Config;
+import eu.dnetlib.pace.model.Person;
+
+@ClusteringClass("personHash")
+public class PersonHash extends AbstractClusteringFunction {
+
+ private boolean DEFAULT_AGGRESSIVE = false;
+
+ public PersonHash(final Map<String, Integer> params) {
+ super(params);
+ }
+
+ @Override
+ protected Collection<String> doApply(final Config conf, final String s) {
+ final List<String> res = Lists.newArrayList();
+
+ final boolean aggressive = (Boolean) (getParams().containsKey("aggressive") ? getParams().get("aggressive")
+ : DEFAULT_AGGRESSIVE);
+
+ res.add(new Person(s, aggressive).hash());
+
+ return res;
+ }
+
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/RandomClusteringFunction.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/RandomClusteringFunction.java
new file mode 100644
index 000000000..2aab926da
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/RandomClusteringFunction.java
@@ -0,0 +1,20 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.util.Collection;
+import java.util.Map;
+
+import eu.dnetlib.pace.config.Config;
+
+public class RandomClusteringFunction extends AbstractClusteringFunction {
+
+ public RandomClusteringFunction(Map<String, Integer> params) {
+ super(params);
+ }
+
+ @Override
+ protected Collection<String> doApply(final Config conf, String s) {
+ return null;
+ }
+
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/SortedNgramPairs.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/SortedNgramPairs.java
new file mode 100644
index 000000000..b085ae26d
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/SortedNgramPairs.java
@@ -0,0 +1,31 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.util.*;
+
+import com.google.common.base.Joiner;
+import com.google.common.base.Splitter;
+import com.google.common.collect.Lists;
+
+import eu.dnetlib.pace.config.Config;
+
+@ClusteringClass("sortedngrampairs")
+public class SortedNgramPairs extends NgramPairs {
+
+ public SortedNgramPairs(Map<String, Integer> params) {
+ super(params, false);
+ }
+
+ @Override
+ protected Collection<String> doApply(Config conf, String s) {
+
+ final List<String> tokens = Lists.newArrayList(Splitter.on(" ").omitEmptyStrings().trimResults().split(s));
+
+ Collections.sort(tokens);
+
+ return ngramPairs(
+ Lists.newArrayList(getNgrams(Joiner.on(" ").join(tokens), param("ngramLen"), param("max") * 2, 1, 2)),
+ param("max"));
+ }
+
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/SpaceTrimmingFieldValue.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/SpaceTrimmingFieldValue.java
new file mode 100644
index 000000000..392aecc79
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/SpaceTrimmingFieldValue.java
@@ -0,0 +1,34 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang3.RandomStringUtils;
+import org.apache.commons.lang3.StringUtils;
+
+import com.google.common.collect.Lists;
+
+import eu.dnetlib.pace.config.Config;
+
+@ClusteringClass("spacetrimmingfieldvalue")
+public class SpaceTrimmingFieldValue extends AbstractClusteringFunction {
+
+ public SpaceTrimmingFieldValue(final Map<String, Integer> params) {
+ super(params);
+ }
+
+ @Override
+ protected Collection<String> doApply(final Config conf, final String s) {
+ final List<String> res = Lists.newArrayList();
+
+ res
+ .add(
+ StringUtils.isBlank(s) ? RandomStringUtils.random(getParams().get("randomLength"))
+ : s.toLowerCase().replaceAll("\\s+", ""));
+
+ return res;
+ }
+
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/SuffixPrefix.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/SuffixPrefix.java
new file mode 100644
index 000000000..2a1c023a9
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/SuffixPrefix.java
@@ -0,0 +1,42 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.util.Collection;
+import java.util.Map;
+import java.util.Set;
+
+import com.google.common.collect.Sets;
+
+import eu.dnetlib.pace.config.Config;
+
+@ClusteringClass("suffixprefix")
+public class SuffixPrefix extends AbstractClusteringFunction {
+
+ public SuffixPrefix(Map<String, Integer> params) {
+ super(params);
+ }
+
+ @Override
+ protected Collection<String> doApply(Config conf, String s) {
+ return suffixPrefix(s, param("len"), param("max"));
+ }
+
+ private Collection<String> suffixPrefix(String s, int len, int max) {
+ final Set<String> bigrams = Sets.newLinkedHashSet();
+ int i = 0;
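+ // for every space found in the string, glue the len characters before it to the len characters after it (suffix of one word + prefix of the next)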
+ while (++i < s.length() && bigrams.size() < max) {
+ int j = s.indexOf(" ", i);
+
+ int offset = j + len + 1 < s.length() ? j + len + 1 : s.length();
+
+ if (j - len > 0) {
+ String bigram = s.substring(j - len, offset).replaceAll(" ", "").trim();
+ if (bigram.length() >= 4) {
+ bigrams.add(bigram);
+ }
+ }
+ }
+ return bigrams;
+ }
+
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/UrlClustering.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/UrlClustering.java
new file mode 100644
index 000000000..5b267ad10
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/UrlClustering.java
@@ -0,0 +1,52 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import eu.dnetlib.pace.common.AbstractPaceFunctions;
+import eu.dnetlib.pace.config.Config;
+
+@ClusteringClass("urlclustering")
+public class UrlClustering extends AbstractPaceFunctions implements ClusteringFunction {
+
+ protected Map<String, Integer> params;
+
+ public UrlClustering(final Map<String, Integer> params) {
+ this.params = params;
+ }
+
+ @Override
+ public Collection<String> apply(final Config conf, List<String> fields) {
+ try {
+ return fields
+ .stream()
+ .filter(f -> !f.isEmpty())
+ .map(this::asUrl)
+ .map(URL::getHost)
+ .collect(Collectors.toCollection(HashSet::new));
+ } catch (IllegalStateException e) {
+ return new HashSet<>();
+ }
+ }
+
+ @Override
+ public Map<String, Integer> getParams() {
+ return null;
+ }
+
+ private URL asUrl(String value) {
+ try {
+ return new URL(value);
+ } catch (MalformedURLException e) {
+ // should not happen as checked by pace typing
+ throw new IllegalStateException("invalid URL: " + value);
+ }
+ }
+
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/WordsStatsSuffixPrefixChain.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/WordsStatsSuffixPrefixChain.java
new file mode 100644
index 000000000..c8e02f8f0
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/WordsStatsSuffixPrefixChain.java
@@ -0,0 +1,91 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.util.*;
+import java.util.stream.Collectors;
+
+import com.google.common.collect.Sets;
+
+import eu.dnetlib.pace.config.Config;
+
+@ClusteringClass("wordsStatsSuffixPrefixChain")
+public class WordsStatsSuffixPrefixChain extends AbstractClusteringFunction {
+
+ public WordsStatsSuffixPrefixChain(Map<String, Integer> params) {
+ super(params);
+ }
+
+ @Override
+ protected Collection<String> doApply(Config conf, String s) {
+ return suffixPrefixChain(s, param("mod"));
+ }
+
+ private Collection<String> suffixPrefixChain(String s, int mod) {
+
+ // create the list of words from the string (remove short words)
+ List<String> wordsList = Arrays
+ .stream(s.split(" "))
+ .filter(si -> si.length() > 3)
+ .collect(Collectors.toList());
+
+ final int words = wordsList.size();
+ final int letters = s.length();
+
+ // create the prefix: number of words + number of letters/mod
+ String prefix = words + "-" + letters / mod + "-";
+
+ return doSuffixPrefixChain(wordsList, prefix);
+
+ }
+
+ private Collection<String> doSuffixPrefixChain(List<String> wordsList, String prefix) {
+
+ Set<String> set = Sets.newLinkedHashSet();
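+ // 0 or 1 words: no key; 2 words: alternate suffix/prefix of the two; 3 or more words: chain suffix/prefix/suffix over the first three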
+ switch (wordsList.size()) {
+ case 0:
+ case 1:
+ break;
+ case 2:
+ set
+ .add(
+ prefix +
+ suffix(wordsList.get(0), 3) +
+ prefix(wordsList.get(1), 3));
+
+ set
+ .add(
+ prefix +
+ prefix(wordsList.get(0), 3) +
+ suffix(wordsList.get(1), 3));
+
+ break;
+ default:
+ set
+ .add(
+ prefix +
+ suffix(wordsList.get(0), 3) +
+ prefix(wordsList.get(1), 3) +
+ suffix(wordsList.get(2), 3));
+
+ set
+ .add(
+ prefix +
+ prefix(wordsList.get(0), 3) +
+ suffix(wordsList.get(1), 3) +
+ prefix(wordsList.get(2), 3));
+ break;
+ }
+
+ return set;
+
+ }
+
+ private String suffix(String s, int len) {
+ return s.substring(s.length() - len);
+ }
+
+ private String prefix(String s, int len) {
+ return s.substring(0, len);
+ }
+
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/WordsSuffixPrefix.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/WordsSuffixPrefix.java
new file mode 100644
index 000000000..e606590a5
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/clustering/WordsSuffixPrefix.java
@@ -0,0 +1,59 @@
+
+package eu.dnetlib.pace.clustering;
+
+import java.util.Collection;
+import java.util.Map;
+import java.util.Set;
+
+import com.google.common.collect.Sets;
+
+import eu.dnetlib.pace.config.Config;
+
+@ClusteringClass("wordssuffixprefix")
+public class WordsSuffixPrefix extends AbstractClusteringFunction {
+
+ public WordsSuffixPrefix(Map<String, Integer> params) {
+ super(params);
+ }
+
+ @Override
+ protected Collection<String> doApply(Config conf, String s) {
+ return suffixPrefix(s, param("len"), param("max"));
+ }
+
+ private Collection<String> suffixPrefix(String s, int len, int max) {
+
+ final int words = s.split(" ").length;
+
+ // adjust the token length according to the number of words
+ switch (words) {
+ case 1:
+ return Sets.newLinkedHashSet();
+ case 2:
+ return doSuffixPrefix(s, len + 2, max, words);
+ case 3:
+ return doSuffixPrefix(s, len + 1, max, words);
+ default:
+ return doSuffixPrefix(s, len, max, words);
+ }
+ }
+
+ private Collection<String> doSuffixPrefix(String s, int len, int max, int words) {
+ final Set<String> bigrams = Sets.newLinkedHashSet();
+ int i = 0;
+ while (++i < s.length() && bigrams.size() < max) {
+ int j = s.indexOf(" ", i);
+
+ int offset = j + len + 1 < s.length() ? j + len + 1 : s.length();
+
+ if (j - len > 0) {
+ String bigram = s.substring(j - len, offset).replaceAll(" ", "").trim();
+ if (bigram.length() >= 4) {
+ bigrams.add(words + bigram);
+ }
+ }
+ }
+ return bigrams;
+ }
+
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/common/AbstractPaceFunctions.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/common/AbstractPaceFunctions.java
new file mode 100644
index 000000000..b440686de
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/common/AbstractPaceFunctions.java
@@ -0,0 +1,357 @@
+
+package eu.dnetlib.pace.common;
+
+import java.io.IOException;
+import java.io.StringWriter;
+import java.nio.charset.StandardCharsets;
+import java.text.Normalizer;
+import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+
+import com.google.common.base.Joiner;
+import com.google.common.base.Splitter;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+import com.ibm.icu.text.Transliterator;
+
+import eu.dnetlib.pace.clustering.NGramUtils;
+
+/**
+ * Set of common functions for the framework
+ *
+ * @author claudio
+ */
+public abstract class AbstractPaceFunctions {
+
+ // city map to be used when translating the city names into codes
+ private static Map<String, String> cityMap = AbstractPaceFunctions
+ .loadMapFromClasspath("/eu/dnetlib/pace/config/city_map.csv");
+
+ // list of stopwords in different languages
+ protected static Set<String> stopwords_gr = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_gr.txt");
+ protected static Set<String> stopwords_en = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_en.txt");
+ protected static Set<String> stopwords_de = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_de.txt");
+ protected static Set<String> stopwords_es = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_es.txt");
+ protected static Set<String> stopwords_fr = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_fr.txt");
+ protected static Set<String> stopwords_it = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_it.txt");
+ protected static Set<String> stopwords_pt = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_pt.txt");
+
+ // transliterator
+ protected static Transliterator transliterator = Transliterator.getInstance("Any-Eng");
+
+ // blacklist of ngrams: to avoid generic keys
+ protected static Set<String> ngramBlacklist = loadFromClasspath("/eu/dnetlib/pace/config/ngram_blacklist.txt");
+
+ // html regex for normalization
+ public static final Pattern HTML_REGEX = Pattern.compile("<[^>]*>");
+
+ private static final String alpha = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 ";
+ private static final String aliases_from = "⁰¹²³⁴⁵⁶⁷⁸⁹⁺⁻⁼⁽⁾ⁿ₀₁₂₃₄₅₆₇₈₉₊₋₌₍₎àáâäæãåāèéêëēėęəîïíīįìôöòóœøōõûüùúūßśšłžźżçćčñń";
+ private static final String aliases_to = "0123456789+-=()n0123456789+-=()aaaaaaaaeeeeeeeeiiiiiioooooooouuuuussslzzzcccnn";
+
+ // doi prefix for normalization
+ public static final Pattern DOI_PREFIX = Pattern.compile("(https?:\\/\\/dx\\.doi\\.org\\/)|(doi:)");
+
+ private static Pattern numberPattern = Pattern.compile("-?\\d+(\\.\\d+)?");
+
+ private static Pattern hexUnicodePattern = Pattern.compile("\\\\u(\\p{XDigit}{4})");
+
+ protected String concat(final List<String> l) {
+ return Joiner.on(" ").skipNulls().join(l);
+ }
+
+ protected String cleanup(final String s) {
+ final String s1 = HTML_REGEX.matcher(s).replaceAll("");
+ final String s2 = unicodeNormalization(s1.toLowerCase());
+ final String s3 = nfd(s2);
+ final String s4 = fixXML(s3);
+ final String s5 = s4.replaceAll("([0-9]+)", " $1 ");
+ final String s6 = transliterate(s5);
+ final String s7 = fixAliases(s6);
+ final String s8 = s7.replaceAll("[^\\p{ASCII}]", "");
+ final String s9 = s8.replaceAll("[\\p{Punct}]", " ");
+ final String s10 = s9.replaceAll("\\n", " ");
+ final String s11 = s10.replaceAll("(?m)\\s+", " ");
+ final String s12 = s11.trim();
+ return s12;
+ }
+
+ protected String fixXML(final String a) {
+
+ return a
+ .replaceAll("–", " ")
+ .replaceAll("&", " ")
+ .replaceAll(""", " ")
+ .replaceAll("−", " ");
+ }
+
+ protected boolean checkNumbers(final String a, final String b) {
+ final String numbersA = getNumbers(a);
+ final String numbersB = getNumbers(b);
+ final String romansA = getRomans(a);
+ final String romansB = getRomans(b);
+ return !numbersA.equals(numbersB) || !romansA.equals(romansB);
+ }
+
+ protected String getRomans(final String s) {
+ final StringBuilder sb = new StringBuilder();
+ for (final String t : s.split(" ")) {
+ sb.append(isRoman(t) ? t : "");
+ }
+ return sb.toString();
+ }
+
+ protected boolean isRoman(final String s) {
+ return s
+ .replaceAll("^M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})$", "qwertyuiop")
+ .equals("qwertyuiop");
+ }
+
+ protected String getNumbers(final String s) {
+ final StringBuilder sb = new StringBuilder();
+ for (final String t : s.split(" ")) {
+ sb.append(isNumber(t) ? t : "");
+ }
+ return sb.toString();
+ }
+
+ public boolean isNumber(String strNum) {
+ if (strNum == null) {
+ return false;
+ }
+ return numberPattern.matcher(strNum).matches();
+ }
+
+ protected static String fixAliases(final String s) {
+ final StringBuilder sb = new StringBuilder();
+
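+ // map each character through the aliases_from/aliases_to tables (superscripts, subscripts, accented letters) to its plain ASCII counterpart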
+ s.chars().forEach(ch -> {
+ final int i = StringUtils.indexOf(aliases_from, ch);
+ sb.append(i >= 0 ? aliases_to.charAt(i) : (char) ch);
+ });
+
+ return sb.toString();
+ }
+
+ protected static String transliterate(final String s) {
+ try {
+ return transliterator.transliterate(s);
+ } catch (Exception e) {
+ return s;
+ }
+ }
+
+ protected String removeSymbols(final String s) {
+ final StringBuilder sb = new StringBuilder();
+
+ s.chars().forEach(ch -> {
+ sb.append(StringUtils.contains(alpha, ch) ? (char) ch : ' ');
+ });
+
+ return sb.toString().replaceAll("\\s+", " ");
+ }
+
+ protected boolean notNull(final String s) {
+ return s != null;
+ }
+
+ protected String normalize(final String s) {
+ return fixAliases(transliterate(nfd(unicodeNormalization(s))))
+ .toLowerCase()
+ // do not compact the regexes in a single expression, would cause StackOverflowError in case of large input
+ // strings
+ .replaceAll("[^ \\w]+", "")
+ .replaceAll("(\\p{InCombiningDiacriticalMarks})+", "")
+ .replaceAll("(\\p{Punct})+", " ")
+ .replaceAll("(\\d)+", " ")
+ .replaceAll("(\\n)+", " ")
+ .trim();
+ }
+
+ public String nfd(final String s) {
+ return Normalizer.normalize(s, Normalizer.Form.NFD);
+ }
+
+ public String utf8(final String s) {
+ byte[] bytes = s.getBytes(StandardCharsets.UTF_8);
+ return new String(bytes, StandardCharsets.UTF_8);
+ }
+
+ public String unicodeNormalization(final String s) {
+
+ Matcher m = hexUnicodePattern.matcher(s);
+ StringBuffer buf = new StringBuffer(s.length());
+ while (m.find()) {
+ String ch = String.valueOf((char) Integer.parseInt(m.group(1), 16));
+ m.appendReplacement(buf, Matcher.quoteReplacement(ch));
+ }
+ m.appendTail(buf);
+ return buf.toString();
+ }
+
+ protected String filterStopWords(final String s, final Set<String> stopwords) {
+ final StringTokenizer st = new StringTokenizer(s);
+ final StringBuilder sb = new StringBuilder();
+ while (st.hasMoreTokens()) {
+ final String token = st.nextToken();
+ if (!stopwords.contains(token)) {
+ sb.append(token);
+ sb.append(" ");
+ }
+ }
+ return sb.toString().trim();
+ }
+
+ public String filterAllStopWords(String s) {
+
+ s = filterStopWords(s, stopwords_en);
+ s = filterStopWords(s, stopwords_de);
+ s = filterStopWords(s, stopwords_it);
+ s = filterStopWords(s, stopwords_fr);
+ s = filterStopWords(s, stopwords_pt);
+ s = filterStopWords(s, stopwords_es);
+ s = filterStopWords(s, stopwords_gr);
+
+ return s;
+ }
+
+ protected Collection<String> filterBlacklisted(final Collection<String> set, final Set<String> ngramBlacklist) {
+ final Set<String> newset = Sets.newLinkedHashSet();
+ for (final String s : set) {
+ if (!ngramBlacklist.contains(s)) {
+ newset.add(s);
+ }
+ }
+ return newset;
+ }
+
+ public static Set<String> loadFromClasspath(final String classpath) {
+
+ Transliterator transliterator = Transliterator.getInstance("Any-Eng");
+
+ final Set<String> h = Sets.newHashSet();
+ try {
+ for (final String s : IOUtils
+ .readLines(NGramUtils.class.getResourceAsStream(classpath), StandardCharsets.UTF_8)) {
+ h.add(fixAliases(transliterator.transliterate(s))); // transliteration of the stopwords
+ }
+ } catch (final Throwable e) {
+ return Sets.newHashSet();
+ }
+ return h;
+ }
+
+ public static Map<String, String> loadMapFromClasspath(final String classpath) {
+
+ Transliterator transliterator = Transliterator.getInstance("Any-Eng");
+
+ final Map<String, String> m = new HashMap<>();
+ try {
+ for (final String s : IOUtils
+ .readLines(AbstractPaceFunctions.class.getResourceAsStream(classpath), StandardCharsets.UTF_8)) {
+ // string is like this: code;word1;word2;word3
+ String[] line = s.split(";");
+ String value = line[0];
+ for (int i = 1; i < line.length; i++) {
+ m.put(fixAliases(transliterator.transliterate(line[i].toLowerCase())), value);
+ }
+ }
+ } catch (final Throwable e) {
+ return new HashMap<>();
+ }
+ return m;
+ }
+
+ public String removeKeywords(String s, Set<String> keywords) {
+
+ s = " " + s + " ";
+ for (String k : keywords) {
+ s = s.replaceAll(k.toLowerCase(), "");
+ }
+
+ return s.trim();
+ }
+
+ public double commonElementsPercentage(Set<String> s1, Set<String> s2) {
+
+ double longer = Math.max(s1.size(), s2.size());
+ return (double) s1.stream().filter(s2::contains).count() / longer;
+ }
+
+ // convert the set of keywords to codes
+ public Set<String> toCodes(Set<String> keywords, Map<String, String> translationMap) {
+ return keywords.stream().map(s -> translationMap.get(s)).collect(Collectors.toSet());
+ }
+
+ public Set<String> keywordsToCodes(Set<String> keywords, Map<String, String> translationMap) {
+ return toCodes(keywords, translationMap);
+ }
+
+ public Set<String> citiesToCodes(Set<String> keywords) {
+ return toCodes(keywords, cityMap);
+ }
+
+ protected String firstLC(final String s) {
+ return StringUtils.substring(s, 0, 1).toLowerCase();
+ }
+
+ protected Iterable<String> tokens(final String s, final int maxTokens) {
+ return Iterables.limit(Splitter.on(" ").omitEmptyStrings().trimResults().split(s), maxTokens);
+ }
+
+ public String normalizePid(String pid) {
+ return DOI_PREFIX.matcher(pid.toLowerCase()).replaceAll("");
+ }
+
+ // get the list of keywords found in the input string
+ public Set<String> getKeywords(String s1, Map<String, String> translationMap, int windowSize) {
+
+ String s = s1;
+
+ List<String> tokens = Arrays.asList(s.toLowerCase().split(" "));
+
+ Set<String> codes = new HashSet<>();
+
+ if (tokens.size() < windowSize)
+ windowSize = tokens.size();
+
+ int length = windowSize;
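+ // try the longest window first and shrink it by one word per pass, so multi-word keywords win over their sub-phrases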
+
+ while (length != 0) {
+
+ for (int i = 0; i <= tokens.size() - length; i++) {
+ String candidate = concat(tokens.subList(i, i + length));
+ if (translationMap.containsKey(candidate)) {
+ codes.add(candidate);
+ s = s.replace(candidate, "").trim();
+ }
+ }
+
+ tokens = Arrays.asList(s.split(" "));
+ length -= 1;
+ }
+
+ return codes;
+ }
+
+ public Set<String> getCities(String s1, int windowSize) {
+ return getKeywords(s1, cityMap, windowSize);
+ }
+
+ public static String readFromClasspath(final String filename, final Class<?> clazz) {
+ final StringWriter sw = new StringWriter();
+ try {
+ IOUtils.copy(clazz.getResourceAsStream(filename), sw, StandardCharsets.UTF_8);
+ return sw.toString();
+ } catch (final IOException e) {
+ throw new RuntimeException("cannot load resource from classpath: " + filename);
+ }
+ }
+
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/config/Config.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/config/Config.java
new file mode 100644
index 000000000..4d823d129
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/config/Config.java
@@ -0,0 +1,53 @@
+
+package eu.dnetlib.pace.config;
+
+import java.util.List;
+import java.util.Map;
+import java.util.function.Predicate;
+
+import eu.dnetlib.pace.model.ClusteringDef;
+import eu.dnetlib.pace.model.FieldDef;
+import eu.dnetlib.pace.tree.support.TreeNodeDef;
+
+/**
+ * Interface for PACE configuration bean.
+ *
+ * @author claudio
+ */
+public interface Config {
+
+ /**
+ * Field configuration definitions.
+ *
+ * @return the list of definitions
+ */
+ public List<FieldDef> model();
+
+ /**
+ * Decision Tree definition
+ *
+ * @return the map representing the decision tree
+ */
+ public Map<String, TreeNodeDef> decisionTree();
+
+ /**
+ * Clusterings.
+ *
+ * @return the list
+ */
+ public List<ClusteringDef> clusterings();
+
+ /**
+ * Blacklists.
+ *
+ * @return the map
+ */
+ public Map<String, Predicate<String>> blacklists();
+
+ /**
+ * Translation map.
+ *
+ * @return the map
+ * */
+ public Map<String, String> translationMap();
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/config/DedupConfig.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/config/DedupConfig.java
new file mode 100644
index 000000000..ac0ef08e4
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/config/DedupConfig.java
@@ -0,0 +1,178 @@
+
+package eu.dnetlib.pace.config;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.nio.charset.StandardCharsets;
+import java.util.AbstractMap;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.function.Predicate;
+import java.util.regex.Pattern;
+import java.util.regex.PatternSyntaxException;
+import java.util.stream.Collectors;
+
+import org.antlr.stringtemplate.StringTemplate;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.Maps;
+
+import eu.dnetlib.pace.model.ClusteringDef;
+import eu.dnetlib.pace.model.FieldDef;
+import eu.dnetlib.pace.tree.support.TreeNodeDef;
+import eu.dnetlib.pace.util.PaceException;
+
+public class DedupConfig implements Config, Serializable {
+ private static String CONFIG_TEMPLATE = "dedupConfig.st";
+
+ private PaceConfig pace;
+
+ private WfConfig wf;
+
+ @JsonIgnore
+ private Map<String, Predicate<String>> blacklists;
+
+ private static Map<String, String> defaults = Maps.newHashMap();
+
+ static {
+ defaults.put("dedupRun", "001");
+ defaults.put("entityType", "result");
+ defaults.put("subEntityType", "resulttype");
+ defaults.put("subEntityValue", "publication");
+ defaults.put("orderField", "title");
+ defaults.put("queueMaxSize", "2000");
+ defaults.put("groupMaxSize", "10");
+ defaults.put("slidingWindowSize", "200");
+ defaults.put("rootBuilder", "result");
+ defaults.put("includeChildren", "true");
+ defaults.put("maxIterations", "20");
+ defaults.put("idPath", "$.id");
+ }
+
+ public DedupConfig() {
+ }
+
+ public static DedupConfig load(final String json) {
+
+ final DedupConfig config;
+ try {
+ config = new ObjectMapper().readValue(json, DedupConfig.class);
+ config.getPace().initModel();
+ config.getPace().initTranslationMap();
+
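+ // compile each blacklist entry into a regex and collapse the list into one predicate: a value is blacklisted if it matches any pattern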
+ config.blacklists = config
+ .getPace()
+ .getBlacklists()
+ .entrySet()
+ .stream()
+ .map(
+ e -> new AbstractMap.SimpleEntry<String, List<Pattern>>(e.getKey(),
+ e
+ .getValue()
+ .stream()
+ .filter(s -> !StringUtils.isBlank(s))
+ .map(Pattern::compile)
+ .collect(Collectors.toList())))
+ .collect(
+ Collectors
+ .toMap(
+ e -> e.getKey(),
+ e -> (Predicate<String> & Serializable) s -> e
+ .getValue()
+ .stream()
+ .filter(p -> p.matcher(s).matches())
+ .findFirst()
+ .isPresent()))
+
+ ;
+
+ return config;
+ } catch (IOException | PatternSyntaxException e) {
+ throw new PaceException("Error in parsing configuration json", e);
+ }
+
+ }
+
+ public static DedupConfig loadDefault() throws IOException {
+ return loadDefault(new HashMap<>());
+ }
+
+ public static DedupConfig loadDefault(final Map<String, String> params) throws IOException {
+
+ final StringTemplate template = new StringTemplate(new DedupConfig().readFromClasspath(CONFIG_TEMPLATE));
+
+ for (final Entry<String, String> e : defaults.entrySet()) {
+ template.setAttribute(e.getKey(), e.getValue());
+ }
+ for (final Entry<String, String> e : params.entrySet()) {
+ if (template.getAttribute(e.getKey()) != null) {
+ template.getAttributes().computeIfPresent(e.getKey(), (o, o2) -> e.getValue());
+ } else {
+ template.setAttribute(e.getKey(), e.getValue());
+ }
+ }
+
+ final String json = template.toString();
+ return load(json);
+ }
+
+ private String readFromClasspath(final String resource) throws IOException {
+ return IOUtils.toString(getClass().getResource(resource), StandardCharsets.UTF_8);
+ }
+
+ public PaceConfig getPace() {
+ return pace;
+ }
+
+ public void setPace(final PaceConfig pace) {
+ this.pace = pace;
+ }
+
+ public WfConfig getWf() {
+ return wf;
+ }
+
+ public void setWf(final WfConfig wf) {
+ this.wf = wf;
+ }
+
+ @Override
+ public String toString() {
+ try {
+ return new ObjectMapper().writeValueAsString(this);
+ } catch (IOException e) {
+ throw new PaceException("unable to serialise configuration", e);
+ }
+ }
+
+ @Override
+ public Map<String, TreeNodeDef> decisionTree() {
+ return getPace().getDecisionTree();
+ }
+
+ @Override
+ public List<FieldDef> model() {
+ return getPace().getModel();
+ }
+
+ @Override
+ public List<ClusteringDef> clusterings() {
+ return getPace().getClustering();
+ }
+
+ @Override
+ public Map<String, Predicate<String>> blacklists() {
+ return blacklists;
+ }
+
+ @Override
+ public Map<String, String> translationMap() {
+ return getPace().translationMap();
+ }
+
+}
diff --git a/dhp-pace-core/src/main/java/eu/dnetlib/pace/config/PaceConfig.java b/dhp-pace-core/src/main/java/eu/dnetlib/pace/config/PaceConfig.java
new file mode 100644
index 000000000..f1bc49f4a
--- /dev/null
+++ b/dhp-pace-core/src/main/java/eu/dnetlib/pace/config/PaceConfig.java
@@ -0,0 +1,108 @@
+
+package eu.dnetlib.pace.config;
+
+import java.io.Serializable;
+import java.util.List;
+import java.util.Map;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.google.common.collect.Maps;
+import com.ibm.icu.text.Transliterator;
+
+import eu.dnetlib.pace.common.AbstractPaceFunctions;
+import eu.dnetlib.pace.model.ClusteringDef;
+import eu.dnetlib.pace.model.FieldDef;
+import eu.dnetlib.pace.tree.support.TreeNodeDef;
+import eu.dnetlib.pace.util.PaceResolver;
+
+public class PaceConfig extends AbstractPaceFunctions implements Serializable {
+
+ private List<FieldDef> model;
+
+ private List<ClusteringDef> clustering;
+ private Map<String, TreeNodeDef> decisionTree;
+
+ private Map<String, List<String>> blacklists;
+ private Map