diff --git a/dhp-build/dhp-build-assembly-resources/pom.xml b/dhp-build/dhp-build-assembly-resources/pom.xml index 8bae191d3..2a89a26fd 100644 --- a/dhp-build/dhp-build-assembly-resources/pom.xml +++ b/dhp-build/dhp-build-assembly-resources/pom.xml @@ -6,7 +6,7 @@ eu.dnetlib.dhp dhp-build - 1.2.2-SNAPSHOT + 1.2.3-SNAPSHOT dhp-build-assembly-resources diff --git a/dhp-build/dhp-build-properties-maven-plugin/pom.xml b/dhp-build/dhp-build-properties-maven-plugin/pom.xml index ad8cd57b4..5be114e3c 100644 --- a/dhp-build/dhp-build-properties-maven-plugin/pom.xml +++ b/dhp-build/dhp-build-properties-maven-plugin/pom.xml @@ -6,7 +6,7 @@ eu.dnetlib.dhp dhp-build - 1.2.2-SNAPSHOT + 1.2.3-SNAPSHOT dhp-build-properties-maven-plugin diff --git a/dhp-build/dhp-code-style/pom.xml b/dhp-build/dhp-code-style/pom.xml index 08f5de9ee..515ed35ce 100644 --- a/dhp-build/dhp-code-style/pom.xml +++ b/dhp-build/dhp-code-style/pom.xml @@ -5,7 +5,7 @@ eu.dnetlib.dhp dhp-code-style - 1.2.2-SNAPSHOT + 1.2.3-SNAPSHOT jar diff --git a/dhp-build/pom.xml b/dhp-build/pom.xml index 369e25b24..d2dcbc36e 100644 --- a/dhp-build/pom.xml +++ b/dhp-build/pom.xml @@ -4,7 +4,7 @@ eu.dnetlib.dhp dhp - 1.2.2-SNAPSHOT + 1.2.3-SNAPSHOT dhp-build pom diff --git a/dhp-common/pom.xml b/dhp-common/pom.xml index 60e66f45a..0e7652dd3 100644 --- a/dhp-common/pom.xml +++ b/dhp-common/pom.xml @@ -5,7 +5,7 @@ eu.dnetlib.dhp dhp - 1.2.2-SNAPSHOT + 1.2.3-SNAPSHOT ../ diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/common/FunctionalInterfaceSupport.java b/dhp-common/src/main/java/eu/dnetlib/dhp/common/FunctionalInterfaceSupport.java index e793e3f29..c6c9d8044 100644 --- a/dhp-common/src/main/java/eu/dnetlib/dhp/common/FunctionalInterfaceSupport.java +++ b/dhp-common/src/main/java/eu/dnetlib/dhp/common/FunctionalInterfaceSupport.java @@ -2,6 +2,7 @@ package eu.dnetlib.dhp.common; import java.io.Serializable; +import java.util.function.Consumer; import java.util.function.Supplier; /** Provides serializable and throwing extensions to standard functional interfaces. */ @@ -10,6 +11,16 @@ public class FunctionalInterfaceSupport { private FunctionalInterfaceSupport() { } + /** + * Serializable consumer of any kind of objects. To be used within spark processing pipelines when supplying + * functions externally. + * + * @param + */ + @FunctionalInterface + public interface SerializableConsumer extends Consumer, Serializable { + } + /** * Serializable supplier of any kind of objects. To be used within spark processing pipelines when supplying * functions externally. diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/common/PacePerson.java b/dhp-common/src/main/java/eu/dnetlib/dhp/common/PacePerson.java index 1909ddcca..6e02ca614 100644 --- a/dhp-common/src/main/java/eu/dnetlib/dhp/common/PacePerson.java +++ b/dhp-common/src/main/java/eu/dnetlib/dhp/common/PacePerson.java @@ -16,6 +16,12 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.hash.Hashing; +/** + * PacePerson tries to derive information from the fullname string of an author. Such information includes the name, the + * surname and the fullname split into terms. It also provides an additional field for the original data. The computation + * of the name and the surname is not always possible: when it is impossible to assert which are the names and which the + * surnames, the lists are left empty.
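Reviewer note on the SerializableConsumer added above: lambdas shipped to Spark executors must be serializable, which a plain java.util.function.Consumer captured in a closure does not guarantee. A minimal usage sketch (the sample value is invented):

```java
import eu.dnetlib.dhp.common.FunctionalInterfaceSupport.SerializableConsumer;

public class SerializableConsumerDemo {

	public static void main(String[] args) {
		// Extending both Consumer and Serializable lets Spark closures capture the
		// lambda without throwing java.io.NotSerializableException on the executors.
		SerializableConsumer<String> audit = id -> System.out.println("visited: " + id);

		// Outside Spark it behaves like any other Consumer
		audit.accept("50|doi_________::abc123"); // hypothetical record id
	}
}
```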
+ */ public class PacePerson { private static final String UTF8 = "UTF-8"; @@ -26,10 +32,19 @@ public class PacePerson { private static Set particles = null; + /** + * Capitalizes a string + * + * @param s the string to capitalize + * @return the input string with capital letter + */ public static final String capitalize(final String s) { return WordUtils.capitalize(s.toLowerCase(), ' ', '-'); } + /** + * Adds a dot to a string with length equals to 1 + */ public static final String dotAbbreviations(final String s) { return s.length() == 1 ? s + "." : s; } @@ -46,6 +61,12 @@ public class PacePerson { return h; } + /** + * The constructor of the class. It fills the fields of the class basing on the input fullname. + * + * @param s the input string (fullname of the author) + * @param aggressive set the string normalization type + */ public PacePerson(String s, final boolean aggressive) { original = s; s = Normalizer.normalize(s, Normalizer.Form.NFD); @@ -64,6 +85,7 @@ public class PacePerson { // s = s.replaceAll("[\\W&&[^,-]]", ""); } + // if the string contains a comma, it can derive surname and name by splitting on it if (s.contains(",")) { final String[] arr = s.split(","); if (arr.length == 1) { @@ -74,21 +96,23 @@ public class PacePerson { fullname.addAll(surname); fullname.addAll(name); } - } else { + } else { // otherwise, it should rely on CAPS terms and short terms fullname = splitTerms(s); int lastInitialPosition = fullname.size(); boolean hasSurnameInUpperCase = false; + // computes lastInitialPosition and hasSurnameInUpperCase for (int i = 0; i < fullname.size(); i++) { final String term = fullname.get(i); if (term.length() == 1) { - lastInitialPosition = i; + lastInitialPosition = i; // first word in the name longer than 1 (to avoid name with dots) } else if (term.equals(term.toUpperCase())) { - hasSurnameInUpperCase = true; + hasSurnameInUpperCase = true; // if one of the words is CAPS } } + // manages particular cases of fullnames if (lastInitialPosition < fullname.size() - 1) { // Case: Michele G. Artini name = fullname.subList(0, lastInitialPosition + 1); surname = fullname.subList(lastInitialPosition + 1, fullname.size()); diff --git a/dhp-common/src/test/java/eu/dnetlib/dhp/common/PacePersonTest.java b/dhp-common/src/test/java/eu/dnetlib/dhp/common/PacePersonTest.java new file mode 100644 index 000000000..5ebd7213e --- /dev/null +++ b/dhp-common/src/test/java/eu/dnetlib/dhp/common/PacePersonTest.java @@ -0,0 +1,27 @@ + +package eu.dnetlib.dhp.common; + +import static org.junit.jupiter.api.Assertions.*; + +import org.junit.jupiter.api.Test; + +public class PacePersonTest { + + @Test + public void pacePersonTest1() { + + PacePerson p = new PacePerson("Artini, Michele", false); + assertEquals("Artini", p.getSurnameString()); + assertEquals("Michele", p.getNameString()); + assertEquals("Artini, Michele", p.getNormalisedFullname()); + } + + @Test + public void pacePersonTest2() { + PacePerson p = new PacePerson("Michele G. 
Artini", false); + assertEquals("Artini, Michele G.", p.getNormalisedFullname()); + assertEquals("Michele G", p.getNameString()); + assertEquals("Artini", p.getSurnameString()); + } + +} diff --git a/dhp-schemas/pom.xml b/dhp-schemas/pom.xml index 5e864cf94..56fb8ead2 100644 --- a/dhp-schemas/pom.xml +++ b/dhp-schemas/pom.xml @@ -5,7 +5,7 @@ eu.dnetlib.dhp dhp - 1.2.2-SNAPSHOT + 1.2.3-SNAPSHOT ../ diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/ModelConstants.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/ModelConstants.java index e32dd10fa..a3c1610db 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/ModelConstants.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/common/ModelConstants.java @@ -14,6 +14,7 @@ public class ModelConstants { public static final String DNET_DATA_CITE_RESOURCE = "dnet:dataCite_resource"; public static final String DNET_PROVENANCE_ACTIONS = "dnet:provenanceActions"; public static final String DNET_COUNTRY_TYPE = "dnet:countries"; + public static final String DNET_REVIEW_LEVELS = "dnet:review_levels"; public static final String SYSIMPORT_CROSSWALK_REPOSITORY = "sysimport:crosswalk:repository"; public static final String SYSIMPORT_CROSSWALK_ENTITYREGISTRY = "sysimport:crosswalk:entityregistry"; @@ -25,6 +26,10 @@ public class ModelConstants { public static final String ORP_RESULTTYPE_CLASSID = "other"; public static final String RESULT_RESULT = "resultResult"; + /** + * @deprecated Use {@link ModelConstants#RELATIONSHIP} instead. + */ + @Deprecated public static final String PUBLICATION_DATASET = "publicationDataset"; public static final String IS_RELATED_TO = "isRelatedTo"; public static final String SUPPLEMENT = "supplement"; @@ -34,6 +39,12 @@ public class ModelConstants { public static final String IS_PART_OF = "IsPartOf"; public static final String HAS_PARTS = "HasParts"; public static final String RELATIONSHIP = "relationship"; + public static final String CITATION = "citation"; + public static final String CITES = "cites"; + public static final String IS_CITED_BY = "IsCitedBy"; + public static final String REVIEW = "review"; + public static final String REVIEWS = "reviews"; + public static final String IS_REVIEWED_BY = "IsReviewedBy"; public static final String RESULT_PROJECT = "resultProject"; public static final String OUTCOME = "outcome"; diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Instance.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Instance.java index 2b7d3846c..29d495261 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Instance.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Instance.java @@ -31,7 +31,7 @@ public class Instance implements Serializable { // typed results private Field processingchargecurrency; - private Field refereed; // peer-review status + private Qualifier refereed; // peer-review status public Field getLicense() { return license; @@ -113,11 +113,11 @@ public class Instance implements Serializable { this.processingchargecurrency = processingchargecurrency; } - public Field getRefereed() { + public Qualifier getRefereed() { return refereed; } - public void setRefereed(Field refereed) { + public void setRefereed(Qualifier refereed) { this.refereed = refereed; } diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Measure.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Measure.java new file mode 100644 index 000000000..c0c14d10d --- /dev/null +++ 
b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Measure.java @@ -0,0 +1,59 @@ + +package eu.dnetlib.dhp.schema.oaf; + +import java.util.List; + +import com.google.common.base.Objects; + +/** + * Represents a measure; it must be further described by a system available resource providing name and descriptions. + */ +public class Measure { + + /** + * Unique measure identifier. + */ + private String id; + + /** + * List of units associated with this measure. KeyValue provides a pair to store the label (key) and the value, plus + * common provenance information. + */ + private List unit; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public List getUnit() { + return unit; + } + + public void setUnit(List unit) { + this.unit = unit; + } + + public void mergeFrom(Measure m) { + // TODO + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + Measure measure = (Measure) o; + return Objects.equal(id, measure.id) && + Objects.equal(unit, measure.unit); + } + + @Override + public int hashCode() { + return Objects.hashCode(id, unit); + } +} diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Programme.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Programme.java new file mode 100644 index 000000000..00dc32fbc --- /dev/null +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Programme.java @@ -0,0 +1,38 @@ + +package eu.dnetlib.dhp.schema.oaf; + +import java.io.Serializable; +import java.util.Objects; + +public class Programme implements Serializable { + private String code; + private String description; + + public String getCode() { + return code; + } + + public void setCode(String code) { + this.code = code; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + Programme programme = (Programme) o; + return Objects.equals(code, programme.code); + } + +} diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Project.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Project.java index 924c08cc9..1fcfb305e 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Project.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Project.java @@ -58,6 +58,8 @@ public class Project extends OafEntity implements Serializable { private Float fundedamount; + private List programme; + public Field getWebsiteurl() { return websiteurl; } @@ -266,6 +268,14 @@ public class Project extends OafEntity implements Serializable { this.fundedamount = fundedamount; } + public List getProgramme() { + return programme; + } + + public void setProgramme(List programme) { + this.programme = programme; + } + @Override public void mergeFrom(OafEntity e) { super.mergeFrom(e); @@ -320,6 +330,9 @@ fundedamount = p.getFundedamount() != null && compareTrust(this, e) < 0 ?
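Reviewer note: Programme overrides equals (identity by code) but not hashCode, which breaks the equals/hashCode contract whenever instances end up in hash-based collections or in stream deduplication. A sketch of a matching override, assuming the java.util.Objects import already present in the class:

```java
@Override
public int hashCode() {
	// must agree with equals, which compares the code field only
	return Objects.hash(code);
}
```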
p.getFundedamount() : fundedamount; + + programme = mergeLists(programme, p.getProgramme()); + mergeOAFDataInfo(e); } } diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Relation.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Relation.java index c928992aa..17a50d7ac 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Relation.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Relation.java @@ -41,6 +41,16 @@ public class Relation extends Oaf { */ private String target; + /** + * Was this relationship authoritatively validated? + */ + private Boolean validated; + + /** + * When was this relationship authoritatively validated. + */ + private String validationDate; + /** * List of relation specific properties. Values include 'similarityLevel', indicating the similarity score between a * pair of publications. @@ -95,6 +105,22 @@ public class Relation extends Oaf { this.properties = properties; } + public Boolean getValidated() { + return validated; + } + + public void setValidated(Boolean validated) { + this.validated = validated; + } + + public String getValidationDate() { + return validationDate; + } + + public void setValidationDate(String validationDate) { + this.validationDate = validationDate; + } + public void mergeFrom(final Relation r) { checkArgument(Objects.equals(getSource(), r.getSource()), "source ids must be equal"); @@ -137,4 +163,5 @@ public class Relation extends Oaf { public int hashCode() { return Objects.hash(relType, subRelType, relClass, source, target, collectedfrom); } + } diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Result.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Result.java index 09e77a244..fdd42ab7d 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Result.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Result.java @@ -9,6 +9,8 @@ import java.util.stream.Collectors; public class Result extends OafEntity implements Serializable { + private List measures; + private List author; // resulttype allows subclassing results into publications | datasets | software @@ -53,6 +55,14 @@ public class Result extends OafEntity implements Serializable { private List instance; + public List getMeasures() { + return measures; + } + + public void setMeasures(List measures) { + this.measures = measures; + } + public List getAuthor() { return author; } @@ -231,6 +241,8 @@ public class Result extends OafEntity implements Serializable { Result r = (Result) e; + // TODO consider merging also Measures + instance = mergeLists(instance, r.getInstance()); if (r.getBestaccessright() != null && compareTrust(this, r) < 0) @@ -254,9 +266,6 @@ public class Result extends OafEntity implements Serializable { final StructuredProperty p = baseMainTitle; title = title.stream().filter(t -> t != p).collect(Collectors.toList()); } -// -// -// title.remove(baseMainTitle); } StructuredProperty newMainTitle = null; @@ -264,18 +273,18 @@ public class Result extends OafEntity implements Serializable { newMainTitle = getMainTitle(r.getTitle()); if (newMainTitle != null) { final StructuredProperty p = newMainTitle; - title = title.stream().filter(t -> t != p).collect(Collectors.toList()); + r.setTitle(r.getTitle().stream().filter(t -> t != p).collect(Collectors.toList())); } - - // r.getTitle().remove(newMainTitle); } - if (newMainTitle != null && compareTrust(this, r) < 0) + if (newMainTitle != null && compareTrust(this, r) < 0) { baseMainTitle = newMainTitle; + } title = mergeLists(title, 
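The new validated/validationDate pair on Relation marks links confirmed by an authoritative source (for instance a funder). A small sketch of how a validated relation might be flagged; note that mergeFrom, equals and hashCode above do not take the two new fields into account yet, so merge semantics for them are left to the caller:

```java
import eu.dnetlib.dhp.schema.oaf.Relation;

public class ValidatedRelationDemo {

	public static void main(String[] args) {
		Relation r = new Relation();
		r.setValidated(Boolean.TRUE);
		r.setValidationDate("2020-05-14"); // assumed ISO-8601; the model does not enforce a format
		System.out.println(r.getValidated() + " on " + r.getValidationDate());
	}
}
```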
r.getTitle()); - if (title != null && baseMainTitle != null) + if (title != null && baseMainTitle != null) { title.add(baseMainTitle); + } relevantdate = mergeLists(relevantdate, r.getRelevantdate()); diff --git a/dhp-schemas/src/test/java/eu/dnetlib/dhp/schema/oaf/MeasureTest.java b/dhp-schemas/src/test/java/eu/dnetlib/dhp/schema/oaf/MeasureTest.java new file mode 100644 index 000000000..26b4407c9 --- /dev/null +++ b/dhp-schemas/src/test/java/eu/dnetlib/dhp/schema/oaf/MeasureTest.java @@ -0,0 +1,57 @@ + +package eu.dnetlib.dhp.schema.oaf; + +import java.io.IOException; +import java.util.List; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.Lists; + +public class MeasureTest { + + public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper() + .setSerializationInclusion(JsonInclude.Include.NON_NULL); + + @Test + public void testMeasureSerialization() throws IOException { + + Measure popularity = new Measure(); + popularity.setId("popularity"); + popularity + .setUnit( + Lists + .newArrayList( + unit("score", "0.5"))); + + Measure influence = new Measure(); + influence.setId("influence"); + influence + .setUnit( + Lists + .newArrayList( + unit("score", "0.3"))); + + List m = Lists.newArrayList(popularity, influence); + + String s = OBJECT_MAPPER.writeValueAsString(m); + System.out.println(s); + + List mm = OBJECT_MAPPER.readValue(s, new TypeReference>() { + }); + + Assertions.assertNotNull(mm); + } + + private KeyValue unit(String key, String value) { + KeyValue unit = new KeyValue(); + unit.setKey(key); + unit.setValue(value); + return unit; + } + +} diff --git a/dhp-workflows/dhp-actionmanager/pom.xml b/dhp-workflows/dhp-actionmanager/pom.xml index ec6247102..b50c6705b 100644 --- a/dhp-workflows/dhp-actionmanager/pom.xml +++ b/dhp-workflows/dhp-actionmanager/pom.xml @@ -4,7 +4,7 @@ eu.dnetlib.dhp dhp-workflows - 1.2.2-SNAPSHOT + 1.2.3-SNAPSHOT dhp-actionmanager diff --git a/dhp-workflows/dhp-actionmanager/src/main/java/eu/dnetlib/dhp/actionmanager/migration/ProtoConverter.java b/dhp-workflows/dhp-actionmanager/src/main/java/eu/dnetlib/dhp/actionmanager/migration/ProtoConverter.java index e55c0eb7b..8ea877aec 100644 --- a/dhp-workflows/dhp-actionmanager/src/main/java/eu/dnetlib/dhp/actionmanager/migration/ProtoConverter.java +++ b/dhp-workflows/dhp-actionmanager/src/main/java/eu/dnetlib/dhp/actionmanager/migration/ProtoConverter.java @@ -96,12 +96,21 @@ public class ProtoConverter implements Serializable { .stream() .distinct() .collect(Collectors.toCollection(ArrayList::new)) : null); - i.setRefereed(mapStringField(ri.getRefereed())); + i.setRefereed(mapRefereed(ri.getRefereed())); i.setProcessingchargeamount(mapStringField(ri.getProcessingchargeamount())); i.setProcessingchargecurrency(mapStringField(ri.getProcessingchargecurrency())); return i; } + private static Qualifier mapRefereed(FieldTypeProtos.StringField refereed) { + Qualifier q = new Qualifier(); + q.setClassid(refereed.getValue()); + q.setClassname(refereed.getValue()); + q.setSchemeid("dnet:review_levels"); + q.setSchemename("dnet:review_levels"); + return q; + } + private static List convertExternalRefs(OafProtos.Oaf oaf) { ResultProtos.Result r = oaf.getEntity().getResult(); if (r.getExternalReferenceCount() > 0) { diff --git a/dhp-workflows/dhp-aggregation/pom.xml
b/dhp-workflows/dhp-aggregation/pom.xml index 9f082df70..c04910a58 100644 --- a/dhp-workflows/dhp-aggregation/pom.xml +++ b/dhp-workflows/dhp-aggregation/pom.xml @@ -4,7 +4,7 @@ eu.dnetlib.dhp dhp-workflows - 1.2.2-SNAPSHOT + 1.2.3-SNAPSHOT dhp-aggregation @@ -38,48 +38,6 @@ ${project.version} - - eu.dnetlib - dnet-actionmanager-common - - - eu.dnetlib - dnet-openaireplus-mapping-utils - - - saxonica - saxon - - - saxonica - saxon-dom - - - jgrapht - jgrapht - - - net.sf.ehcache - ehcache - - - org.springframework - spring-test - - - org.apache.* - * - - - apache - * - - - - - eu.dnetlib - dnet-openaire-data-protos - net.sf.saxon @@ -100,11 +58,15 @@ jaxen + - org.apache.hadoop - hadoop-distcp + org.apache.commons + commons-csv + 1.8 + + diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/PrepareProgramme.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/PrepareProgramme.java new file mode 100644 index 000000000..c6dab13a0 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/PrepareProgramme.java @@ -0,0 +1,123 @@ + +package eu.dnetlib.dhp.actionmanager.project; + +import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession; + +import java.util.HashMap; +import java.util.Optional; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.spark.SparkConf; +import org.apache.spark.api.java.function.MapFunction; +import org.apache.spark.sql.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.fasterxml.jackson.databind.ObjectMapper; + +import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme; +import eu.dnetlib.dhp.application.ArgumentApplicationParser; +import eu.dnetlib.dhp.common.HdfsSupport; +import scala.Tuple2; + +public class PrepareProgramme { + + private static final Logger log = LoggerFactory.getLogger(PrepareProgramme.class); + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + public static void main(String[] args) throws Exception { + + String jsonConfiguration = IOUtils + .toString( + PrepareProgramme.class + .getResourceAsStream( + "/eu/dnetlib/dhp/actionmanager/project/prepare_programme_parameters.json")); + + final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration); + + parser.parseArgument(args); + + Boolean isSparkSessionManaged = Optional + .ofNullable(parser.get("isSparkSessionManaged")) + .map(Boolean::valueOf) + .orElse(Boolean.TRUE); + + log.info("isSparkSessionManaged: {}", isSparkSessionManaged); + + final String programmePath = parser.get("programmePath"); + log.info("programmePath {}: ", programmePath); + + final String outputPath = parser.get("outputPath"); + log.info("outputPath {}: ", outputPath); + + SparkConf conf = new SparkConf(); + + runWithSparkSession( + conf, + isSparkSessionManaged, + spark -> { + removeOutputDir(spark, outputPath); + exec(spark, programmePath, outputPath); + }); + } + + private static void removeOutputDir(SparkSession spark, String path) { + HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration()); + } + + private static void exec(SparkSession spark, String programmePath, String outputPath) { + Dataset programme = readPath(spark, programmePath, CSVProgramme.class); + + programme + .toJavaRDD() + .filter(p -> !p.getCode().contains("FP7")) + .mapToPair(csvProgramme -> new Tuple2<>(csvProgramme.getCode(), csvProgramme)) + .reduceByKey((a, b) -> { + if 
(StringUtils.isEmpty(a.getShortTitle())) { + if (StringUtils.isEmpty(b.getShortTitle())) { + if (StringUtils.isEmpty(a.getTitle())) { + if (StringUtils.isNotEmpty(b.getTitle())) { + a.setShortTitle(b.getTitle()); + a.setLanguage(b.getLanguage()); + } + } else {// notIsEmpty a.getTitle + if (StringUtils.isEmpty(b.getTitle())) { + a.setShortTitle(a.getTitle()); + } else { + if (b.getLanguage().equalsIgnoreCase("en")) { + a.setShortTitle(b.getTitle()); + a.setLanguage(b.getLanguage()); + } else { + a.setShortTitle(a.getTitle()); + } + } + } + } else {// not isEmpty b.getShortTitle + a.setShortTitle(b.getShortTitle()); + // a.setLanguage(b.getLanguage()); + } + } + return a; + + }) + .map(p -> { + CSVProgramme csvProgramme = p._2(); + if (StringUtils.isEmpty(csvProgramme.getShortTitle())) { + csvProgramme.setShortTitle(csvProgramme.getTitle()); + } + return OBJECT_MAPPER.writeValueAsString(csvProgramme); + }) + .saveAsTextFile(outputPath); + + } + + public static Dataset readPath( + SparkSession spark, String inputPath, Class clazz) { + return spark + .read() + .textFile(inputPath) + .map((MapFunction) value -> OBJECT_MAPPER.readValue(value, clazz), Encoders.bean(clazz)); + } + +} diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/PrepareProjects.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/PrepareProjects.java new file mode 100644 index 000000000..78aed1a69 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/PrepareProjects.java @@ -0,0 +1,120 @@ + +package eu.dnetlib.dhp.actionmanager.project; + +import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession; + +import java.util.*; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.spark.SparkConf; +import org.apache.spark.api.java.JavaRDD; +import org.apache.spark.api.java.function.FlatMapFunction; +import org.apache.spark.api.java.function.MapFunction; +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Encoders; +import org.apache.spark.sql.SaveMode; +import org.apache.spark.sql.SparkSession; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.fasterxml.jackson.databind.ObjectMapper; + +import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme; +import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProject; +import eu.dnetlib.dhp.application.ArgumentApplicationParser; +import eu.dnetlib.dhp.common.HdfsSupport; +import scala.Tuple2; + +public class PrepareProjects { + + private static final Logger log = LoggerFactory.getLogger(PrepareProgramme.class); + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + private static final HashMap programmeMap = new HashMap<>(); + + public static void main(String[] args) throws Exception { + + String jsonConfiguration = IOUtils + .toString( + PrepareProjects.class + .getResourceAsStream( + "/eu/dnetlib/dhp/actionmanager/project/prepare_project_parameters.json")); + + final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration); + + parser.parseArgument(args); + + Boolean isSparkSessionManaged = Optional + .ofNullable(parser.get("isSparkSessionManaged")) + .map(Boolean::valueOf) + .orElse(Boolean.TRUE); + + log.info("isSparkSessionManaged: {}", isSparkSessionManaged); + + final String projectPath = parser.get("projectPath"); + log.info("projectPath {}: ", projectPath); + + final String outputPath = 
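Reviewer note: the nested conditionals inside the reduceByKey in PrepareProgramme above are hard to follow. An equivalent, flattened sketch of the same decision table (keep a's short title if present; otherwise borrow b's; otherwise derive one from whichever title exists, preferring an English title when both exist), assuming the same CSVProgramme bean:

```java
import org.apache.commons.lang3.StringUtils;

import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme;

public class ProgrammeMerge {

	public static CSVProgramme merge(CSVProgramme a, CSVProgramme b) {
		if (StringUtils.isNotEmpty(a.getShortTitle())) {
			return a; // a already carries a short title: keep it
		}
		if (StringUtils.isNotEmpty(b.getShortTitle())) {
			a.setShortTitle(b.getShortTitle()); // borrow b's short title
		} else if (StringUtils.isEmpty(a.getTitle())) {
			if (StringUtils.isNotEmpty(b.getTitle())) { // only b carries a title
				a.setShortTitle(b.getTitle());
				a.setLanguage(b.getLanguage());
			}
		} else if (StringUtils.isEmpty(b.getTitle())) { // only a carries a title
			a.setShortTitle(a.getTitle());
		} else if ("en".equalsIgnoreCase(b.getLanguage())) { // both titled: prefer English (null-safe here)
			a.setShortTitle(b.getTitle());
			a.setLanguage(b.getLanguage());
		} else {
			a.setShortTitle(a.getTitle());
		}
		return a;
	}
}
```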
parser.get("outputPath"); + log.info("outputPath {}: ", outputPath); + + final String dbProjectPath = parser.get("dbProjectPath"); + log.info("dbProjectPath {}: ", dbProjectPath); + + SparkConf conf = new SparkConf(); + + runWithSparkSession( + conf, + isSparkSessionManaged, + spark -> { + removeOutputDir(spark, outputPath); + exec(spark, projectPath, dbProjectPath, outputPath); + }); + } + + private static void removeOutputDir(SparkSession spark, String path) { + HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration()); + } + + private static void exec(SparkSession spark, String projectPath, String dbProjectPath, String outputPath) { + Dataset project = readPath(spark, projectPath, CSVProject.class); + Dataset dbProjects = readPath(spark, dbProjectPath, ProjectSubset.class); + + dbProjects + .joinWith(project, dbProjects.col("code").equalTo(project.col("id")), "left") + .flatMap(getTuple2CSVProjectFlatMapFunction(), Encoders.bean(CSVProject.class)) + .filter(Objects::nonNull) + .write() + .mode(SaveMode.Overwrite) + .option("compression", "gzip") + .json(outputPath); + + } + + private static FlatMapFunction, CSVProject> getTuple2CSVProjectFlatMapFunction() { + return (FlatMapFunction, CSVProject>) value -> { + Optional csvProject = Optional.ofNullable(value._2()); + List csvProjectList = new ArrayList<>(); + if (csvProject.isPresent()) { + + String[] programme = csvProject.get().getProgramme().split(";"); + Arrays + .stream(programme) + .forEach(p -> { + CSVProject proj = new CSVProject(); + proj.setProgramme(p); + proj.setId(csvProject.get().getId()); + csvProjectList.add(proj); + }); + } + return csvProjectList.iterator(); + }; + } + + public static Dataset readPath( + SparkSession spark, String inputPath, Class clazz) { + return spark + .read() + .textFile(inputPath) + .map((MapFunction) value -> OBJECT_MAPPER.readValue(value, clazz), Encoders.bean(clazz)); + } +} diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/ProjectSubset.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/ProjectSubset.java new file mode 100644 index 000000000..2fccbc516 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/ProjectSubset.java @@ -0,0 +1,17 @@ + +package eu.dnetlib.dhp.actionmanager.project; + +import java.io.Serializable; + +public class ProjectSubset implements Serializable { + + private String code; + + public String getCode() { + return code; + } + + public void setCode(String code) { + this.code = code; + } +} diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/ReadProjectsFromDB.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/ReadProjectsFromDB.java new file mode 100644 index 000000000..2d541d2f9 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/ReadProjectsFromDB.java @@ -0,0 +1,115 @@ + +package eu.dnetlib.dhp.actionmanager.project; + +import java.io.BufferedWriter; +import java.io.Closeable; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.nio.charset.StandardCharsets; +import java.sql.ResultSet; +import java.util.Arrays; +import java.util.List; +import java.util.function.Consumer; +import java.util.function.Function; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; 
+import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; + +import com.fasterxml.jackson.databind.ObjectMapper; + +import eu.dnetlib.dhp.application.ArgumentApplicationParser; +import eu.dnetlib.dhp.common.DbClient; + +public class ReadProjectsFromDB implements Closeable { + + private final DbClient dbClient; + private static final Log log = LogFactory.getLog(ReadProjectsFromDB.class); + private final Configuration conf; + private final BufferedWriter writer; + private final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + private final static String query = "SELECT code " + + "from projects where id like 'corda__h2020%' "; + + public static void main(final String[] args) throws Exception { + final ArgumentApplicationParser parser = new ArgumentApplicationParser( + IOUtils + .toString( + ReadProjectsFromDB.class + .getResourceAsStream( + "/eu/dnetlib/dhp/actionmanager/project/read_projects_db.json"))); + + parser.parseArgument(args); + + final String dbUrl = parser.get("postgresUrl"); + final String dbUser = parser.get("postgresUser"); + final String dbPassword = parser.get("postgresPassword"); + final String hdfsPath = parser.get("hdfsPath"); + final String hdfsNameNode = parser.get("hdfsNameNode"); + + try (final ReadProjectsFromDB rbl = new ReadProjectsFromDB(hdfsPath, hdfsNameNode, dbUrl, dbUser, + dbPassword)) { + + log.info("Processing projects..."); + rbl.execute(query, rbl::processProjectsEntry); + + } + } + + public void execute(final String sql, final Function> producer) throws Exception { + + final Consumer consumer = rs -> producer.apply(rs).forEach(r -> writeProject(r)); + + dbClient.processResults(sql, consumer); + } + + public List processProjectsEntry(ResultSet rs) { + try { + ProjectSubset p = new ProjectSubset(); + p.setCode(rs.getString("code")); + + return Arrays.asList(p); + + } catch (final Exception e) { + throw new RuntimeException(e); + } + } + + protected void writeProject(final ProjectSubset r) { + try { + writer.write(OBJECT_MAPPER.writeValueAsString(r)); + writer.newLine(); + } catch (final Exception e) { + throw new RuntimeException(e); + } + } + + public ReadProjectsFromDB( + final String hdfsPath, String hdfsNameNode, final String dbUrl, final String dbUser, final String dbPassword) + throws Exception { + + this.dbClient = new DbClient(dbUrl, dbUser, dbPassword); + this.conf = new Configuration(); + this.conf.set("fs.defaultFS", hdfsNameNode); + FileSystem fileSystem = FileSystem.get(this.conf); + Path hdfsWritePath = new Path(hdfsPath); + FSDataOutputStream fsDataOutputStream = null; + if (fileSystem.exists(hdfsWritePath)) { + fileSystem.delete(hdfsWritePath, false); + } + fsDataOutputStream = fileSystem.create(hdfsWritePath); + + this.writer = new BufferedWriter(new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8)); + } + + @Override + public void close() throws IOException { + dbClient.close(); + writer.close(); + } +} diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/SparkAtomicActionJob.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/SparkAtomicActionJob.java new file mode 100644 index 000000000..1023e2d19 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/SparkAtomicActionJob.java @@ -0,0 +1,161 @@ + +package eu.dnetlib.dhp.actionmanager.project; + +import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession; + +import 
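ReadProjectsFromDB writes one JSON object per line (e.g. {"code":"101003374"}, code invented) to hdfsPath; that file is exactly what PrepareProjects later loads as ProjectSubset through readPath. A hedged sketch of the serialization round trip using Jackson alone:

```java
import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.actionmanager.project.ProjectSubset;

public class ProjectSubsetRoundTrip {

	public static void main(String[] args) throws Exception {
		ObjectMapper mapper = new ObjectMapper();
		ProjectSubset p = new ProjectSubset();
		p.setCode("101003374"); // hypothetical grant code
		String line = mapper.writeValueAsString(p); // one line of the HDFS file
		ProjectSubset back = mapper.readValue(line, ProjectSubset.class);
		System.out.println(line + " -> " + back.getCode());
	}
}
```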
java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Objects; +import java.util.Optional; +import java.util.function.Consumer; + +import org.apache.commons.io.IOUtils; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.IntWritable; +import org.apache.hadoop.io.SequenceFile; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapred.SequenceFileOutputFormat; +import org.apache.hadoop.mapred.TextOutputFormat; +import org.apache.spark.SparkConf; +import org.apache.spark.api.java.function.MapFunction; +import org.apache.spark.api.java.function.MapGroupsFunction; +import org.apache.spark.rdd.SequenceFileRDDFunctions; +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Encoders; +import org.apache.spark.sql.SaveMode; +import org.apache.spark.sql.SparkSession; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.fasterxml.jackson.databind.ObjectMapper; + +import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme; +import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProject; +import eu.dnetlib.dhp.application.ArgumentApplicationParser; +import eu.dnetlib.dhp.common.HdfsSupport; +import eu.dnetlib.dhp.schema.action.AtomicAction; +import eu.dnetlib.dhp.schema.common.ModelSupport; +import eu.dnetlib.dhp.schema.oaf.Programme; +import eu.dnetlib.dhp.schema.oaf.Project; +import eu.dnetlib.dhp.utils.DHPUtils; +import scala.Function1; +import scala.Tuple2; +import scala.runtime.BoxedUnit; + +public class SparkAtomicActionJob { + private static final Logger log = LoggerFactory.getLogger(SparkAtomicActionJob.class); + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + private static final HashMap programmeMap = new HashMap<>(); + + public static void main(String[] args) throws Exception { + + String jsonConfiguration = IOUtils + .toString( + SparkAtomicActionJob.class + .getResourceAsStream( + "/eu/dnetlib/dhp/actionmanager/project/action_set_parameters.json")); + + final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration); + + parser.parseArgument(args); + + Boolean isSparkSessionManaged = Optional + .ofNullable(parser.get("isSparkSessionManaged")) + .map(Boolean::valueOf) + .orElse(Boolean.TRUE); + + log.info("isSparkSessionManaged: {}", isSparkSessionManaged); + + String projectPath = parser.get("projectPath"); + log.info("projectPath: {}", projectPath); + + final String outputPath = parser.get("outputPath"); + log.info("outputPath {}: ", outputPath); + + final String programmePath = parser.get("programmePath"); + log.info("programmePath {}: ", programmePath); + + SparkConf conf = new SparkConf(); + + runWithSparkSession( + conf, + isSparkSessionManaged, + spark -> { + removeOutputDir(spark, outputPath); + getAtomicActions( + spark, + projectPath, + programmePath, + outputPath); + }); + } + + private static void removeOutputDir(SparkSession spark, String path) { + HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration()); + } + + private static void getAtomicActions(SparkSession spark, String projectPatH, + String programmePath, + String outputPath) { + + Dataset project = readPath(spark, projectPatH, CSVProject.class); + Dataset programme = readPath(spark, programmePath, CSVProgramme.class); + + project + .joinWith(programme, project.col("programme").equalTo(programme.col("code")), "left") + .map(c -> { + CSVProject csvProject = c._1(); + Optional csvProgramme = 
Optional.ofNullable(c._2()); + if (csvProgramme.isPresent()) { + Project p = new Project(); + p + .setId( + createOpenaireId( + ModelSupport.entityIdPrefix.get("project"), + "corda__h2020", csvProject.getId())); + Programme pm = new Programme(); + pm.setCode(csvProject.getProgramme()); + pm.setDescription(csvProgramme.get().getShortTitle()); + p.setProgramme(Arrays.asList(pm)); + return p; + } + + return null; + }, Encoders.bean(Project.class)) + .filter(Objects::nonNull) + .groupByKey( + (MapFunction) p -> p.getId(), + Encoders.STRING()) + .mapGroups((MapGroupsFunction) (s, it) -> { + Project first = it.next(); + it.forEachRemaining(p -> { + first.mergeFrom(p); + }); + return first; + }, Encoders.bean(Project.class)) + .toJavaRDD() + .map(p -> new AtomicAction(Project.class, p)) + .mapToPair( + aa -> new Tuple2<>(new Text(aa.getClazz().getCanonicalName()), + new Text(OBJECT_MAPPER.writeValueAsString(aa)))) + .saveAsHadoopFile(outputPath, Text.class, Text.class, SequenceFileOutputFormat.class); + + } + + public static Dataset readPath( + SparkSession spark, String inputPath, Class clazz) { + return spark + .read() + .textFile(inputPath) + .map((MapFunction) value -> OBJECT_MAPPER.readValue(value, clazz), Encoders.bean(clazz)); + } + + public static String createOpenaireId( + final String prefix, final String nsPrefix, final String id) { + + return String.format("%s|%s::%s", prefix, nsPrefix, DHPUtils.md5(id)); + + } +} diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/csvutils/CSVParser.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/csvutils/CSVParser.java new file mode 100644 index 000000000..ef29a6b6a --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/csvutils/CSVParser.java @@ -0,0 +1,37 @@ + +package eu.dnetlib.dhp.actionmanager.project.csvutils; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.csv.CSVRecord; +import org.apache.commons.lang.reflect.FieldUtils; + +public class CSVParser { + + public List parse(String csvFile, String classForName) + throws ClassNotFoundException, IOException, IllegalAccessException, InstantiationException { + final CSVFormat format = CSVFormat.EXCEL + .withHeader() + .withDelimiter(';') + .withQuote('"') + .withTrim(); + List ret = new ArrayList<>(); + final org.apache.commons.csv.CSVParser parser = org.apache.commons.csv.CSVParser.parse(csvFile, format); + final Set headers = parser.getHeaderMap().keySet(); + Class clazz = Class.forName(classForName); + for (CSVRecord csvRecord : parser.getRecords()) { + final Object cc = clazz.newInstance(); + for (String header : headers) { + FieldUtils.writeField(cc, header, csvRecord.get(header), true); + + } + ret.add((R) cc); + } + + return ret; + } +} diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/csvutils/CSVProgramme.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/csvutils/CSVProgramme.java new file mode 100644 index 000000000..a9069e510 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/csvutils/CSVProgramme.java @@ -0,0 +1,52 @@ + +package eu.dnetlib.dhp.actionmanager.project.csvutils; + +import java.io.Serializable; + +public class CSVProgramme implements Serializable { + private String rcn; + private String code; 
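The generic CSVParser above binds columns to bean fields by name via reflection (FieldUtils.writeField), so the CSV header names must match the target class's field names exactly. A hedged usage sketch against CSVProgramme, with an invented sample row:

```java
import java.util.List;

import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVParser;
import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme;

public class CSVParserDemo {

	public static void main(String[] args) throws Exception {
		// header names (rcn, code, ...) must match CSVProgramme field names
		String csv = "rcn;code;title;shortTitle;language\n"
			+ "664625;H2020-EU.1.1.;Excellent science;ERC;en";
		CSVParser parser = new CSVParser();
		List<CSVProgramme> rows = parser
			.parse(csv, "eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme");
		rows.forEach(r -> System.out.println(r.getCode() + " -> " + r.getShortTitle()));
	}
}
```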
+ private String title; + private String shortTitle; + private String language; + + public String getRcn() { + return rcn; + } + + public void setRcn(String rcn) { + this.rcn = rcn; + } + + public String getCode() { + return code; + } + + public void setCode(String code) { + this.code = code; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } + + public String getShortTitle() { + return shortTitle; + } + + public void setShortTitle(String shortTitle) { + this.shortTitle = shortTitle; + } + + public String getLanguage() { + return language; + } + + public void setLanguage(String language) { + this.language = language; + } +} diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/csvutils/CSVProject.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/csvutils/CSVProject.java new file mode 100644 index 000000000..ff18c6260 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/csvutils/CSVProject.java @@ -0,0 +1,197 @@ + +package eu.dnetlib.dhp.actionmanager.project.csvutils; + +import java.io.Serializable; + +public class CSVProject implements Serializable { + private String rcn; + private String id; + private String acronym; + private String status; + private String programme; + private String topics; + private String frameworkProgramme; + private String title; + private String startDate; + private String endDate; + private String projectUrl; + private String objective; + private String totalCost; + private String ecMaxContribution; + private String call; + private String fundingScheme; + private String coordinator; + private String coordinatorCountry; + private String participants; + private String participantCountries; + private String subjects; + + public String getRcn() { + return rcn; + } + + public void setRcn(String rcn) { + this.rcn = rcn; + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getAcronym() { + return acronym; + } + + public void setAcronym(String acronym) { + this.acronym = acronym; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + public String getProgramme() { + return programme; + } + + public void setProgramme(String programme) { + this.programme = programme; + } + + public String getTopics() { + return topics; + } + + public void setTopics(String topics) { + this.topics = topics; + } + + public String getFrameworkProgramme() { + return frameworkProgramme; + } + + public void setFrameworkProgramme(String frameworkProgramme) { + this.frameworkProgramme = frameworkProgramme; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } + + public String getStartDate() { + return startDate; + } + + public void setStartDate(String startDate) { + this.startDate = startDate; + } + + public String getEndDate() { + return endDate; + } + + public void setEndDate(String endDate) { + this.endDate = endDate; + } + + public String getProjectUrl() { + return projectUrl; + } + + public void setProjectUrl(String projectUrl) { + this.projectUrl = projectUrl; + } + + public String getObjective() { + return objective; + } + + public void setObjective(String objective) { + this.objective = objective; + } + + public String getTotalCost() { + return totalCost; + } + + public void 
setTotalCost(String totalCost) { + this.totalCost = totalCost; + } + + public String getEcMaxContribution() { + return ecMaxContribution; + } + + public void setEcMaxContribution(String ecMaxContribution) { + this.ecMaxContribution = ecMaxContribution; + } + + public String getCall() { + return call; + } + + public void setCall(String call) { + this.call = call; + } + + public String getFundingScheme() { + return fundingScheme; + } + + public void setFundingScheme(String fundingScheme) { + this.fundingScheme = fundingScheme; + } + + public String getCoordinator() { + return coordinator; + } + + public void setCoordinator(String coordinator) { + this.coordinator = coordinator; + } + + public String getCoordinatorCountry() { + return coordinatorCountry; + } + + public void setCoordinatorCountry(String coordinatorCountry) { + this.coordinatorCountry = coordinatorCountry; + } + + public String getParticipants() { + return participants; + } + + public void setParticipants(String participants) { + this.participants = participants; + } + + public String getParticipantCountries() { + return participantCountries; + } + + public void setParticipantCountries(String participantCountries) { + this.participantCountries = participantCountries; + } + + public String getSubjects() { + return subjects; + } + + public void setSubjects(String subjects) { + this.subjects = subjects; + } + +} diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/csvutils/ReadCSV.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/csvutils/ReadCSV.java new file mode 100644 index 000000000..2b72b229a --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/csvutils/ReadCSV.java @@ -0,0 +1,97 @@ + +package eu.dnetlib.dhp.actionmanager.project.csvutils; + +import java.io.BufferedWriter; +import java.io.Closeable; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.nio.charset.StandardCharsets; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; + +import com.fasterxml.jackson.databind.ObjectMapper; + +import eu.dnetlib.dhp.actionmanager.project.httpconnector.HttpConnector; +import eu.dnetlib.dhp.application.ArgumentApplicationParser; + +public class ReadCSV implements Closeable { + private static final Log log = LogFactory.getLog(ReadCSV.class); + private final Configuration conf; + private final BufferedWriter writer; + private final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + private String csvFile; + + public static void main(final String[] args) throws Exception { + final ArgumentApplicationParser parser = new ArgumentApplicationParser( + IOUtils + .toString( + ReadCSV.class + .getResourceAsStream( + "/eu/dnetlib/dhp/actionmanager/project/parameters.json"))); + + parser.parseArgument(args); + + final String fileURL = parser.get("fileURL"); + final String hdfsPath = parser.get("hdfsPath"); + final String hdfsNameNode = parser.get("hdfsNameNode"); + final String classForName = parser.get("classForName"); + + try (final ReadCSV readCSV = new ReadCSV(hdfsPath, hdfsNameNode, fileURL)) { + + log.info("Getting CSV file..."); + readCSV.execute(classForName); + + } + } + + public void execute(final String classForName) throws Exception { + 
CSVParser csvParser = new CSVParser(); + csvParser + .parse(csvFile, classForName) + .stream() + .forEach(p -> write(p)); + + } + + @Override + public void close() throws IOException { + writer.close(); + } + + public ReadCSV( + final String hdfsPath, + final String hdfsNameNode, + final String fileURL) + throws Exception { + this.conf = new Configuration(); + this.conf.set("fs.defaultFS", hdfsNameNode); + HttpConnector httpConnector = new HttpConnector(); + FileSystem fileSystem = FileSystem.get(this.conf); + Path hdfsWritePath = new Path(hdfsPath); + FSDataOutputStream fsDataOutputStream = null; + if (fileSystem.exists(hdfsWritePath)) { + fileSystem.delete(hdfsWritePath, false); + } + fsDataOutputStream = fileSystem.create(hdfsWritePath); + + this.writer = new BufferedWriter(new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8)); + this.csvFile = httpConnector.getInputSource(fileURL); + ; + } + + protected void write(final Object p) { + try { + writer.write(OBJECT_MAPPER.writeValueAsString(p)); + writer.newLine(); + } catch (final Exception e) { + throw new RuntimeException(e); + } + } + +} diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/httpconnector/CollectorPluginErrorLogList.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/httpconnector/CollectorPluginErrorLogList.java new file mode 100644 index 000000000..9d3f88265 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/httpconnector/CollectorPluginErrorLogList.java @@ -0,0 +1,20 @@ + +package eu.dnetlib.dhp.actionmanager.project.httpconnector; + +import java.util.LinkedList; + +public class CollectorPluginErrorLogList extends LinkedList { + + private static final long serialVersionUID = -6925786561303289704L; + + @Override + public String toString() { + String log = new String(); + int index = 0; + for (String errorMessage : this) { + log += String.format("Retry #%s: %s / ", index++, errorMessage); + } + return log; + } + +} diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/httpconnector/CollectorServiceException.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/httpconnector/CollectorServiceException.java new file mode 100644 index 000000000..9167d97b4 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/httpconnector/CollectorServiceException.java @@ -0,0 +1,20 @@ + +package eu.dnetlib.dhp.actionmanager.project.httpconnector; + +public class CollectorServiceException extends Exception { + + private static final long serialVersionUID = 7523999812098059764L; + + public CollectorServiceException(String string) { + super(string); + } + + public CollectorServiceException(String string, Throwable exception) { + super(string, exception); + } + + public CollectorServiceException(Throwable exception) { + super(exception); + } + +} diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/httpconnector/HttpConnector.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/httpconnector/HttpConnector.java new file mode 100644 index 000000000..e20518b55 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/httpconnector/HttpConnector.java @@ -0,0 +1,240 @@ + +package eu.dnetlib.dhp.actionmanager.project.httpconnector; + +import java.io.IOException; +import 
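Reviewer note: CollectorPluginErrorLogList.toString() above concatenates strings in a loop. A possible tightening with StringBuilder, output unchanged:

```java
import java.util.LinkedList;

public class CollectorPluginErrorLogList extends LinkedList<String> {

	private static final long serialVersionUID = -6925786561303289704L;

	@Override
	public String toString() {
		// same output as the version above, without repeated string reallocation
		StringBuilder log = new StringBuilder();
		int index = 0;
		for (String errorMessage : this) {
			log.append(String.format("Retry #%s: %s / ", index++, errorMessage));
		}
		return log.toString();
	}
}
```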
java.io.InputStream; +import java.net.*; +import java.security.GeneralSecurityException; +import java.security.cert.X509Certificate; +import java.util.List; +import java.util.Map; + +import javax.net.ssl.HttpsURLConnection; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import javax.net.ssl.X509TrustManager; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.math.NumberUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +/** + * @author jochen, michele, andrea + */ +public class HttpConnector { + + private static final Log log = LogFactory.getLog(HttpConnector.class); + + private int maxNumberOfRetry = 6; + private int defaultDelay = 120; // seconds + private int readTimeOut = 120; // seconds + + private String responseType = null; + + private String userAgent = "Mozilla/5.0 (compatible; OAI; +http://www.openaire.eu)"; + + public HttpConnector() { + CookieHandler.setDefault(new CookieManager(null, CookiePolicy.ACCEPT_ALL)); + } + + /** + * Given the URL returns the content via HTTP GET + * + * @param requestUrl the URL + * @return the content of the downloaded resource + * @throws CollectorServiceException when retrying more than maxNumberOfRetry times + */ + public String getInputSource(final String requestUrl) throws CollectorServiceException { + return attemptDownlaodAsString(requestUrl, 1, new CollectorPluginErrorLogList()); + } + + /** + * Given the URL returns the content as a stream via HTTP GET + * + * @param requestUrl the URL + * @return the content of the downloaded resource as InputStream + * @throws CollectorServiceException when retrying more than maxNumberOfRetry times + */ + public InputStream getInputSourceAsStream(final String requestUrl) throws CollectorServiceException { + return attemptDownload(requestUrl, 1, new CollectorPluginErrorLogList()); + } + + private String attemptDownlaodAsString(final String requestUrl, final int retryNumber, + final CollectorPluginErrorLogList errorList) + throws CollectorServiceException { + try { + InputStream s = attemptDownload(requestUrl, 1, new CollectorPluginErrorLogList()); + try { + return IOUtils.toString(s); + } catch (IOException e) { + log.error("error while retrieving from http-connection occured: " + requestUrl, e); + Thread.sleep(defaultDelay * 1000); + errorList.add(e.getMessage()); + return attemptDownlaodAsString(requestUrl, retryNumber + 1, errorList); + } finally { + IOUtils.closeQuietly(s); + } + } catch (InterruptedException e) { + throw new CollectorServiceException(e); + } + } + + private InputStream attemptDownload(final String requestUrl, final int retryNumber, + final CollectorPluginErrorLogList errorList) + throws CollectorServiceException { + + if (retryNumber > maxNumberOfRetry) { + throw new CollectorServiceException("Max number of retries exceeded. 
Cause: \n " + errorList); + } + + log.debug("Downloading " + requestUrl + " - try: " + retryNumber); + try { + InputStream input = null; + + try { + final HttpURLConnection urlConn = (HttpURLConnection) new URL(requestUrl).openConnection(); + urlConn.setInstanceFollowRedirects(false); + urlConn.setReadTimeout(readTimeOut * 1000); + urlConn.addRequestProperty("User-Agent", userAgent); + + if (log.isDebugEnabled()) { + logHeaderFields(urlConn); + } + + int retryAfter = obtainRetryAfter(urlConn.getHeaderFields()); + if (retryAfter > 0 && urlConn.getResponseCode() == HttpURLConnection.HTTP_UNAVAILABLE) { + log.warn("waiting and repeating request after " + retryAfter + " sec."); + Thread.sleep(retryAfter * 1000); + errorList.add("503 Service Unavailable"); + urlConn.disconnect(); + return attemptDownload(requestUrl, retryNumber + 1, errorList); + } else if ((urlConn.getResponseCode() == HttpURLConnection.HTTP_MOVED_PERM) + || (urlConn.getResponseCode() == HttpURLConnection.HTTP_MOVED_TEMP)) { + final String newUrl = obtainNewLocation(urlConn.getHeaderFields()); + log.debug("The requested url has been moved to " + newUrl); + errorList + .add( + String + .format( + "%s %s. Moved to: %s", urlConn.getResponseCode(), urlConn.getResponseMessage(), + newUrl)); + urlConn.disconnect(); + return attemptDownload(newUrl, retryNumber + 1, errorList); + } else if (urlConn.getResponseCode() != HttpURLConnection.HTTP_OK) { + log + .error( + String + .format("HTTP error: %s %s", urlConn.getResponseCode(), urlConn.getResponseMessage())); + Thread.sleep(defaultDelay * 1000); + errorList.add(String.format("%s %s", urlConn.getResponseCode(), urlConn.getResponseMessage())); + urlConn.disconnect(); + return attemptDownload(requestUrl, retryNumber + 1, errorList); + } else { + input = urlConn.getInputStream(); + responseType = urlConn.getContentType(); + return input; + } + } catch (IOException e) { + log.error("error while retrieving from http-connection occured: " + requestUrl, e); + Thread.sleep(defaultDelay * 1000); + errorList.add(e.getMessage()); + return attemptDownload(requestUrl, retryNumber + 1, errorList); + } + } catch (InterruptedException e) { + throw new CollectorServiceException(e); + } + } + + private void logHeaderFields(final HttpURLConnection urlConn) throws IOException { + log.debug("StatusCode: " + urlConn.getResponseMessage()); + + for (Map.Entry> e : urlConn.getHeaderFields().entrySet()) { + if (e.getKey() != null) { + for (String v : e.getValue()) { + log.debug(" key: " + e.getKey() + " - value: " + v); + } + } + } + } + + private int obtainRetryAfter(final Map> headerMap) { + for (String key : headerMap.keySet()) { + if ((key != null) && key.toLowerCase().equals("retry-after") && (headerMap.get(key).size() > 0) + && NumberUtils.isCreatable(headerMap.get(key).get(0))) { + return Integer + .parseInt(headerMap.get(key).get(0)) + 10; + } + } + return -1; + } + + private String obtainNewLocation(final Map> headerMap) throws CollectorServiceException { + for (String key : headerMap.keySet()) { + if ((key != null) && key.toLowerCase().equals("location") && (headerMap.get(key).size() > 0)) { + return headerMap.get(key).get(0); + } + } + throw new CollectorServiceException("The requested url has been MOVED, but 'location' param is MISSING"); + } + + /** + * register for https scheme; this is a workaround and not intended for the use in trusted environments + */ + public void initTrustManager() { + final X509TrustManager tm = new X509TrustManager() { + + @Override + public void checkClientTrusted(final 
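Reviewer note on attemptDownlaodAsString above (whose name also carries a typo, "Downlaod"): it calls attemptDownload(requestUrl, 1, new CollectorPluginErrorLogList()), which restarts the retry counter and discards the accumulated error list on every recursion, so maxNumberOfRetry is never reached on persistent failures in string mode. A hedged sketch of the presumably intended call:

```java
// inside attemptDownlaodAsString(requestUrl, retryNumber, errorList):
InputStream s = attemptDownload(requestUrl, retryNumber, errorList); // propagate state instead of resetting it
```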
+		}
+
+		@Override
+		public void checkServerTrusted(final X509Certificate[] xcs, final String string) {
+		}
+
+		@Override
+		public X509Certificate[] getAcceptedIssuers() {
+			return null;
+		}
+	};
+	try {
+		final SSLContext ctx = SSLContext.getInstance("TLS");
+		ctx.init(null, new TrustManager[] {
+			tm
+		}, null);
+		HttpsURLConnection.setDefaultSSLSocketFactory(ctx.getSocketFactory());
+	} catch (GeneralSecurityException e) {
+		log.fatal(e);
+		throw new IllegalStateException(e);
+	}
+}
+
+public int getMaxNumberOfRetry() {
+	return maxNumberOfRetry;
+}
+
+public void setMaxNumberOfRetry(final int maxNumberOfRetry) {
+	this.maxNumberOfRetry = maxNumberOfRetry;
+}
+
+public int getDefaultDelay() {
+	return defaultDelay;
+}
+
+public void setDefaultDelay(final int defaultDelay) {
+	this.defaultDelay = defaultDelay;
+}
+
+public int getReadTimeOut() {
+	return readTimeOut;
+}
+
+public void setReadTimeOut(final int readTimeOut) {
+	this.readTimeOut = readTimeOut;
+}
+
+public String getResponseType() {
+	return responseType;
+}
+
+}
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/action_set_parameters.json b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/action_set_parameters.json
new file mode 100644
index 000000000..a0856e10e
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/action_set_parameters.json
@@ -0,0 +1,26 @@
+[
+  {
+    "paramName": "issm",
+    "paramLongName": "isSparkSessionManaged",
+    "paramDescription": "when true will stop SparkSession after job execution",
+    "paramRequired": false
+  },
+  {
+    "paramName": "pjp",
+    "paramLongName": "projectPath",
+    "paramDescription": "the URL from where to get the projects file",
+    "paramRequired": true
+  },
+  {
+    "paramName": "pp",
+    "paramLongName": "programmePath",
+    "paramDescription": "the URL from where to get the programme file",
+    "paramRequired": true
+  },
+  {
+    "paramName": "o",
+    "paramLongName": "outputPath",
+    "paramDescription": "the path of the new ActionSet",
+    "paramRequired": true
+  }
+]
\ No newline at end of file
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/oozie_app/config-default.xml b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/oozie_app/config-default.xml
new file mode 100644
index 000000000..fe82ae194
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/oozie_app/config-default.xml
@@ -0,0 +1,54 @@
+<configuration>
+    <property>
+        <name>jobTracker</name>
+        <value>yarnRM</value>
+    </property>
+    <property>
+        <name>nameNode</name>
+        <value>hdfs://nameservice1</value>
+    </property>
+    <property>
+        <name>oozie.use.system.libpath</name>
+        <value>true</value>
+    </property>
+    <property>
+        <name>oozie.action.sharelib.for.spark</name>
+        <value>spark2</value>
+    </property>
+    <property>
+        <name>hive_metastore_uris</name>
+        <value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
+    </property>
+    <property>
+        <name>spark2YarnHistoryServerAddress</name>
+        <value>http://iis-cdh5-test-gw.ocean.icm.edu.pl:18089</value>
+    </property>
+    <property>
+        <name>spark2ExtraListeners</name>
+        <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
+    </property>
+    <property>
+        <name>spark2SqlQueryExecutionListeners</name>
+        <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
+    </property>
+    <property>
+        <name>sparkExecutorNumber</name>
+        <value>4</value>
+    </property>
+    <property>
+        <name>spark2EventLogDir</name>
+        <value>/user/spark/spark2ApplicationHistory</value>
+    </property>
+    <property>
+        <name>sparkDriverMemory</name>
+        <value>15G</value>
+    </property>
+    <property>
+        <name>sparkExecutorMemory</name>
+        <value>6G</value>
+    </property>
+    <property>
+        <name>sparkExecutorCores</name>
+        <value>1</value>
+    </property>
+</configuration>
\ No newline at end of file
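Taken together, the connector above retries on 503 responses (honouring the Retry-After header plus a ten-second margin), follows 301/302 redirects manually, and accumulates every failure in errorList until maxNumberOfRetry is exhausted. A minimal usage sketch follows; the getInputSource(String) entry point and its String return type are assumptions inferred from HttpConnectorTest further below, while the setters are the ones defined above:

import eu.dnetlib.dhp.actionmanager.project.httpconnector.HttpConnector;

public class HttpConnectorUsage {

	public static void main(String[] args) throws Exception {
		final HttpConnector connector = new HttpConnector();
		connector.setMaxNumberOfRetry(3); // give up after three failed attempts
		connector.setDefaultDelay(10); // seconds to wait between plain retries
		connector.setReadTimeOut(30); // seconds, converted to millis internally

		// entry point exercised by HttpConnectorTest below; the String return
		// type is an assumption based on how the tests print the result
		final String csv = connector
			.getInputSource("http://cordis.europa.eu/data/reference/cordisref-H2020programmes.csv");
		System.out.println("content type: " + connector.getResponseType());
		System.out.println("first bytes: " + csv.substring(0, Math.min(120, csv.length())));
	}
}

diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/oozie_app/workflow.xml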
b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/oozie_app/workflow.xml new file mode 100644 index 000000000..1e3445675 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/oozie_app/workflow.xml @@ -0,0 +1,146 @@ + + + + projectFileURL + the url where to get the projects file + + + + programmeFileURL + the url where to get the programme file + + + + outputPath + path where to store the action set + + + + + + Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}] + + + + + + + + + + + + + + + eu.dnetlib.dhp.actionmanager.project.csvutils.ReadCSV + --hdfsNameNode${nameNode} + --fileURL${projectFileURL} + --hdfsPath${workingDir}/projects + --classForNameeu.dnetlib.dhp.actionmanager.project.csvutils.CSVProject + + + + + + + + eu.dnetlib.dhp.actionmanager.project.csvutils.ReadCSV + --hdfsNameNode${nameNode} + --fileURL${programmeFileURL} + --hdfsPath${workingDir}/programme + --classForNameeu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme + + + + + + + + eu.dnetlib.dhp.actionmanager.project.ReadProjectsFromDB + --hdfsPath${workingDir}/dbProjects + --hdfsNameNode${nameNode} + --postgresUrl${postgresURL} + --postgresUser${postgresUser} + --postgresPassword${postgresPassword} + + + + + + + + yarn + cluster + PrepareProgramme + eu.dnetlib.dhp.actionmanager.project.PrepareProgramme + dhp-aggregation-${projectVersion}.jar + + --executor-cores=${sparkExecutorCores} + --executor-memory=${sparkExecutorMemory} + --driver-memory=${sparkDriverMemory} + --conf spark.extraListeners=${spark2ExtraListeners} + --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} + --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} + --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} + --conf spark.sql.shuffle.partitions=3840 + + --programmePath${workingDir}/programme + --outputPath${workingDir}/preparedProgramme + + + + + + + + yarn + cluster + PrepareProjects + eu.dnetlib.dhp.actionmanager.project.PrepareProjects + dhp-aggregation-${projectVersion}.jar + + --executor-cores=${sparkExecutorCores} + --executor-memory=${sparkExecutorMemory} + --driver-memory=${sparkDriverMemory} + --conf spark.extraListeners=${spark2ExtraListeners} + --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} + --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} + --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} + --conf spark.sql.shuffle.partitions=3840 + + --projectPath${workingDir}/projects + --outputPath${workingDir}/preparedProjects + --dbProjectPath${workingDir}/dbProjects + + + + + + + + yarn + cluster + ProjectProgrammeAS + eu.dnetlib.dhp.actionmanager.project.SparkAtomicActionJob + dhp-aggregation-${projectVersion}.jar + + --executor-cores=${sparkExecutorCores} + --executor-memory=${sparkExecutorMemory} + --driver-memory=${sparkDriverMemory} + --conf spark.extraListeners=${spark2ExtraListeners} + --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} + --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} + --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} + --conf spark.sql.shuffle.partitions=3840 + + --projectPath${workingDir}/preparedProjects + --programmePath${workingDir}/preparedProgramme + --outputPath${outputPath} + + + + + + + \ No newline at end of file diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/parameters.json 
b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/parameters.json
new file mode 100644
index 000000000..dd3de70f6
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/parameters.json
@@ -0,0 +1,26 @@
+[
+  {
+    "paramName": "fu",
+    "paramLongName": "fileURL",
+    "paramDescription": "the url of the file to download",
+    "paramRequired": true
+  },
+  {
+    "paramName": "hp",
+    "paramLongName": "hdfsPath",
+    "paramDescription": "where to save the file",
+    "paramRequired": true
+  },
+  {
+    "paramName": "hnn",
+    "paramLongName": "hdfsNameNode",
+    "paramDescription": "the name node",
+    "paramRequired": true
+  },
+  {
+    "paramName": "cfn",
+    "paramLongName": "classForName",
+    "paramDescription": "the name of the class to deserialize the csv to",
+    "paramRequired": true
+  }
+]
\ No newline at end of file
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/prepare_programme_parameters.json b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/prepare_programme_parameters.json
new file mode 100644
index 000000000..54083e108
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/prepare_programme_parameters.json
@@ -0,0 +1,20 @@
+[
+  {
+    "paramName": "issm",
+    "paramLongName": "isSparkSessionManaged",
+    "paramDescription": "when true will stop SparkSession after job execution",
+    "paramRequired": false
+  },
+  {
+    "paramName": "pp",
+    "paramLongName": "programmePath",
+    "paramDescription": "the URL from where to get the programme file",
+    "paramRequired": true
+  },
+  {
+    "paramName": "o",
+    "paramLongName": "outputPath",
+    "paramDescription": "the path of the new ActionSet",
+    "paramRequired": true
+  }
+]
\ No newline at end of file
diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/prepare_project_parameters.json b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/prepare_project_parameters.json
new file mode 100644
index 000000000..49f9c7306
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/prepare_project_parameters.json
@@ -0,0 +1,26 @@
+[
+  {
+    "paramName": "issm",
+    "paramLongName": "isSparkSessionManaged",
+    "paramDescription": "when true will stop SparkSession after job execution",
+    "paramRequired": false
+  },
+  {
+    "paramName": "pjp",
+    "paramLongName": "projectPath",
+    "paramDescription": "the URL from where to get the projects file",
+    "paramRequired": true
+  },
+  {
+    "paramName": "o",
+    "paramLongName": "outputPath",
+    "paramDescription": "the path of the new ActionSet",
+    "paramRequired": true
+  },
+  {
+    "paramName": "dbp",
+    "paramLongName": "dbProjectPath",
+    "paramDescription": "the path of the project codes read from the db",
+    "paramRequired": true
+  }
+]
\ No newline at end of file
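Each of these JSON files is the argument specification for one command-line step of the workflow: paramName is the short option, paramLongName the long option, and paramRequired drives validation. A hedged sketch of how a job might load such a spec through the project's ArgumentApplicationParser (the class appears mocked in the tests below; its package and method names here are assumptions based on its usage elsewhere in the codebase):

import org.apache.commons.io.IOUtils;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;

public class ReadCsvArguments {

	public static void main(String[] args) throws Exception {
		// load the JSON argument specification bundled with the module
		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
			IOUtils
				.toString(
					ReadCsvArguments.class
						.getResourceAsStream("/eu/dnetlib/dhp/actionmanager/project/parameters.json")));

		// e.g. --fileURL http://... --hdfsPath /tmp/projects --hdfsNameNode hdfs://nn --classForName ...
		parser.parseArgument(args);

		// values are retrieved by their paramLongName
		final String fileURL = parser.get("fileURL");
		System.out.println("would download " + fileURL);
	}
}

diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/read_projects_db.json b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/read_projects_db.json
new file mode 100644
index 000000000..9a2eadaa7
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/actionmanager/project/read_projects_db.json
@@ -0,0 +1,32 @@
+[
+  {
+    "paramName": "p",
+    "paramLongName": "hdfsPath",
+    "paramDescription": "the path where to store the sequential file",
+    "paramRequired": true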
+  },
+  {
+    "paramName": "nn",
+    "paramLongName": "hdfsNameNode",
+    "paramDescription": "the name node on hdfs",
+    "paramRequired": true
+  },
+  {
+    "paramName": "pgurl",
+    "paramLongName": "postgresUrl",
+    "paramDescription": "postgres url, example: jdbc:postgresql://localhost:5432/testdb",
+    "paramRequired": true
+  },
+  {
+    "paramName": "pguser",
+    "paramLongName": "postgresUser",
+    "paramDescription": "postgres user",
+    "paramRequired": false
+  },
+  {
+    "paramName": "pgpasswd",
+    "paramLongName": "postgresPassword",
+    "paramDescription": "postgres password",
+    "paramRequired": false
+  }
+]
\ No newline at end of file
diff --git a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/CSVParserTest.java b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/CSVParserTest.java
new file mode 100644
index 000000000..17fdd4511
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/CSVParserTest.java
@@ -0,0 +1,45 @@
+
+package eu.dnetlib.dhp.actionmanager.project;
+
+import static org.junit.jupiter.api.Assertions.assertFalse;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.List;
+
+import org.apache.commons.io.IOUtils;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVParser;
+
+public class CSVParserTest {
+
+	private static Path workingDir;
+
+	@BeforeAll
+	public static void beforeAll() throws IOException {
+		workingDir = Files.createTempDirectory(CSVParserTest.class.getSimpleName());
+
+	}
+
+	@Test
+	public void readProgrammeTest() throws Exception {
+
+		String programmecsv = IOUtils
+			.toString(
+				getClass()
+					.getClassLoader()
+					.getResourceAsStream("eu/dnetlib/dhp/actionmanager/project/programme.csv"));
+
+		CSVParser csvParser = new CSVParser();
+
+		List pl = csvParser.parse(programmecsv, "eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme");
+
+		System.out.println(pl.size());
+		// the bundled programme.csv fixture should yield at least one record
+		assertFalse(pl.isEmpty());
+
+	}
+}
diff --git a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/PrepareProgrammeTest.java b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/PrepareProgrammeTest.java
new file mode 100644
index 000000000..7f890a8a3
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/PrepareProgrammeTest.java
@@ -0,0 +1,94 @@
+
+package eu.dnetlib.dhp.actionmanager.project;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.Encoders;
+import org.apache.spark.sql.SparkSession;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme;
+
+public class PrepareProgrammeTest {
+
+	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
+	private static final ClassLoader cl = eu.dnetlib.dhp.actionmanager.project.PrepareProgrammeTest.class
+		.getClassLoader();
+
+	private static SparkSession spark;
+
+	private static Path workingDir;
+	private static final Logger log = LoggerFactory
+		.getLogger(eu.dnetlib.dhp.actionmanager.project.PrepareProgrammeTest.class);
+
+	@BeforeAll
+	public static void beforeAll() throws IOException {
+		workingDir = Files
+			.createTempDirectory(eu.dnetlib.dhp.actionmanager.project.PrepareProgrammeTest.class.getSimpleName());
+		log.info("using work dir {}", workingDir);
+
+		SparkConf conf = new SparkConf();
+		conf.setAppName(eu.dnetlib.dhp.actionmanager.project.PrepareProgrammeTest.class.getSimpleName());
+
+		conf.setMaster("local[*]");
+		conf.set("spark.driver.host", "localhost");
+		conf.set("hive.metastore.local", "true");
+		conf.set("spark.ui.enabled", "false");
+		conf.set("spark.sql.warehouse.dir", workingDir.toString());
+		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
+
+		spark = SparkSession
+			.builder()
+			.appName(PrepareProgrammeTest.class.getSimpleName())
+			.config(conf)
+			.getOrCreate();
+	}
+
+	@AfterAll
+	public static void afterAll() throws IOException {
+		FileUtils.deleteDirectory(workingDir.toFile());
+		spark.stop();
+	}
+
+	@Test
+	public void numberDistinctProgrammeTest() throws Exception {
+		PrepareProgramme
+			.main(
+				new String[] {
+					"-isSparkSessionManaged",
+					Boolean.FALSE.toString(),
+					"-programmePath",
+					getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/whole_programme.json.gz").getPath(),
+					"-outputPath",
+					workingDir.toString() + "/preparedProgramme"
+				});
+
+		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
+
+		JavaRDD<CSVProgramme> tmp = sc
+			.textFile(workingDir.toString() + "/preparedProgramme")
+			.map(item -> OBJECT_MAPPER.readValue(item, CSVProgramme.class));
+
+		Assertions.assertEquals(277, tmp.count());
+
+		Dataset<CSVProgramme> verificationDataset = spark.createDataset(tmp.rdd(), Encoders.bean(CSVProgramme.class));
+
+		Assertions.assertEquals(0, verificationDataset.filter("shortTitle =''").count());
+	}
+
+}
diff --git a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/PrepareProjectTest.java b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/PrepareProjectTest.java
new file mode 100644
index 000000000..5ff88e46f
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/PrepareProjectTest.java
@@ -0,0 +1,99 @@
+
+package eu.dnetlib.dhp.actionmanager.project;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.Encoders;
+import org.apache.spark.sql.SparkSession;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme;
+import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProject;
+
+public class PrepareProjectTest {
+
+	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
+	private static final ClassLoader cl = PrepareProjectTest.class
+		.getClassLoader();
+
+	private static SparkSession spark;
+
+	private static Path workingDir;
+	private static final Logger log = LoggerFactory
+		.getLogger(PrepareProjectTest.class);
+
+	@BeforeAll
+	public static void beforeAll()
throws IOException {
+		workingDir = Files
+			.createTempDirectory(PrepareProjectTest.class.getSimpleName());
+		log.info("using work dir {}", workingDir);
+
+		SparkConf conf = new SparkConf();
+		conf.setAppName(PrepareProjectTest.class.getSimpleName());
+
+		conf.setMaster("local[*]");
+		conf.set("spark.driver.host", "localhost");
+		conf.set("hive.metastore.local", "true");
+		conf.set("spark.ui.enabled", "false");
+		conf.set("spark.sql.warehouse.dir", workingDir.toString());
+		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
+
+		spark = SparkSession
+			.builder()
+			.appName(PrepareProjectTest.class.getSimpleName())
+			.config(conf)
+			.getOrCreate();
+	}
+
+	@AfterAll
+	public static void afterAll() throws IOException {
+		FileUtils.deleteDirectory(workingDir.toFile());
+		spark.stop();
+	}
+
+	@Test
+	public void numberDistinctProjectTest() throws Exception {
+		PrepareProjects
+			.main(
+				new String[] {
+					"-isSparkSessionManaged",
+					Boolean.FALSE.toString(),
+					"-projectPath",
+					getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/projects_subset.json").getPath(),
+					"-outputPath",
+					workingDir.toString() + "/preparedProjects",
+					"-dbProjectPath",
+					getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/dbProject").getPath(),
+
+				});
+
+		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
+
+		JavaRDD<CSVProject> tmp = sc
+			.textFile(workingDir.toString() + "/preparedProjects")
+			.map(item -> OBJECT_MAPPER.readValue(item, CSVProject.class));
+
+		Assertions.assertEquals(8, tmp.count());
+
+		Dataset<CSVProject> verificationDataset = spark.createDataset(tmp.rdd(), Encoders.bean(CSVProject.class));
+
+		Assertions.assertEquals(0, verificationDataset.filter("length(id) = 0").count());
+		Assertions.assertEquals(0, verificationDataset.filter("length(programme) = 0").count());
+	}
+
+}
diff --git a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/SparkUpdateProjectTest.java b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/SparkUpdateProjectTest.java
new file mode 100644
index 000000000..718cd8ebe
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/SparkUpdateProjectTest.java
@@ -0,0 +1,94 @@
+
+package eu.dnetlib.dhp.actionmanager.project;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.io.Text;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.sql.SparkSession;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.schema.action.AtomicAction;
+import eu.dnetlib.dhp.schema.oaf.Project;
+
+public class SparkUpdateProjectTest {
+	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
+	private static final ClassLoader cl = eu.dnetlib.dhp.actionmanager.project.SparkUpdateProjectTest.class
+		.getClassLoader();
+
+	private static SparkSession spark;
+
+	private static Path workingDir;
+	private static final Logger log = LoggerFactory
+		.getLogger(eu.dnetlib.dhp.actionmanager.project.SparkUpdateProjectTest.class);
+
+	@BeforeAll
+	public static void beforeAll() throws IOException {
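+		// Shared fixture for this class: create an isolated working directory and a
+		// local[*] SparkSession with the UI disabled and the warehouse/metastore paths
+		// redirected into it, so the test run leaves no state behind on the machine.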
+		workingDir = Files
+			.createTempDirectory(eu.dnetlib.dhp.actionmanager.project.SparkUpdateProjectTest.class.getSimpleName());
+		log.info("using work dir {}", workingDir);
+
+		SparkConf conf = new SparkConf();
+		conf.setAppName(eu.dnetlib.dhp.actionmanager.project.SparkUpdateProjectTest.class.getSimpleName());
+
+		conf.setMaster("local[*]");
+		conf.set("spark.driver.host", "localhost");
+		conf.set("hive.metastore.local", "true");
+		conf.set("spark.ui.enabled", "false");
+		conf.set("spark.sql.warehouse.dir", workingDir.toString());
+		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
+
+		spark = SparkSession
+			.builder()
+			.appName(SparkUpdateProjectTest.class.getSimpleName())
+			.config(conf)
+			.getOrCreate();
+	}
+
+	@AfterAll
+	public static void afterAll() throws IOException {
+		FileUtils.deleteDirectory(workingDir.toFile());
+		spark.stop();
+	}
+
+	@Test
+	public void numberDistinctProjectTest() throws Exception {
+		SparkAtomicActionJob
+			.main(
+				new String[] {
+					"-isSparkSessionManaged",
+					Boolean.FALSE.toString(),
+					"-programmePath",
+					getClass()
+						.getResource("/eu/dnetlib/dhp/actionmanager/project/preparedProgramme_whole.json.gz")
+						.getPath(),
+					"-projectPath",
+					getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/prepared_projects.json").getPath(),
+					"-outputPath",
+					workingDir.toString() + "/actionSet"
+				});
+
+		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
+
+		JavaRDD<Project> tmp = sc
+			.sequenceFile(workingDir.toString() + "/actionSet", Text.class, Text.class)
+			.map(value -> OBJECT_MAPPER.readValue(value._2().toString(), AtomicAction.class))
+			.map(aa -> ((Project) aa.getPayload()));
+
+		Assertions.assertEquals(14, tmp.count());
+
+	}
+}
diff --git a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/httpconnector/HttpConnectorTest.java b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/httpconnector/HttpConnectorTest.java
new file mode 100644
index 000000000..51a7019ca
--- /dev/null
+++ b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/actionmanager/project/httpconnector/HttpConnectorTest.java
@@ -0,0 +1,38 @@
+
+package eu.dnetlib.dhp.actionmanager.project.httpconnector;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
+import org.apache.http.ssl.SSLContextBuilder;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+public class HttpConnectorTest {
+
+	private static final Log log = LogFactory.getLog(HttpConnectorTest.class);
+	private static HttpConnector connector;
+
+	private static final String URL = "http://cordis.europa.eu/data/reference/cordisref-H2020programmes.csv";
+	private static final String URL_MISCONFIGURED_SERVER = "https://www.alexandria.unisg.ch/cgi/oai2?verb=Identify";
+	private static final String URL_GOODSNI_SERVER = "https://air.unimi.it/oai/openaire?verb=Identify";
+
+	private static final SSLContextBuilder sslContextBuilder = new SSLContextBuilder();
+	private static SSLConnectionSocketFactory sslSocketFactory;
+
+	@BeforeAll
+	public static void setUp() {
+		connector = new HttpConnector();
+	}
+
+	@Test
+	public void testGetInputSource() throws CollectorServiceException {
+		System.out.println(connector.getInputSource(URL));
+	}
+
+	@Test
+	public void testGoodServers() throws CollectorServiceException {
+		System.out.println(connector.getInputSource(URL_GOODSNI_SERVER));
+	}
+
+}
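The connector tests above only print the downloaded payload. A hedged sketch of a variant that also asserts on it, assuming (as the println usage suggests) that getInputSource returns the response body as a String; the method and the URL constant are the ones from the test class above:

@Test
public void testGetInputSourceHasContent() throws CollectorServiceException {
	// download the CORDIS reference CSV and check that a body was actually received
	final String payload = connector.getInputSource(URL);
	org.junit.jupiter.api.Assertions.assertNotNull(payload);
	org.junit.jupiter.api.Assertions.assertFalse(payload.isEmpty(), "expected a non-empty CSV payload");
}

diff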
--git a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collector/worker/DnetCollectorWorkerApplicationTests.java b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collector/worker/DnetCollectorWorkerApplicationTests.java index 87bd3be3d..c745219fe 100644 --- a/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collector/worker/DnetCollectorWorkerApplicationTests.java +++ b/dhp-workflows/dhp-aggregation/src/test/java/eu/dnetlib/dhp/collector/worker/DnetCollectorWorkerApplicationTests.java @@ -8,6 +8,7 @@ import java.io.File; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import com.fasterxml.jackson.databind.ObjectMapper; @@ -19,6 +20,7 @@ import eu.dnetlib.dhp.collection.worker.utils.CollectorPluginFactory; import eu.dnetlib.message.Message; import eu.dnetlib.message.MessageManager; +@Disabled public class DnetCollectorWorkerApplicationTests { private final ArgumentApplicationParser argumentParser = mock(ArgumentApplicationParser.class); diff --git a/dhp-workflows/dhp-aggregation/src/test/resources/eu/dnetlib/dhp/actionmanager/project/dbProject b/dhp-workflows/dhp-aggregation/src/test/resources/eu/dnetlib/dhp/actionmanager/project/dbProject new file mode 100644 index 000000000..f8e3c4589 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/test/resources/eu/dnetlib/dhp/actionmanager/project/dbProject @@ -0,0 +1,8 @@ +{"code":"894593"} +{"code":"897004"} +{"code":"896300"} +{"code":"892890"} +{"code":"886828"} +{"code":"8867767"} +{"code":"101003374"} +{"code":"886776"} \ No newline at end of file diff --git a/dhp-workflows/dhp-aggregation/src/test/resources/eu/dnetlib/dhp/actionmanager/project/preparedProgramme_whole.json.gz b/dhp-workflows/dhp-aggregation/src/test/resources/eu/dnetlib/dhp/actionmanager/project/preparedProgramme_whole.json.gz new file mode 100644 index 000000000..1afa73061 Binary files /dev/null and b/dhp-workflows/dhp-aggregation/src/test/resources/eu/dnetlib/dhp/actionmanager/project/preparedProgramme_whole.json.gz differ diff --git a/dhp-workflows/dhp-aggregation/src/test/resources/eu/dnetlib/dhp/actionmanager/project/prepared_projects.json b/dhp-workflows/dhp-aggregation/src/test/resources/eu/dnetlib/dhp/actionmanager/project/prepared_projects.json new file mode 100644 index 000000000..b8805b2db --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/test/resources/eu/dnetlib/dhp/actionmanager/project/prepared_projects.json @@ -0,0 +1,16 @@ +{"rcn":"229267","id":"894593","acronym":"ICARUS","status":"SIGNED","programme":"H2020-EU.3.4.7.","topics":"SESAR-ER4-31-2019","frameworkProgramme":"H2020","title":"INTEGRATED COMMON ALTITUDE REFERENCE SYSTEM FOR U-SPACE","startDate":"2020-05-01","endDate":"2022-07-31","projectUrl":"","objective":"ICARUS project proposes an innovative solution to the challenge of the Common Altitude Reference inside VLL airspaces with the definition of a new U-space service and its validation in a real operational environment. In manned aviation, the methods of determining the altitude of an aircraft are based on pressure altitude difference measurements (e.g. QFE, QNH and FL) referred to a common datum. \nThe UA flights superimpose a new challenge, since a small drone may take off and land almost from everywhere, hence reducing the original significance of QFE settings, introduced on behalf of manned pilots to display on the altimeter the 0-height at touchdown on the local runway. 
In fact, the possibility for n drones to take off at n different places would generate a series of n different QFE corresponding to different heights of ground pressures referred to the take-off “Home points”. Therefore for a large number drones, new methodologies and procedures shall be put in place. The ICARUS defines a new U-space U3 service tightly coupled with the interface of the existing U-space services (e.g. Tracking, and Flight Planning services). The users of ICARUS service shall be remote pilots competent to fly in BVLOS in the specific category of UAS operations and ultralight GA pilots potentially sharing the same VLL airspace. \nThe ICARUS proposed approach foresees the realization of DTM service embedded in an Application Program Interface (API) that can be queried by UAS pilot/operator (or by drone itself) based on the actual positioning of the UA along its trajectory, computed by the (E)GNSS receiver. The output of the DTM service would provide information on distance from ground/obstacles in combination with the common altitude reference.\nAccuracy, continuity, integrity and availability requirements for GNSS-based altimetry together with accuracy and resolution requirements of the DTM to be provided by ICARUS service are key topics of the study.","totalCost":"1385286,25","ecMaxContribution":"1144587,5","call":"H2020-SESAR-2019-2","fundingScheme":"SESAR-RIA","coordinator":"E-GEOS SPA","coordinatorCountry":"IT","participants":"TOPVIEW SRL;TELESPAZIO SPA;DRONERADAR SP Z O.O.;EUROCONTROL - EUROPEAN ORGANISATION FOR THE SAFETY OF AIR NAVIGATION;EUROUSC ESPANA SL;POLITECNICO DI MILANO;UNIVERSITA DEGLI STUDI DI ROMA LA SAPIENZA","participantCountries":"IT;PL;BE;ES","subjects":""} +{"rcn":"229284","id":"897004","acronym":"ISLand","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Isolation and Segregation Landscape. Archaeology of quarantine in the Indian Ocean World","startDate":"2020-11-01","endDate":"2023-10-31","projectUrl":"","objective":"The proposed research presents an experimental and completely novel investigation within the historical archaeology,\napplied to isolated contexts. The main objective of ISLand is to provide a new way of thinking about human interactions\nwithin colonial empires and bringing colonial studies into dialogue with medical history and the emerging concept of\nhealthscaping. It seeks to do so by studying quarantine facilities in the Indian Ocean World during the long nineteenth\ncentury, a crucial period for the history of European empires in that region and a flashpoint for the conceptualization of\nmodern public health. Quarantine, traditionally viewed as merely a mechanism for the control of disease, will be analyzed as\nthe outward material response to important changes taking place socially, ecologically, and politically at the time.\nThe project is a part of an international, interdisciplinary effort, combining history, archaeology, and anthropology. The\nresearcher will tap numerous archival sources and archaeological data from selected sites, examine them through social and\nspatial analysis, and systematically analyze a test case in Mauritius through the most innovative methods that target\nlandscape and standing archaeology.\nThe broader impacts of ISLand have relevance for current European approaches to the migration crisis, where the threat of\ndisease has been ignited as a potentially debilitating consequence of immigration from extra-European countries. 
The\ntraining-through-research project at the Stanford University, the top institution where acquiring knowledge and skills in\nhistorical archaeology, will allow the applicant to develop into a position of professional maturity with a specific\ninterdisciplinary set of skills. With the support of the host institutions in EU, the researcher will promote historical archaeology\nin European academy, stimulating new approaches in usual archaeological research and an interdisciplinary approach with\ncultural anthropology.","totalCost":"253052,16","ecMaxContribution":"253052,16","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-GF","coordinator":"UNIVERSITEIT VAN AMSTERDAM","coordinatorCountry":"NL","participants":"","participantCountries":"","subjects":""} +{"rcn":"229281","id":"896300","acronym":"STRETCH","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Smart Textiles for RETrofitting and Monitoring of Cultural Heritage Buildings","startDate":"2020-09-01","endDate":"2022-08-31","projectUrl":"","objective":"This project aims to develop novel techniques using smart multifunctional materials for the combined seismic-plus-energy retrofitting, and Structural Health Monitoring (SHM) of the European cultural heritage buildings (CHB). The need for upgrading the existing old and CHB is becoming increasingly important for the EU countries, due to: (1) their poor structural performance during recent earthquakes (e.g. Italy, Greece) or other natural hazards (e.g. extreme weather conditions) that have resulted in significant economic losses, and loss of human lives; and (2) their low energy performance which increases significantly their energy consumption (buildings are responsible for 40% of EU energy consumption). Moreover, the SHM of the existing buildings is crucial for assessing continuously their structural integrity and thus to provide information for planning cost effective and sustainable maintenance decisions. Since replacing the old buildings with new is not financially feasible, and even it is not allowed for CHB, their lifetime extension requires considering simultaneously both structural and energy retrofitting. It is noted that the annual cost of repair and maintenance of existing European building stock is estimated to be about 50% of the total construction budget, currently standing at more than €300 billion. To achieve cost effectiveness, STRETCH explores a novel approach, which integrates technical textile reinforcement with thermal insulation systems and strain sensors to provide simultaneous structural-plus-energy retrofitting combined with SHM, tailored for masonry cultural heritage building envelopes. The effectiveness of the proposed retrofitting system will be validated experimentally and analytically. 
Moreover, draft guidelines and recommendations for determining future research on the use of smart composite materials for the concurrent retrofitting (structural-plus-energy) and SHM of the existing cultural heritage buildings envelopes will be proposed.","totalCost":"183473,28","ecMaxContribution":"183473,28","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"JRC -JOINT RESEARCH CENTRE- EUROPEAN COMMISSION","coordinatorCountry":"BE","participants":"","participantCountries":"","subjects":""} +{"rcn":"229265","id":"892890","acronym":"RhythmicPrediction","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Rhythmic prediction in speech perception: are our brain waves in sync with our native language?","startDate":"2021-01-01","endDate":"2022-12-31","projectUrl":"","objective":"Speech has rhythmic properties that widely differ across languages. When we listen to foreign languages, we may perceive them to be more musical, or rather more rap-like than our own. Even if we are unaware of it, the rhythm and melody of language, i.e. prosody, reflects its linguistic structure. On the one hand, prosody emphasizes content words and new information with stress and accents. On the other hand, it is aligned to phrase edges, marking them with boundary tones. Prosody hence helps the listener to focus on important words and to chunk sentences into phrases, and phrases into words. In fact, prosody is even used predictively, for instance to time the onset of the next word, the next piece of new information, or the total remaining length of the utterance, so the listener can seamlessly start their own speaking turn. \nSo, the listener, or rather their brain, is actively predicting when important speech events will happen, using prosody. How prosodic rhythms are exploited to predict speech timing, however, is unclear. No link between prosody and neural predictive processing has yet been empirically made. One hypothesis is that rhythm, such as the alternation of stressed and unstressed syllables, helps listeners time their attention. Similar behavior is best captured by the notion of an internal oscillator which can be set straight by attentional spikes. While neuroscientific evidence for the relation of neural oscillators to speech processing is starting to emerge, no link to the use of prosody nor predictive listening exists, yet. Furthermore, it is still unknown how native language knowledge affects cortical oscillations, and how oscillations are affected by cross-linguistic differences in rhythmic structure. 
The current project combines the standing knowledge of prosodic typology with the recent advances in neuroscience on cortical oscillations, to investigate the role of internal oscillators on native prosody perception, and active speech prediction.","totalCost":"191149,44","ecMaxContribution":"191149,44","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"UNIVERSITE DE GENEVE","coordinatorCountry":"CH","participants":"","participantCountries":"","subjects":""} +{"rcn":"229235","id":"886828","acronym":"ASAP","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Advanced Solutions for Asphalt Pavements","startDate":"2021-09-01","endDate":"2023-08-31","projectUrl":"","objective":"The Advanced Solutions for Asphalt Pavements (ASAP) project involves the development of a unique road paving technology which will use a bio-bitumen rejuvenator to rejuvenate aged asphalt bitumen. This technology will help to extend the lifespan of asphalt pavements (roads) and will reduce the environmental and economic impact of roads and road maintenance processes. Recycling and self-healing processes will replace fossil fuel dependent technology. Self-healing will involve rejuvenating aged asphalt bitumen using a bio-rejuvenator developed using microalgae oils (rejuvenating bio-oil). Microalgae has been selected because of its fast growth, versatility and ability to survive within hostile environments, such as wastewater. \n\nASAP will utilise microalgae, cultivated within the wastewater treatment process, as a source of the rejuvenating bio-oil. The solvent (Soxhlet) processes will be used to extract the oil from the microalgae. To ensure the efficiency of the oil extraction process, an ultrasonication process will be used to pre-treat the microalgae. The suitability of rejuvenating bio-oil as a replacement for the bitumen rejuvenator (fossil fuel based) will be ascertained via a series of standard bituminous and accelerated tests. A rejuvenator-binder diffusion numerical model will be developed, based on the Delft Lattice concrete diffusion model, to determine the conditions required for rejuvenation to occur and to ascertain the healing rate of the asphalt binder. These parameters will facilitate the selection and optimisation of the asphalt self-healing systems (specifically the amount of bio-oil rejuvenator and time required) to achieve full rejuvenation. \n\nThis novel approach will benchmark the effectiveness of this intervention against existing asphalt design and maintenance processes and assess feasibility. The ASAP project presents an opportunity to revolutionise road design and maintenance processes and reduce its environmental and financial costs.","totalCost":"187572,48","ecMaxContribution":"187572,48","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"NEDERLANDSE ORGANISATIE VOOR TOEGEPAST NATUURWETENSCHAPPELIJK ONDERZOEK TNO","coordinatorCountry":"NL","participants":"","participantCountries":"","subjects":""} +{"rcn":null,"id":"886776","acronym":null,"status":null,"programme":"H2020-EU.2.1.4.","topics":null,"frameworkProgramme":"H2020","title":"BIO-Based pESTicides production for sustainable agriculture management plan","startDate":"2020-05-01","endDate":"2023-04-30","projectUrl":"","objective":"The BIOBESTicide project will validate and demonstrate the production of an effective and cost-efficient biopesticide. 
The demonstration will be based on an innovative bio-based value chain starting from the valorisation of sustainable biomasses, i.e. beet pulp and sugar molasses and will exploit the properties of the oomycete Pythium oligandrum strain I-5180 to increase natural plant defenses, to produce an highly effective and eco-friendly biopesticide solution for vine plants protection. \nBIOVITIS, the project coordinator, has developed, at laboratory level (TRL4), an effective method to biocontrol one of the major causes of worldwide vineyards destruction, the Grapevine Trunk Diseases (GTDs). The protection system is based on the oomycete Pythium oligandrum strain I-5180 that, at applied at optimal time and concentration, colonises the root of vines and stimulates the natural plant defences against GTDs, providing a protection that ranges between 40% and 60%. \nBIOBESTicide project will respond to the increasing demands for innovative solutions for crop protection agents, transferring the technology to a DEMO Plant able to produce more than 10 T of a high-quality oomycete-based biopesticide product per year (TRL7). \nThe BIOBESTicide project will validate the efficiency of the formulated product on vineyards of different geographical areas.\nTo assure the safety of products under both health and environmental points of view, a full and complete approval dossier for Pythium oligandrum strain I-5180 will be submitted in all the European countries. \nA Life Cycle Sustainability Assessment (LCSA) will be conducted to assess the environmental, economic and social impacts of the developed products.\nThe adoption of the effective and cost-efficient biopesticide will have significant impacts with a potential ROI of 30 % in just 5 years and a total EBITDA of more than € 6,400,000.","totalCost":"4402772,5","ecMaxContribution":"3069653","call":"H2020-BBI-JTI-2019","fundingScheme":"BBI-IA-DEMO","coordinator":"BIOVITIS","coordinatorCountry":"FR","participants":"MERCIER FRERES SARL;FUNDACION TECNALIA RESEARCH & INNOVATION;LAMBERTI SPA;EURION CONSULTING;CIAOTECH Srl;STOWARZYSZENIE ZACHODNIOPOMORSKI KLASTER CHEMICZNY ZIELONA CHEMIA;NORDZUCKER AG;INSTITUT NATIONAL DE RECHERCHE POUR L'AGRICULTURE, L'ALIMENTATION ET L'ENVIRONNEMENT;INSTITUT FRANCAIS DE LA VIGNE ET DU VIN","participantCountries":"FR;ES;IT;PL;DE","subjects":""} +{"rcn":null,"id":"886776","acronym":null,"status":null,"programme":"H2020-EU.3.2.6.","topics":"BBI-2019-SO3-D4","frameworkProgramme":"H2020","title":"BIO-Based pESTicides production for sustainable agriculture management plan","startDate":"2020-05-01","endDate":"2023-04-30","projectUrl":"","objective":"The BIOBESTicide project will validate and demonstrate the production of an effective and cost-efficient biopesticide. The demonstration will be based on an innovative bio-based value chain starting from the valorisation of sustainable biomasses, i.e. beet pulp and sugar molasses and will exploit the properties of the oomycete Pythium oligandrum strain I-5180 to increase natural plant defenses, to produce an highly effective and eco-friendly biopesticide solution for vine plants protection. \nBIOVITIS, the project coordinator, has developed, at laboratory level (TRL4), an effective method to biocontrol one of the major causes of worldwide vineyards destruction, the Grapevine Trunk Diseases (GTDs). 
The protection system is based on the oomycete Pythium oligandrum strain I-5180 that, at applied at optimal time and concentration, colonises the root of vines and stimulates the natural plant defences against GTDs, providing a protection that ranges between 40% and 60%. \nBIOBESTicide project will respond to the increasing demands for innovative solutions for crop protection agents, transferring the technology to a DEMO Plant able to produce more than 10 T of a high-quality oomycete-based biopesticide product per year (TRL7). \nThe BIOBESTicide project will validate the efficiency of the formulated product on vineyards of different geographical areas.\nTo assure the safety of products under both health and environmental points of view, a full and complete approval dossier for Pythium oligandrum strain I-5180 will be submitted in all the European countries. \nA Life Cycle Sustainability Assessment (LCSA) will be conducted to assess the environmental, economic and social impacts of the developed products.\nThe adoption of the effective and cost-efficient biopesticide will have significant impacts with a potential ROI of 30 % in just 5 years and a total EBITDA of more than € 6,400,000.","totalCost":"4402772,5","ecMaxContribution":"3069653","call":"H2020-BBI-JTI-2019","fundingScheme":"BBI-IA-DEMO","coordinator":"BIOVITIS","coordinatorCountry":"FR","participants":"MERCIER FRERES SARL;FUNDACION TECNALIA RESEARCH & INNOVATION;LAMBERTI SPA;EURION CONSULTING;CIAOTECH Srl;STOWARZYSZENIE ZACHODNIOPOMORSKI KLASTER CHEMICZNY ZIELONA CHEMIA;NORDZUCKER AG;INSTITUT NATIONAL DE RECHERCHE POUR L'AGRICULTURE, L'ALIMENTATION ET L'ENVIRONNEMENT;INSTITUT FRANCAIS DE LA VIGNE ET DU VIN","participantCountries":"FR;ES;IT;PL;DE","subjects":""} +{"rcn":"229276","id":"895426","acronym":"DisMoBoH","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Dissecting the molecular building principles of locally formed transcriptional hubs","startDate":"2021-09-01","endDate":"2023-08-31","projectUrl":"","objective":"Numerous DNA variants have already been identified that modulate inter-individual molecular traits – most prominently gene expression. However, since finding mechanistic interpretations relating genotype to phenotype has proven challenging, the focus has shifted to higher-order regulatory features, i.e. chromatin accessibility, transcription factor (TF) binding and 3D chromatin interactions. This revealed at least two enhancer types: “lead” enhancers in which the presence of genetic variants modulates the activity of entire chromatin domains, and “dependent” ones in which variants induce subtle changes, affecting DNA accessibility, but not transcription. Although cell type-specific TFs are likely important, it remains unclear which sequence features are required to establish such enhancer hierarchies, and under which circumstances genetic variation results in altered enhancer-promoter contacts and differential gene expression. Here, we propose to investigate the molecular mechanisms that link DNA variation to TF binding, chromatin topology, and gene expression response. We will leverage data on enhancer hierarchy and sequence-specific TF binding to identify the sequence signatures that define “lead” enhancers. 
The results will guide the design of a synthetic locus that serves as an in vivo platform to systematically vary the building blocks of local transcriptional units: i) DNA sequence – including variations in TF binding site affinity and syntax, ii) molecular interactions between TFs, and iii) chromatin conformation. To validate our findings, we will perform optical reconstruction of chromatin architecture for a select number of DNA variants. By simultaneously perturbing co-dependent features, this proposal will provide novel mechanistic insights into the formation of local transcriptional hubs.","totalCost":"191149,44","ecMaxContribution":"191149,44","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-RI","coordinator":"ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE","coordinatorCountry":"CH","participants":"","participantCountries":"","subjects":""} +{"rcn":"229288","id":"898218","acronym":"devUTRs","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Uncovering the roles of 5′UTRs in translational control during early zebrafish development","startDate":"2021-09-01","endDate":"2023-08-31","projectUrl":"","objective":"Following fertilisation, metazoan embryos are transcriptionally silent, and embryogenesis is controlled by maternally deposited factors. Developmental progression requires the synthesis of new mRNAs and proteins in a coordinated fashion. Many posttranscriptional mechanisms regulate the fate of maternal mRNAs, but it is less understood how translational control shapes early embryogenesis. In eukaryotes, translation starts at the mRNA 5′ end, consisting of the 5′ cap and 5′ untranslated region (UTR). Protein synthesis is primarily regulated at the translation initiation step by elements within the 5′UTR. However, the role of 5′UTRs in regulating the dynamics of mRNA translation during vertebrate embryogenesis remains unexplored. For example, all vertebrate ribosomal protein (RP) mRNAs harbor a conserved terminal oligopyrimidine tract (TOP) in their 5′UTR. RP levels must be tightly controlled to ensure proper organismal development, but if and how the TOP motif mediates RP mRNA translational regulation during embryogenesis is unclear. Overall, we lack a systematic understanding of the regulatory information contained in 5′UTRs. In this work, I aim to uncover the 5′UTR in vivo rules for mRNA translational regulation during zebrafish embryogenesis. I propose to apply imaging and biochemical approaches to characterise the role of the TOP motif in RP mRNA translational regulation during embryogenesis and identify the trans-acting factor(s) that bind(s) to it (Aim 1). To systematically assess the contribution of 5′UTRs to mRNA translational regulation during zebrafish embryogenesis, I will couple a massively parallel reporter assay of 5′UTRs to polysome profiling (Aim 2). 
By integrating the translational behaviour of 5′UTR reporters throughout embryogenesis with sequence-based regression models, I anticipate to uncover novel cis-regulatory elements in 5′UTRs with developmental roles.","totalCost":"191149,44","ecMaxContribution":"191149,44","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"UNIVERSITAT BASEL","coordinatorCountry":"CH","participants":"","participantCountries":"","subjects":""} +{"rcn":"229261","id":"893787","acronym":"HOLYHOST","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Welfare and Hosting buildings in the “Holy Land” between the 4th and the 7th c. AD","startDate":"2020-10-01","endDate":"2022-09-30","projectUrl":"","objective":"Between the 4th and the 7th century AD, many hospices dedicated to the poor, elderly, strangers and travelers were built in the countryside, along roads, around and inside cities. They were commissioned by the Church, rich pious men and women concerned by the redeem of their sins, as well as emperors who saw this as a guarantee of social stability. Welfare is thus an important phenomena of Late Antiquity, abundantly mentioned by ancient literary sources and inscriptions, particularly in the eastern part of the Empire. However, the buildings that provided shelter and care to the needy have not yet received sufficient attention from archaeologists. Except for buildings which were identified by their inventors as hostels dedicated to pilgrims, they are still invisible in the field. \nThe aim of the HOLYHOST research project is to bring this social history’s main topic on the field of archaeology. It will address the welfare issue through the archaeological and architectural survey and study of Ancient welfare and hosting establishments’ remains, in the Holy Land (Palestine and Jordan) and around. This work will contribute to a better understanding of the practices linked to hospitality, welfare, accommodation and care in Antiquity. Moreover, such establishments served as models for medieval and modern Islamic, Jewish and Christian waqf institutions (religious endowment), and welfare continues to be highly relevant nowadays, through issues still at the heart of contemporary challenges debated in Europe: poverty, social exclusion, migrant crisis, principle of reception and hospitality. This interdisciplinary and diachronic research project will thus offer many new research perspectives, in terms of history of architecture, evolution of care practices, social and political regulations.","totalCost":"196707,84","ecMaxContribution":"196707,84","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"UNIVERSITE PARIS I PANTHEON-SORBONNE","coordinatorCountry":"FR","participants":"","participantCountries":"","subjects":""} +{"rcn":"229282","id":"896189","acronym":"MICADO","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Microbial contribution to continental wetland carbon budget","startDate":"2021-01-04","endDate":"2023-01-03","projectUrl":"","objective":"Continental wetlands are major carbon dioxide sinks but the second largest source of methane. Monitoring of wetland methane emissions revealed large inter-site variability that is hard to explain in the framework of current biogeochemical theories. 
Methane production in wetlands is an anaerobic microbial driven process involving a complex set of microbial metabolisms depending on the availability of (i) energy (via the presence of specific redox couples), (ii) organic substrates and (iii) specific microbial communities. To understand the complexity of microbial drivers on wetland methane emissions and quantify their contribution, the MICADO project will set up a multidisciplinary approach linking isotope organic geochemistry and environmental microbiology to assess microbial functioning in situ. As an organic geochemist I have developed an innovative approach to trace in situ microbial activity via compound specific carbon isotope analysis of microbe macromolecules and organic metabolites. The host institution is a leader in France in environmental microbiology and biogeochemistry developing high-throughput metagenomics and microbial rate assessments, for which I will be trained during the MICADO project. These techniques are highly complementary and combined they will provide a comprehensive knowledge on microbial metabolisms involved in organic matter degradation encompassing their complexity and interactions. This will revisit the relationships between organic substrate availability and microbial communities and will contribute at estimating the impact of microbial activity on wetland methane emissions. This project will give me the opportunity to acquire fundamental knowledge and to develop original lines of research that will consolidate my position as an independent scientist in biogeochemistry.","totalCost":"196707,84","ecMaxContribution":"196707,84","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"CENTRE NATIONAL DE LA RECHERCHE SCIENTIFIQUE CNRS","coordinatorCountry":"FR","participants":"","participantCountries":"","subjects":""} +{"rcn":"229249","id":"891624","acronym":"CuTAN","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Copper-Catalyzed Multicomponent Reactions in Tandem Processes for Target Molecule Synthesis","startDate":"2021-02-01","endDate":"2023-01-31","projectUrl":"","objective":"The invention of processes that can form several bonds, stereocentres and rings in a single process is key to a sustainable future in synthetic chemistry. Multicomponent reactions and tandem procedures are two strategies that enable the rapid build-up of molecular complexity from simple reagents. By combining these two strategies into a single procedure, the diversity, complexity and value of products can be further enhanced along with the efficiency and economy of their construction. In this project, Dr Satpathi will develop novel copper-catalyzed multicomponent couplings of unsaturated hydrocarbons (e.g. allenes, enynes) with imines and boron reagents. These procedures will provide high-value amine products with universally high regio-, diastero- and enantiocontrol. The products will bear a variety of synthetic handles, for example, amino, alkynyl/alkenyl, and boryl groups, thus the products are primed for subsequent transformation. Dr Satpathi will exploit this functionality in tandem intramolecular couplings (e.g. intramolecular Suzuki/Buchwald-Hartwig reactions) to provide core cyclic structures of drug molecules and natural products. Thus, through a tandem procedure of; 1) copper-catalyzed borofunctionalization, and; 2) subsequent transition-metal catalyzed cyclization, he will gain efficient access to highly sought-after complex molecules. 
Overall, the process will provide high-value, chiral, cyclic motifs from abundant, achiral, linear substrates. Finally, Dr Satpathi has identified the phthalide-isoquinoline family of alkaloids as target molecules to display the power of his tandem methodology. Dr Satpathi has devised a novel route, which begins with our tandem multifunctionalization/cyclization reaction, to provide a range of these important alkaloids. The chosen alkaloids are of particular interest as they display a range of bioactivities – for example as natural products, receptor antagonists and on-market drugs.","totalCost":"212933,76","ecMaxContribution":"212933,76","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"THE UNIVERSITY OF MANCHESTER","coordinatorCountry":"UK","participants":"","participantCountries":"","subjects":""} +{"rcn":"229239","id":"887259","acronym":"ALEHOOP","status":"SIGNED","programme":"H2020-EU.2.1.4.","topics":"BBI-2019-SO3-D3","frameworkProgramme":"H2020","title":"Biorefineries for the valorisation of macroalgal residual biomass and legume processing by-products to obtain new protein value chains for high-value food and feed applications","startDate":"2020-06-01","endDate":"2024-05-31","projectUrl":"","objective":"ALEHOOP provides the demonstration at pilot scale of both sustainable macroalgae and legume-based biorefineries for the recovery of low-cost dietary proteins from alga-based and plant residual biomass and their validation to meet market requirements of consumers and industry in the food and feed sectors. In these sectors, consumers are demanding affordable functional natural proteins from alternative sources and industry is demanding low-cost bio-based protein formulations with better performance and higher sustainability. \nCurrent protein demand for the 7.3 billion inhabitants of the world is approximately 202 Mt. Due to the rise in meat consumption more proteins are therefore required for animal feeding. To satisfy the current protein demand, Europe imports over 30 Mt of soy from the Americas each year mainly for animal feeding, entailing 95% dependency of EU on imported soy. Current sources of proteins are becoming unsustainable from an economic and environmental perspective for Europe resulting in concerns for sustainability and food security and leading to search for new alternative proteins. \nALEHOOP addresses the obtaining of proteins from green macroalgal blooms, brown seaweed by-products from algae processors and legume processing by-products (peas, lupines, beans and lentils) as alternative protein sources for animal feeding (case of green seaweed) and food applications (case of brown seaweed and legume by-products), since they are low cost and under-exploited biomass that do not compete with traditional food crops for space and resources. This will reduce EU´s dependency on protein imports and contribute to our raw material security. 
The new proteins will be validated in foods for elderly, sporty and overweight people, vegetarians and healthy consumers as well as for animal feed creating cross-sectorial interconnection between these value chains and supporting the projected business plan.","totalCost":"6718370","ecMaxContribution":"5140274,41","call":"H2020-BBI-JTI-2019","fundingScheme":"BBI-IA-DEMO","coordinator":"CONTACTICA S.L.","coordinatorCountry":"ES","participants":"CENTIV GMBH;ALGINOR ASA;FUNDACION TECNALIA RESEARCH & INNOVATION;INDUKERN,S.A.;ASOCIACION NACIONAL DE FABRICANTES DE CONSERVAS DE PESCADOS Y MARISCOS-CENTRO TECNICO NACIONAL DE CONSERVACION DE PRODUCTOS DE LA PESCA;BIOZOON GMBH;EIGEN VERMOGEN VAN HET INSTITUUT VOOR LANDBOUW- EN VISSERIJONDERZOEK;BIOSURYA SL;VYZKUMNY USTAV VETERINARNIHO LEKARSTVI;NUTRITION SCIENCES;TECHNOLOGICAL UNIVERSITY DUBLIN;GARLAN, S.COOP.;ISANATUR SPAIN SL;UNIVERSIDAD DE VIGO;UNIVERSIDAD DE CADIZ","participantCountries":"DE;NO;ES;BE;CZ;IE","subjects":""} +{"rcn":"229239","id":"887259","acronym":"ALEHOOP","status":"SIGNED","programme":"H2020-EU.3.2.6.","topics":"BBI-2019-SO3-D3","frameworkProgramme":"H2020","title":"Biorefineries for the valorisation of macroalgal residual biomass and legume processing by-products to obtain new protein value chains for high-value food and feed applications","startDate":"2020-06-01","endDate":"2024-05-31","projectUrl":"","objective":"ALEHOOP provides the demonstration at pilot scale of both sustainable macroalgae and legume-based biorefineries for the recovery of low-cost dietary proteins from alga-based and plant residual biomass and their validation to meet market requirements of consumers and industry in the food and feed sectors. In these sectors, consumers are demanding affordable functional natural proteins from alternative sources and industry is demanding low-cost bio-based protein formulations with better performance and higher sustainability. \nCurrent protein demand for the 7.3 billion inhabitants of the world is approximately 202 Mt. Due to the rise in meat consumption more proteins are therefore required for animal feeding. To satisfy the current protein demand, Europe imports over 30 Mt of soy from the Americas each year mainly for animal feeding, entailing 95% dependency of EU on imported soy. Current sources of proteins are becoming unsustainable from an economic and environmental perspective for Europe resulting in concerns for sustainability and food security and leading to search for new alternative proteins. \nALEHOOP addresses the obtaining of proteins from green macroalgal blooms, brown seaweed by-products from algae processors and legume processing by-products (peas, lupines, beans and lentils) as alternative protein sources for animal feeding (case of green seaweed) and food applications (case of brown seaweed and legume by-products), since they are low cost and under-exploited biomass that do not compete with traditional food crops for space and resources. This will reduce EU´s dependency on protein imports and contribute to our raw material security. 
The new proteins will be validated in foods for elderly, sporty and overweight people, vegetarians and healthy consumers as well as for animal feed creating cross-sectorial interconnection between these value chains and supporting the projected business plan.","totalCost":"6718370","ecMaxContribution":"5140274,41","call":"H2020-BBI-JTI-2019","fundingScheme":"BBI-IA-DEMO","coordinator":"CONTACTICA S.L.","coordinatorCountry":"ES","participants":"CENTIV GMBH;ALGINOR ASA;FUNDACION TECNALIA RESEARCH & INNOVATION;INDUKERN,S.A.;ASOCIACION NACIONAL DE FABRICANTES DE CONSERVAS DE PESCADOS Y MARISCOS-CENTRO TECNICO NACIONAL DE CONSERVACION DE PRODUCTOS DE LA PESCA;BIOZOON GMBH;EIGEN VERMOGEN VAN HET INSTITUUT VOOR LANDBOUW- EN VISSERIJONDERZOEK;BIOSURYA SL;VYZKUMNY USTAV VETERINARNIHO LEKARSTVI;NUTRITION SCIENCES;TECHNOLOGICAL UNIVERSITY DUBLIN;GARLAN, S.COOP.;ISANATUR SPAIN SL;UNIVERSIDAD DE VIGO;UNIVERSIDAD DE CADIZ","participantCountries":"DE;NO;ES;BE;CZ;IE","subjects":""} +{"rcn":"229258","id":"892834","acronym":"DENVPOC","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"qPCR Microfluidics point-of-care platform for dengue diagnosis","startDate":"2020-05-18","endDate":"2022-05-17","projectUrl":"","objective":"As a result of Global climate change and fast urbanization, global outbreaks of Dengue (DENV)/ Zika(ZIKV)/Chikungunya(CHIKV) virus have the potential to occur. The most common pathway of these infections in humans is through the female Aedes mosquito vector. DENV is an exanthematous febrile disease with varied clinical manifestations and progressions . Due to similarities in symptoms between DENV and ZIKV and CHIKV, it is difficult to make a differential diagnosis, impeding appropriate, timely medical intervention. Furthermore, cross-reactivity with ZIKV, which was recently related to microcephaly, is a serious issue. In 2016, in Brazil alone, there were 4180 microcephaly cases reported instead of 163 cases, more in line with yearly expected projections , , Thus, the sooner an accurate diagnostic which differentiates DENV from the other manifestations is critical; most especially at the early stages of the infection, to have a reliable diagnosis in pregnant women. In 2016, the OMS emergency committee declared that the outbreaks and the potentially resultant neurological disorders in Brazil were an important international state of emergency in public health, as a result of the associated secondary effects; these diseases became a Global concern. This project allows developing a highly and fast Multiplex qPCR POC platform by using FASTGENE technology with a minimal amount of patient serotype. It would reduce the time of analysis (30 to 90’ for a standard) and costs. Additionally, the sample preprocessing and thermalization will shorten real-time PCR amplification time and will be integrated within the microfluidic systems. This platform can result in a commercialized product whereupon a main market target would be pregnant women and people living or traveling through/from outbreak risk areas.","totalCost":"196707,84","ecMaxContribution":"196707,84","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-SE","coordinator":"BFORCURE","coordinatorCountry":"FR","participants":"","participantCountries":"","subjects":""} +{"rcn":"229280","id":"895716","acronym":"DoMiCoP","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"The Diffusion of Migration Control Practice. 
Actors, Processes and Effects.","startDate":"2021-03-01","endDate":"2023-02-28","projectUrl":"","objective":"DoMiCoP develops new understandings and perspectives to study migration control in practice in the European Union by asking one main question: how and why do communities of practice develop and diffuse the knowledge required to put migration control into action? Unlike the nexus between expert knowledge, epistemic communities and policy formulation, the nexus between everyday knowledge, communities of practice and policy implementation has not yet received systematic scholarly attention. My project bridges that gap by focusing on intermediate arenas in which communities of practice take shape most notably the meetings and trainings that gather state and non-state actors involved in putting asylum, detention and removal into practice. By building on field-based methodologies (interviews and participant observations), DoMiCoP sheds ethnographic light on the role that ‘learning from abroad’ plays in the implementation of migration control in the EU. My project’s aim is threefold: 1) Identifying arenas at intermediate levels in which communities of practice take shape; 2) Analysing the communities of practice by focusing on the configurations of actors and organizations involved, the motivations underlying their involvement, the process of knowledge development in interaction, the conflicts and negotiations; 3) Revealing the role of non-state organizations (private for profit and not-for-profit). From a theoretical point of view, this project goes beyond the classical view of the implementation as a test to assess the effectiveness of policy transfers towards an analysis of policy transfer at that level of policy-making. From an empirical point of view, the project expands knowledge about less-studied venues of policy-making and provides original thick descriptions. 
From a methodological point of view, the project engages with qualitative methods for the study of policy diffusion and aims at responding to their main challenges through participant observation.","totalCost":"163673,28","ecMaxContribution":"163673,28","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"EUROPEAN UNIVERSITY INSTITUTE","coordinatorCountry":"IT","participants":"","participantCountries":"","subjects":""} \ No newline at end of file diff --git a/dhp-workflows/dhp-aggregation/src/test/resources/eu/dnetlib/dhp/actionmanager/project/programme.csv b/dhp-workflows/dhp-aggregation/src/test/resources/eu/dnetlib/dhp/actionmanager/project/programme.csv new file mode 100644 index 000000000..6a9c855a0 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/test/resources/eu/dnetlib/dhp/actionmanager/project/programme.csv @@ -0,0 +1,25 @@ +rcn;code;title;shortTitle;language +664331;H2020-EU.3.3.2.;Un approvisionnement en électricité à faible coût et à faibles émissions de carbone;Low-cost, low-carbon energy supply;fr +664355;H2020-EU.3.3.7.;Absorción por el mercado de la innovación energética - explotación del Programa Energía Inteligente - Europa Europe;Market uptake of energy innovation;es +664323;H2020-EU.3.3.1.;Ridurre il consumo di energia e le emissioni di carbonio grazie all'uso intelligente e sostenibile;Reducing energy consumption and carbon footprint;it +664233;H2020-EU.2.3.2.3.;Wsparcie innowacji rynkowych;Supporting market-driven innovation;pl +664199;H2020-EU.2.1.5.1.;Tecnologías para las fábricas del futuro;Technologies for Factories of the Future;es +664235;H2020-EU.3.;PRIORITÉ «Défis de société»;Societal Challenges;fr +664355;H2020-EU.3.3.7.;"Assorbimento di mercato dell'innovazione energetica - iniziative fondate sul programma ""Energia intelligente - Europa""";Market uptake of energy innovation;it +664355;H2020-EU.3.3.7.;"Markteinführung von Energieinnovationen – Aufbau auf ""Intelligente Energie – Europa";Market uptake of energy innovation;de +664235;H2020-EU.3.;"PRIORIDAD ""Retos de la sociedad""";Societal Challenges;es +664231;H2020-EU.2.3.2.2.;Mejorar la capacidad de innovación de las PYME;Enhancing the innovation capacity of SMEs;es +664223;H2020-EU.2.3.;LIDERAZGO INDUSTRIAL - Innovación en la pequeña y mediana empresa;Innovation in SMEs;es +664323;H2020-EU.3.3.1.;Réduire la consommation d'énergie et l'empreinte carbone en utilisant l'énergie de manière intelligente et durable;Reducing energy consumption and carbon footprint;fr +664323;H2020-EU.3.3.1.;Reducir el consumo de energía y la huella de carbono mediante un uso inteligente y sostenible;Reducing energy consumption and carbon footprint;es +664215;H2020-EU.2.1.6.4.;Beitrag der europäischen Forschung zu internationalen Weltraumpartnerschaften;Research in support of international space partnerships;de +664213;H2020-EU.2.1.6.3.;Permettere lo sfruttamento dei dati spaziali;;it +664213;H2020-EU.2.1.6.3.;Permettre l'exploitation des données spatiales;Enabling exploitation of space data;fr +664231;H2020-EU.2.3.2.2.;Zwiększenie zdolności MŚP pod względem innowacji;Enhancing the innovation capacity of SMEs;pl +664231;H2020-EU.2.3.2.2.;Rafforzare la capacità di innovazione delle PMI;Enhancing the innovation capacity of SMEs;it +664213;H2020-EU.2.1.6.3.;Grundlagen für die Nutzung von Weltraumdaten;Enabling exploitation of space data;de +664211;H2020-EU.2.1.6.2.;Favorecer los avances en las tecnologías espaciales;Enabling advances in space technology;es +664209;H2020-EU.2.1.6.1.;Assurer la 
compétitivité et l'indépendance de l'Europe et promouvoir l'innovation dans le secteur spatial européen;Competitiveness, non-dependence and innovation;fr +664231;H2020-EU.2.3.2.2.;Renforcement de la capacité d'innovation des PME;Enhancing the innovation capacity of SMEs;fr +664203;H2020-EU.2.1.5.3.;Tecnologías sostenibles, eficientes en su utilización de recursos y de baja emisión de carbono en las industrias de transformación de gran consumo energético;Sustainable, resource-efficient and low-carbon technologies in energy-intensive process industries;es +664103;H2020-EU.1.2.1.;FET Open;FET Open;es \ No newline at end of file diff --git a/dhp-workflows/dhp-aggregation/src/test/resources/eu/dnetlib/dhp/actionmanager/project/projects_subset.json b/dhp-workflows/dhp-aggregation/src/test/resources/eu/dnetlib/dhp/actionmanager/project/projects_subset.json new file mode 100644 index 000000000..edf83fbc8 --- /dev/null +++ b/dhp-workflows/dhp-aggregation/src/test/resources/eu/dnetlib/dhp/actionmanager/project/projects_subset.json @@ -0,0 +1,16 @@ +{"rcn":"229267","id":"894593","acronym":"ICARUS","status":"SIGNED","programme":"H2020-EU.3.4.7.","topics":"SESAR-ER4-31-2019","frameworkProgramme":"H2020","title":"INTEGRATED COMMON ALTITUDE REFERENCE SYSTEM FOR U-SPACE","startDate":"2020-05-01","endDate":"2022-07-31","projectUrl":"","objective":"ICARUS project proposes an innovative solution to the challenge of the Common Altitude Reference inside VLL airspaces with the definition of a new U-space service and its validation in a real operational environment. In manned aviation, the methods of determining the altitude of an aircraft are based on pressure altitude difference measurements (e.g. QFE, QNH and FL) referred to a common datum. \nThe UA flights superimpose a new challenge, since a small drone may take off and land almost from everywhere, hence reducing the original significance of QFE settings, introduced on behalf of manned pilots to display on the altimeter the 0-height at touchdown on the local runway. In fact, the possibility for n drones to take off at n different places would generate a series of n different QFE corresponding to different heights of ground pressures referred to the take-off “Home points”. Therefore for a large number drones, new methodologies and procedures shall be put in place. The ICARUS defines a new U-space U3 service tightly coupled with the interface of the existing U-space services (e.g. Tracking, and Flight Planning services). The users of ICARUS service shall be remote pilots competent to fly in BVLOS in the specific category of UAS operations and ultralight GA pilots potentially sharing the same VLL airspace. \nThe ICARUS proposed approach foresees the realization of DTM service embedded in an Application Program Interface (API) that can be queried by UAS pilot/operator (or by drone itself) based on the actual positioning of the UA along its trajectory, computed by the (E)GNSS receiver. 
The output of the DTM service would provide information on distance from ground/obstacles in combination with the common altitude reference.\nAccuracy, continuity, integrity and availability requirements for GNSS-based altimetry together with accuracy and resolution requirements of the DTM to be provided by ICARUS service are key topics of the study.","totalCost":"1385286,25","ecMaxContribution":"1144587,5","call":"H2020-SESAR-2019-2","fundingScheme":"SESAR-RIA","coordinator":"E-GEOS SPA","coordinatorCountry":"IT","participants":"TOPVIEW SRL;TELESPAZIO SPA;DRONERADAR SP Z O.O.;EUROCONTROL - EUROPEAN ORGANISATION FOR THE SAFETY OF AIR NAVIGATION;EUROUSC ESPANA SL;POLITECNICO DI MILANO;UNIVERSITA DEGLI STUDI DI ROMA LA SAPIENZA","participantCountries":"IT;PL;BE;ES","subjects":""} +{"rcn":"229284","id":"897004","acronym":"ISLand","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Isolation and Segregation Landscape. Archaeology of quarantine in the Indian Ocean World","startDate":"2020-11-01","endDate":"2023-10-31","projectUrl":"","objective":"The proposed research presents an experimental and completely novel investigation within historical archaeology,\napplied to isolated contexts. The main objective of ISLand is to provide a new way of thinking about human interactions\nwithin colonial empires and to bring colonial studies into dialogue with medical history and the emerging concept of\nhealthscaping. It seeks to do so by studying quarantine facilities in the Indian Ocean World during the long nineteenth\ncentury, a crucial period for the history of European empires in that region and a flashpoint for the conceptualization of\nmodern public health. Quarantine, traditionally viewed as merely a mechanism for the control of disease, will be analyzed as\nthe outward material response to important changes taking place socially, ecologically, and politically at the time.\nThe project is part of an international, interdisciplinary effort, combining history, archaeology, and anthropology. The\nresearcher will tap numerous archival sources and archaeological data from selected sites, examine them through social and\nspatial analysis, and systematically analyze a test case in Mauritius through the most innovative methods that target\nlandscape and standing archaeology.\nThe broader impacts of ISLand have relevance for current European approaches to the migration crisis, where the threat of\ndisease has been invoked as a potentially debilitating consequence of immigration from extra-European countries. The\ntraining-through-research project at Stanford University, the top institution at which to acquire knowledge and skills in\nhistorical archaeology, will allow the applicant to develop into a position of professional maturity with a specific\ninterdisciplinary set of skills.
With the support of the host institutions in the EU, the researcher will promote historical archaeology\nin European academia, stimulating new approaches in conventional archaeological research and an interdisciplinary approach with\ncultural anthropology.","totalCost":"253052,16","ecMaxContribution":"253052,16","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-GF","coordinator":"UNIVERSITEIT VAN AMSTERDAM","coordinatorCountry":"NL","participants":"","participantCountries":"","subjects":""} +{"rcn":"229281","id":"896300","acronym":"STRETCH","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Smart Textiles for RETrofitting and Monitoring of Cultural Heritage Buildings","startDate":"2020-09-01","endDate":"2022-08-31","projectUrl":"","objective":"This project aims to develop novel techniques using smart multifunctional materials for the combined seismic-plus-energy retrofitting, and Structural Health Monitoring (SHM) of the European cultural heritage buildings (CHB). The need for upgrading the existing old buildings and CHB is becoming increasingly important for the EU countries, due to: (1) their poor structural performance during recent earthquakes (e.g. Italy, Greece) or other natural hazards (e.g. extreme weather conditions) that have resulted in significant economic losses, and loss of human lives; and (2) their low energy performance which increases significantly their energy consumption (buildings are responsible for 40% of EU energy consumption). Moreover, the SHM of the existing buildings is crucial for assessing continuously their structural integrity and thus to provide information for planning cost effective and sustainable maintenance decisions. Since replacing the old buildings with new is not financially feasible, and is not even allowed for CHB, their lifetime extension requires considering simultaneously both structural and energy retrofitting. It is noted that the annual cost of repair and maintenance of existing European building stock is estimated to be about 50% of the total construction budget, currently standing at more than €300 billion. To achieve cost effectiveness, STRETCH explores a novel approach, which integrates technical textile reinforcement with thermal insulation systems and strain sensors to provide simultaneous structural-plus-energy retrofitting combined with SHM, tailored for masonry cultural heritage building envelopes. The effectiveness of the proposed retrofitting system will be validated experimentally and analytically. Moreover, draft guidelines and recommendations for determining future research on the use of smart composite materials for the concurrent retrofitting (structural-plus-energy) and SHM of the existing cultural heritage building envelopes will be proposed.","totalCost":"183473,28","ecMaxContribution":"183473,28","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"JRC -JOINT RESEARCH CENTRE- EUROPEAN COMMISSION","coordinatorCountry":"BE","participants":"","participantCountries":"","subjects":""} +{"rcn":"229265","id":"892890","acronym":"RhythmicPrediction","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Rhythmic prediction in speech perception: are our brain waves in sync with our native language?","startDate":"2021-01-01","endDate":"2022-12-31","projectUrl":"","objective":"Speech has rhythmic properties that widely differ across languages.
When we listen to foreign languages, we may perceive them to be more musical, or rather more rap-like than our own. Even if we are unaware of it, the rhythm and melody of language, i.e. prosody, reflects its linguistic structure. On the one hand, prosody emphasizes content words and new information with stress and accents. On the other hand, it is aligned to phrase edges, marking them with boundary tones. Prosody hence helps the listener to focus on important words and to chunk sentences into phrases, and phrases into words. In fact, prosody is even used predictively, for instance to time the onset of the next word, the next piece of new information, or the total remaining length of the utterance, so the listener can seamlessly start their own speaking turn. \nSo, the listener, or rather their brain, is actively predicting when important speech events will happen, using prosody. How prosodic rhythms are exploited to predict speech timing, however, is unclear. No link between prosody and neural predictive processing has yet been empirically made. One hypothesis is that rhythm, such as the alternation of stressed and unstressed syllables, helps listeners time their attention. Similar behavior is best captured by the notion of an internal oscillator which can be set straight by attentional spikes. While neuroscientific evidence for the relation of neural oscillators to speech processing is starting to emerge, no link to the use of prosody nor predictive listening exists, yet. Furthermore, it is still unknown how native language knowledge affects cortical oscillations, and how oscillations are affected by cross-linguistic differences in rhythmic structure. The current project combines the standing knowledge of prosodic typology with the recent advances in neuroscience on cortical oscillations, to investigate the role of internal oscillators on native prosody perception, and active speech prediction.","totalCost":"191149,44","ecMaxContribution":"191149,44","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"UNIVERSITE DE GENEVE","coordinatorCountry":"CH","participants":"","participantCountries":"","subjects":""} +{"rcn":"229235","id":"886828","acronym":"ASAP","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Advanced Solutions for Asphalt Pavements","startDate":"2021-09-01","endDate":"2023-08-31","projectUrl":"","objective":"The Advanced Solutions for Asphalt Pavements (ASAP) project involves the development of a unique road paving technology which will use a bio-bitumen rejuvenator to rejuvenate aged asphalt bitumen. This technology will help to extend the lifespan of asphalt pavements (roads) and will reduce the environmental and economic impact of roads and road maintenance processes. Recycling and self-healing processes will replace fossil fuel dependent technology. Self-healing will involve rejuvenating aged asphalt bitumen using a bio-rejuvenator developed using microalgae oils (rejuvenating bio-oil). Microalgae has been selected because of its fast growth, versatility and ability to survive within hostile environments, such as wastewater. \n\nASAP will utilise microalgae, cultivated within the wastewater treatment process, as a source of the rejuvenating bio-oil. The solvent (Soxhlet) processes will be used to extract the oil from the microalgae. To ensure the efficiency of the oil extraction process, an ultrasonication process will be used to pre-treat the microalgae. 
The suitability of rejuvenating bio-oil as a replacement for the bitumen rejuvenator (fossil fuel based) will be ascertained via a series of standard bituminous and accelerated tests. A rejuvenator-binder diffusion numerical model will be developed, based on the Delft Lattice concrete diffusion model, to determine the conditions required for rejuvenation to occur and to ascertain the healing rate of the asphalt binder. These parameters will facilitate the selection and optimisation of the asphalt self-healing systems (specifically the amount of bio-oil rejuvenator and time required) to achieve full rejuvenation. \n\nThis novel approach will benchmark the effectiveness of this intervention against existing asphalt design and maintenance processes and assess feasibility. The ASAP project presents an opportunity to revolutionise road design and maintenance processes and reduce their environmental and financial costs.","totalCost":"187572,48","ecMaxContribution":"187572,48","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"NEDERLANDSE ORGANISATIE VOOR TOEGEPAST NATUURWETENSCHAPPELIJK ONDERZOEK TNO","coordinatorCountry":"NL","participants":"","participantCountries":"","subjects":""} +{"rcn":"229236","id":"886776","acronym":"BIOBESTicide","status":"SIGNED","programme":"H2020-EU.2.1.4.;H2020-EU.3.2.6.","topics":"BBI-2019-SO3-D4","frameworkProgramme":"H2020","title":"BIO-Based pESTicides production for sustainable agriculture management plan","startDate":"2020-05-01","endDate":"2023-04-30","projectUrl":"","objective":"The BIOBESTicide project will validate and demonstrate the production of an effective and cost-efficient biopesticide. The demonstration will be based on an innovative bio-based value chain starting from the valorisation of sustainable biomasses, i.e. beet pulp and sugar molasses, and will exploit the properties of the oomycete Pythium oligandrum strain I-5180 to increase natural plant defenses, to produce a highly effective and eco-friendly biopesticide solution for vine plant protection. \nBIOVITIS, the project coordinator, has developed, at laboratory level (TRL4), an effective method to biocontrol one of the major causes of worldwide vineyard destruction, the Grapevine Trunk Diseases (GTDs). The protection system is based on the oomycete Pythium oligandrum strain I-5180 that, when applied at optimal time and concentration, colonises the root of vines and stimulates the natural plant defences against GTDs, providing a protection that ranges between 40% and 60%. \nThe BIOBESTicide project will respond to the increasing demands for innovative solutions for crop protection agents, transferring the technology to a DEMO Plant able to produce more than 10 T of a high-quality oomycete-based biopesticide product per year (TRL7). \nThe BIOBESTicide project will validate the efficiency of the formulated product on vineyards of different geographical areas.\nTo assure the safety of the products from both health and environmental points of view, a full and complete approval dossier for Pythium oligandrum strain I-5180 will be submitted in all the European countries.
\nA Life Cycle Sustainability Assessment (LCSA) will be conducted to assess the environmental, economic and social impacts of the developed products.\nThe adoption of the effective and cost-efficient biopesticide will have significant impacts with a potential ROI of 30 % in just 5 years and a total EBITDA of more than € 6,400,000.","totalCost":"4402772,5","ecMaxContribution":"3069653","call":"H2020-BBI-JTI-2019","fundingScheme":"BBI-IA-DEMO","coordinator":"BIOVITIS","coordinatorCountry":"FR","participants":"MERCIER FRERES SARL;FUNDACION TECNALIA RESEARCH & INNOVATION;LAMBERTI SPA;EURION CONSULTING;CIAOTECH Srl;STOWARZYSZENIE ZACHODNIOPOMORSKI KLASTER CHEMICZNY ZIELONA CHEMIA;NORDZUCKER AG;INSTITUT NATIONAL DE RECHERCHE POUR L'AGRICULTURE, L'ALIMENTATION ET L'ENVIRONNEMENT;INSTITUT FRANCAIS DE LA VIGNE ET DU VIN","participantCountries":"FR;ES;IT;PL;DE","subjects":""} +{"rcn":"229276","id":"895426","acronym":"DisMoBoH","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Dissecting the molecular building principles of locally formed transcriptional hubs","startDate":"2021-09-01","endDate":"2023-08-31","projectUrl":"","objective":"Numerous DNA variants have already been identified that modulate inter-individual molecular traits – most prominently gene expression. However, since finding mechanistic interpretations relating genotype to phenotype has proven challenging, the focus has shifted to higher-order regulatory features, i.e. chromatin accessibility, transcription factor (TF) binding and 3D chromatin interactions. This revealed at least two enhancer types: “lead” enhancers in which the presence of genetic variants modulates the activity of entire chromatin domains, and “dependent” ones in which variants induce subtle changes, affecting DNA accessibility, but not transcription. Although cell type-specific TFs are likely important, it remains unclear which sequence features are required to establish such enhancer hierarchies, and under which circumstances genetic variation results in altered enhancer-promoter contacts and differential gene expression. Here, we propose to investigate the molecular mechanisms that link DNA variation to TF binding, chromatin topology, and gene expression response. We will leverage data on enhancer hierarchy and sequence-specific TF binding to identify the sequence signatures that define “lead” enhancers. The results will guide the design of a synthetic locus that serves as an in vivo platform to systematically vary the building blocks of local transcriptional units: i) DNA sequence – including variations in TF binding site affinity and syntax, ii) molecular interactions between TFs, and iii) chromatin conformation. To validate our findings, we will perform optical reconstruction of chromatin architecture for a select number of DNA variants. 
By simultaneously perturbing co-dependent features, this proposal will provide novel mechanistic insights into the formation of local transcriptional hubs.","totalCost":"191149,44","ecMaxContribution":"191149,44","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-RI","coordinator":"ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE","coordinatorCountry":"CH","participants":"","participantCountries":"","subjects":""} +{"rcn":"229288","id":"898218","acronym":"devUTRs","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Uncovering the roles of 5′UTRs in translational control during early zebrafish development","startDate":"2021-09-01","endDate":"2023-08-31","projectUrl":"","objective":"Following fertilisation, metazoan embryos are transcriptionally silent, and embryogenesis is controlled by maternally deposited factors. Developmental progression requires the synthesis of new mRNAs and proteins in a coordinated fashion. Many posttranscriptional mechanisms regulate the fate of maternal mRNAs, but it is less understood how translational control shapes early embryogenesis. In eukaryotes, translation starts at the mRNA 5′ end, consisting of the 5′ cap and 5′ untranslated region (UTR). Protein synthesis is primarily regulated at the translation initiation step by elements within the 5′UTR. However, the role of 5′UTRs in regulating the dynamics of mRNA translation during vertebrate embryogenesis remains unexplored. For example, all vertebrate ribosomal protein (RP) mRNAs harbor a conserved terminal oligopyrimidine tract (TOP) in their 5′UTR. RP levels must be tightly controlled to ensure proper organismal development, but if and how the TOP motif mediates RP mRNA translational regulation during embryogenesis is unclear. Overall, we lack a systematic understanding of the regulatory information contained in 5′UTRs. In this work, I aim to uncover the 5′UTR in vivo rules for mRNA translational regulation during zebrafish embryogenesis. I propose to apply imaging and biochemical approaches to characterise the role of the TOP motif in RP mRNA translational regulation during embryogenesis and identify the trans-acting factor(s) that bind(s) to it (Aim 1). To systematically assess the contribution of 5′UTRs to mRNA translational regulation during zebrafish embryogenesis, I will couple a massively parallel reporter assay of 5′UTRs to polysome profiling (Aim 2). By integrating the translational behaviour of 5′UTR reporters throughout embryogenesis with sequence-based regression models, I anticipate to uncover novel cis-regulatory elements in 5′UTRs with developmental roles.","totalCost":"191149,44","ecMaxContribution":"191149,44","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"UNIVERSITAT BASEL","coordinatorCountry":"CH","participants":"","participantCountries":"","subjects":""} +{"rcn":"229261","id":"893787","acronym":"HOLYHOST","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Welfare and Hosting buildings in the “Holy Land” between the 4th and the 7th c. AD","startDate":"2020-10-01","endDate":"2022-09-30","projectUrl":"","objective":"Between the 4th and the 7th century AD, many hospices dedicated to the poor, elderly, strangers and travelers were built in the countryside, along roads, around and inside cities. 
They were commissioned by the Church, rich pious men and women concerned by the redeem of their sins, as well as emperors who saw this as a guarantee of social stability. Welfare is thus an important phenomena of Late Antiquity, abundantly mentioned by ancient literary sources and inscriptions, particularly in the eastern part of the Empire. However, the buildings that provided shelter and care to the needy have not yet received sufficient attention from archaeologists. Except for buildings which were identified by their inventors as hostels dedicated to pilgrims, they are still invisible in the field. \nThe aim of the HOLYHOST research project is to bring this social history’s main topic on the field of archaeology. It will address the welfare issue through the archaeological and architectural survey and study of Ancient welfare and hosting establishments’ remains, in the Holy Land (Palestine and Jordan) and around. This work will contribute to a better understanding of the practices linked to hospitality, welfare, accommodation and care in Antiquity. Moreover, such establishments served as models for medieval and modern Islamic, Jewish and Christian waqf institutions (religious endowment), and welfare continues to be highly relevant nowadays, through issues still at the heart of contemporary challenges debated in Europe: poverty, social exclusion, migrant crisis, principle of reception and hospitality. This interdisciplinary and diachronic research project will thus offer many new research perspectives, in terms of history of architecture, evolution of care practices, social and political regulations.","totalCost":"196707,84","ecMaxContribution":"196707,84","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"UNIVERSITE PARIS I PANTHEON-SORBONNE","coordinatorCountry":"FR","participants":"","participantCountries":"","subjects":""} +{"rcn":"229282","id":"896189","acronym":"MICADO","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Microbial contribution to continental wetland carbon budget","startDate":"2021-01-04","endDate":"2023-01-03","projectUrl":"","objective":"Continental wetlands are major carbon dioxide sinks but the second largest source of methane. Monitoring of wetland methane emissions revealed large inter-site variability that is hard to explain in the framework of current biogeochemical theories. Methane production in wetlands is an anaerobic microbial driven process involving a complex set of microbial metabolisms depending on the availability of (i) energy (via the presence of specific redox couples), (ii) organic substrates and (iii) specific microbial communities. To understand the complexity of microbial drivers on wetland methane emissions and quantify their contribution, the MICADO project will set up a multidisciplinary approach linking isotope organic geochemistry and environmental microbiology to assess microbial functioning in situ. As an organic geochemist I have developed an innovative approach to trace in situ microbial activity via compound specific carbon isotope analysis of microbe macromolecules and organic metabolites. The host institution is a leader in France in environmental microbiology and biogeochemistry developing high-throughput metagenomics and microbial rate assessments, for which I will be trained during the MICADO project. 
These techniques are highly complementary and combined they will provide a comprehensive knowledge on microbial metabolisms involved in organic matter degradation encompassing their complexity and interactions. This will revisit the relationships between organic substrate availability and microbial communities and will contribute at estimating the impact of microbial activity on wetland methane emissions. This project will give me the opportunity to acquire fundamental knowledge and to develop original lines of research that will consolidate my position as an independent scientist in biogeochemistry.","totalCost":"196707,84","ecMaxContribution":"196707,84","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"CENTRE NATIONAL DE LA RECHERCHE SCIENTIFIQUE CNRS","coordinatorCountry":"FR","participants":"","participantCountries":"","subjects":""} +{"rcn":"229249","id":"891624","acronym":"CuTAN","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Copper-Catalyzed Multicomponent Reactions in Tandem Processes for Target Molecule Synthesis","startDate":"2021-02-01","endDate":"2023-01-31","projectUrl":"","objective":"The invention of processes that can form several bonds, stereocentres and rings in a single process is key to a sustainable future in synthetic chemistry. Multicomponent reactions and tandem procedures are two strategies that enable the rapid build-up of molecular complexity from simple reagents. By combining these two strategies into a single procedure, the diversity, complexity and value of products can be further enhanced along with the efficiency and economy of their construction. In this project, Dr Satpathi will develop novel copper-catalyzed multicomponent couplings of unsaturated hydrocarbons (e.g. allenes, enynes) with imines and boron reagents. These procedures will provide high-value amine products with universally high regio-, diastero- and enantiocontrol. The products will bear a variety of synthetic handles, for example, amino, alkynyl/alkenyl, and boryl groups, thus the products are primed for subsequent transformation. Dr Satpathi will exploit this functionality in tandem intramolecular couplings (e.g. intramolecular Suzuki/Buchwald-Hartwig reactions) to provide core cyclic structures of drug molecules and natural products. Thus, through a tandem procedure of; 1) copper-catalyzed borofunctionalization, and; 2) subsequent transition-metal catalyzed cyclization, he will gain efficient access to highly sought-after complex molecules. Overall, the process will provide high-value, chiral, cyclic motifs from abundant, achiral, linear substrates. Finally, Dr Satpathi has identified the phthalide-isoquinoline family of alkaloids as target molecules to display the power of his tandem methodology. Dr Satpathi has devised a novel route, which begins with our tandem multifunctionalization/cyclization reaction, to provide a range of these important alkaloids. 
The chosen alkaloids are of particular interest as they display a range of bioactivities – for example as natural products, receptor antagonists and on-market drugs.","totalCost":"212933,76","ecMaxContribution":"212933,76","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"THE UNIVERSITY OF MANCHESTER","coordinatorCountry":"UK","participants":"","participantCountries":"","subjects":""} +{"rcn":"229239","id":"887259","acronym":"ALEHOOP","status":"SIGNED","programme":"H2020-EU.2.1.4.;H2020-EU.3.2.6.","topics":"BBI-2019-SO3-D3","frameworkProgramme":"H2020","title":"Biorefineries for the valorisation of macroalgal residual biomass and legume processing by-products to obtain new protein value chains for high-value food and feed applications","startDate":"2020-06-01","endDate":"2024-05-31","projectUrl":"","objective":"ALEHOOP provides the demonstration at pilot scale of both sustainable macroalgae and legume-based biorefineries for the recovery of low-cost dietary proteins from alga-based and plant residual biomass and their validation to meet market requirements of consumers and industry in the food and feed sectors. In these sectors, consumers are demanding affordable functional natural proteins from alternative sources and industry is demanding low-cost bio-based protein formulations with better performance and higher sustainability. \nCurrent protein demand for the 7.3 billion inhabitants of the world is approximately 202 Mt. Due to the rise in meat consumption more proteins are therefore required for animal feeding. To satisfy the current protein demand, Europe imports over 30 Mt of soy from the Americas each year mainly for animal feeding, entailing 95% dependency of EU on imported soy. Current sources of proteins are becoming unsustainable from an economic and environmental perspective for Europe resulting in concerns for sustainability and food security and leading to search for new alternative proteins. \nALEHOOP addresses the obtaining of proteins from green macroalgal blooms, brown seaweed by-products from algae processors and legume processing by-products (peas, lupines, beans and lentils) as alternative protein sources for animal feeding (case of green seaweed) and food applications (case of brown seaweed and legume by-products), since they are low cost and under-exploited biomass that do not compete with traditional food crops for space and resources. This will reduce EU´s dependency on protein imports and contribute to our raw material security. 
The new proteins will be validated in foods for elderly, sporty and overweight people, vegetarians and healthy consumers as well as for animal feed creating cross-sectorial interconnection between these value chains and supporting the projected business plan.","totalCost":"6718370","ecMaxContribution":"5140274,41","call":"H2020-BBI-JTI-2019","fundingScheme":"BBI-IA-DEMO","coordinator":"CONTACTICA S.L.","coordinatorCountry":"ES","participants":"CENTIV GMBH;ALGINOR ASA;FUNDACION TECNALIA RESEARCH & INNOVATION;INDUKERN,S.A.;ASOCIACION NACIONAL DE FABRICANTES DE CONSERVAS DE PESCADOS Y MARISCOS-CENTRO TECNICO NACIONAL DE CONSERVACION DE PRODUCTOS DE LA PESCA;BIOZOON GMBH;EIGEN VERMOGEN VAN HET INSTITUUT VOOR LANDBOUW- EN VISSERIJONDERZOEK;BIOSURYA SL;VYZKUMNY USTAV VETERINARNIHO LEKARSTVI;NUTRITION SCIENCES;TECHNOLOGICAL UNIVERSITY DUBLIN;GARLAN, S.COOP.;ISANATUR SPAIN SL;UNIVERSIDAD DE VIGO;UNIVERSIDAD DE CADIZ","participantCountries":"DE;NO;ES;BE;CZ;IE","subjects":""} +{"rcn":"229258","id":"892834","acronym":"DENVPOC","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"qPCR Microfluidics point-of-care platform for dengue diagnosis","startDate":"2020-05-18","endDate":"2022-05-17","projectUrl":"","objective":"As a result of Global climate change and fast urbanization, global outbreaks of Dengue (DENV)/ Zika(ZIKV)/Chikungunya(CHIKV) virus have the potential to occur. The most common pathway of these infections in humans is through the female Aedes mosquito vector. DENV is an exanthematous febrile disease with varied clinical manifestations and progressions . Due to similarities in symptoms between DENV and ZIKV and CHIKV, it is difficult to make a differential diagnosis, impeding appropriate, timely medical intervention. Furthermore, cross-reactivity with ZIKV, which was recently related to microcephaly, is a serious issue. In 2016, in Brazil alone, there were 4180 microcephaly cases reported instead of 163 cases, more in line with yearly expected projections , , Thus, the sooner an accurate diagnostic which differentiates DENV from the other manifestations is critical; most especially at the early stages of the infection, to have a reliable diagnosis in pregnant women. In 2016, the OMS emergency committee declared that the outbreaks and the potentially resultant neurological disorders in Brazil were an important international state of emergency in public health, as a result of the associated secondary effects; these diseases became a Global concern. This project allows developing a highly and fast Multiplex qPCR POC platform by using FASTGENE technology with a minimal amount of patient serotype. It would reduce the time of analysis (30 to 90’ for a standard) and costs. Additionally, the sample preprocessing and thermalization will shorten real-time PCR amplification time and will be integrated within the microfluidic systems. This platform can result in a commercialized product whereupon a main market target would be pregnant women and people living or traveling through/from outbreak risk areas.","totalCost":"196707,84","ecMaxContribution":"196707,84","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-SE","coordinator":"BFORCURE","coordinatorCountry":"FR","participants":"","participantCountries":"","subjects":""} +{"rcn":"229280","id":"895716","acronym":"DoMiCoP","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"The Diffusion of Migration Control Practice. 
Actors, Processes and Effects.","startDate":"2021-03-01","endDate":"2023-02-28","projectUrl":"","objective":"DoMiCoP develops new understandings and perspectives to study migration control in practice in the European Union by asking one main question: how and why do communities of practice develop and diffuse the knowledge required to put migration control into action? Unlike the nexus between expert knowledge, epistemic communities and policy formulation, the nexus between everyday knowledge, communities of practice and policy implementation has not yet received systematic scholarly attention. My project bridges that gap by focusing on intermediate arenas in which communities of practice take shape most notably the meetings and trainings that gather state and non-state actors involved in putting asylum, detention and removal into practice. By building on field-based methodologies (interviews and participant observations), DoMiCoP sheds ethnographic light on the role that ‘learning from abroad’ plays in the implementation of migration control in the EU. My project’s aim is threefold: 1) Identifying arenas at intermediate levels in which communities of practice take shape; 2) Analysing the communities of practice by focusing on the configurations of actors and organizations involved, the motivations underlying their involvement, the process of knowledge development in interaction, the conflicts and negotiations; 3) Revealing the role of non-state organizations (private for profit and not-for-profit). From a theoretical point of view, this project goes beyond the classical view of the implementation as a test to assess the effectiveness of policy transfers towards an analysis of policy transfer at that level of policy-making. From an empirical point of view, the project expands knowledge about less-studied venues of policy-making and provides original thick descriptions. From a methodological point of view, the project engages with qualitative methods for the study of policy diffusion and aims at responding to their main challenges through participant observation.","totalCost":"163673,28","ecMaxContribution":"163673,28","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"EUROPEAN UNIVERSITY INSTITUTE","coordinatorCountry":"IT","participants":"","participantCountries":"","subjects":""} +{"rcn":"229297","id":"954782","acronym":"MiniLLock","status":"SIGNED","programme":"H2020-EU.3.;H2020-EU.2.3.;H2020-EU.2.1.","topics":"EIC-SMEInst-2018-2020","frameworkProgramme":"H2020","title":"Mini Launch Lock devices for small satellites","startDate":"2020-05-01","endDate":"2022-04-30","projectUrl":"","objective":"Space industry is experiencing the most important paradigm shift in its history with the rise of small satellites and megaconstellations.\nSatellite miniaturization requires to reduce significantly production and orbit launching costs. To address the\nnew challenge of this manufacturing process and switch from craftsmanship to industrialization, space industry is turning\ntowards other domains looking for new solutions, disruptive technologies, and manufacturing process.\nMini Launch Lock devices for small satellites (MiniLLock) proposes innovative actuators on the cutting edge of customer\ndemand. 
They offer plug and play solutions that can directly be integrated into industry for robotized satellite production.\nMiniLLock is smaller, lighter, safer, with a longer lifetime and generates significantly fewer shocks and vibrations than\nstandard actuators such as electromagnets and pyrotechnics. MiniLLock offers performance which has never been reached\nwith any other materials.\nNimesis is the only company that can provide such cost-effective actuators suitable for small satellites with high performance\nand reliability, enabling features previously impossible.\nMiniLLock will accelerate and leverage the commercialization of Nimesis technology and ensure Europe's worldwide\nleadership\nand independence in the emerging New Space environment.\nNimesis aims to become the global leader in this domain with a turnover of € 26 million and a market share of 28% in\n2027.","totalCost":"2413543,75","ecMaxContribution":"1689480,63","call":"H2020-EIC-SMEInst-2018-2020-3","fundingScheme":"SME-2b","coordinator":"NIMESIS TECHNOLOGY SARL","coordinatorCountry":"FR","participants":"","participantCountries":"","subjects":""} +{"rcn":"229299","id":"101003374","acronym":"NOPHOS","status":"SIGNED","programme":"H2020-EU.4.","topics":"WF-02-2019","frameworkProgramme":"H2020","title":"Unravelling protein phosphorylation mechanisms and phosphoproteome changes under nitrosative stress conditions in E.coli","startDate":"2020-07-01","endDate":"2022-06-30","projectUrl":"","objective":"Currently, we face a global antibiotic resistance crisis aggravated by the slow development of more effective and anti-resistance-promoting therapeutic solutions. Protein phosphorylation (PP) has recently emerged as one of the major post-translational modifications in bacteria, involved in the regulation of multiple physiological processes. In this MSCA individual fellowship application we aim to bridge the current gap in the field for prokaryotes by unravelling the unknown regulatory role of PP on proteins involved in nitrosative stress (NS) detoxification in the model bacterium E.coli. We propose to examine for the first time both global protein modifications (e.g. phosphoproteomics) under nitrogen species stress, as well as characterize PP in individual proteins involved in the NS response. We will construct a network model that reflects the phosphoproteomic changes upon NS in E.coli, which may pave the way for the design of new bacterial targets. Understanding how bacteria respond to the chemical weapons of the human innate immune system is fundamental to developing efficient therapies. We will pioneer research on the mechanism and the regulation of nitric oxide detoxification proteins already identified as phosphorylated, by analyzing how this modification influences their stability and activity in vitro and in vivo. This project opens up new research paths on bacterial detoxification systems and signalling in general, addressing for the first time the role of PP in these processes.
The proposal brings together transversal and scientific skills that will enable the researcher to lead the development of this emerging field and position herself as an expert in the area, and aims at establishing the importance of PP in NO microbial response, a novelty in this field.","totalCost":"147815,04","ecMaxContribution":"147815,04","call":"H2020-WF-02-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"UNIVERSIDADE NOVA DE LISBOA","coordinatorCountry":"PT","participants":"","participantCountries":"","subjects":""} \ No newline at end of file diff --git a/dhp-workflows/dhp-aggregation/src/test/resources/eu/dnetlib/dhp/actionmanager/project/whole_programme.json.gz b/dhp-workflows/dhp-aggregation/src/test/resources/eu/dnetlib/dhp/actionmanager/project/whole_programme.json.gz new file mode 100644 index 000000000..85440ceed Binary files /dev/null and b/dhp-workflows/dhp-aggregation/src/test/resources/eu/dnetlib/dhp/actionmanager/project/whole_programme.json.gz differ diff --git a/dhp-workflows/dhp-blacklist/pom.xml b/dhp-workflows/dhp-blacklist/pom.xml index a3cc15b74..04d334cd7 100644 --- a/dhp-workflows/dhp-blacklist/pom.xml +++ b/dhp-workflows/dhp-blacklist/pom.xml @@ -3,7 +3,7 @@ dhp-workflows eu.dnetlib.dhp - 1.2.2-SNAPSHOT + 1.2.3-SNAPSHOT 4.0.0 diff --git a/dhp-workflows/dhp-broker-events/pom.xml b/dhp-workflows/dhp-broker-events/pom.xml index cc323d109..eddc042c6 100644 --- a/dhp-workflows/dhp-broker-events/pom.xml +++ b/dhp-workflows/dhp-broker-events/pom.xml @@ -1,11 +1,9 @@ - + dhp-workflows eu.dnetlib.dhp - 1.2.2-SNAPSHOT + 1.2.3-SNAPSHOT 4.0.0 @@ -63,7 +61,7 @@ eu.dnetlib dnet-openaire-broker-common - [2.0.1,3.0.0) + [3.0.1,4.0.0) diff --git a/dhp-workflows/dhp-broker-events/report.xml b/dhp-workflows/dhp-broker-events/report.xml new file mode 100644 index 000000000..6e706f723 --- /dev/null +++ b/dhp-workflows/dhp-broker-events/report.xml @@ -0,0 +1,37 @@ + + + + Feature Extraction + + + TCPFLOW + 1.5.0 + + 4.2.1 (4.2.1 Compatible Apple LLVM 11.0.0 (clang-1100.0.33.8)) + -D_THREAD_SAFE -pthread -I/usr/local/include -I/usr/local/include -DUTC_OFFSET=+0000 + -g -D_THREAD_SAFE -pthread -g -O3 -MD -Wpointer-arith -Wmissing-declarations -Wmissing-prototypes -Wshadow -Wwrite-strings -Wcast-align -Waggregate-return -Wbad-function-cast -Wcast-qual -Wundef -Wredundant-decls -Wdisabled-optimization -Wfloat-equal -Wmultichar -Wc++-compat -Wmissing-noreturn -Wall -Wstrict-prototypes -MD -D_FORTIFY_SOURCE=2 -Wpointer-arith -Wmissing-declarations -Wmissing-prototypes -Wshadow -Wwrite-strings -Wcast-align -Waggregate-return -Wbad-function-cast -Wcast-qual -Wundef -Wredundant-decls -Wdisabled-optimization -Wfloat-equal -Wmultichar -Wc++-compat -Wmissing-noreturn -Wall -Wstrict-prototypes + -g -D_THREAD_SAFE -pthread -g -O3 -Wall -MD -D_FORTIFY_SOURCE=2 -Wpointer-arith -Wshadow -Wwrite-strings -Wcast-align -Wredundant-decls -Wdisabled-optimization -Wfloat-equal -Wmultichar -Wmissing-noreturn -Woverloaded-virtual -Wsign-promo -funit-at-a-time -Weffc++ -std=c++11 -Wall -MD -D_FORTIFY_SOURCE=2 -Wpointer-arith -Wshadow -Wwrite-strings -Wcast-align -Wredundant-decls -Wdisabled-optimization -Wfloat-equal -Wmultichar -Wmissing-noreturn -Woverloaded-virtual -Wsign-promo -funit-at-a-time -Weffc++ + -L/usr/local/lib -L/usr/local/lib + -lpython2.7 -lpython2.7 -lpcap -lbz2 -lexpat -lsqlite3 -lcrypto -lssl -lcrypto -ldl -lz + 2019-10-11T01:16:58 + + + + + Darwin + 19.5.0 + Darwin Kernel Version 19.5.0: Tue May 26 20:41:44 PDT 2020; root:xnu-6153.121.2~2/RELEASE_X86_64 + Micheles-MBP.local + x86_64 + 
tcpflow + 501 + michele + 2020-06-15T14:55:03Z + + + + + 0 diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/model/Event.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/model/Event.java index 0512a3813..f94d286e4 100644 --- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/model/Event.java +++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/model/Event.java @@ -1,9 +1,15 @@ package eu.dnetlib.dhp.broker.model; +import java.io.Serializable; import java.util.Map; -public class Event { +public class Event implements Serializable { + + /** + * + */ + private static final long serialVersionUID = -5936790326505781395L; private String eventId; diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/model/EventFactory.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/model/EventFactory.java index df33fae0d..bf4f62d24 100644 --- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/model/EventFactory.java +++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/model/EventFactory.java @@ -6,17 +6,13 @@ import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.time.DateUtils; +import eu.dnetlib.broker.objects.OpenaireBrokerResult; import eu.dnetlib.dhp.broker.oa.util.UpdateInfo; -import eu.dnetlib.dhp.schema.oaf.Author; -import eu.dnetlib.dhp.schema.oaf.KeyValue; -import eu.dnetlib.dhp.schema.oaf.Result; -import eu.dnetlib.dhp.schema.oaf.StructuredProperty; public class EventFactory { @@ -37,8 +33,7 @@ public class EventFactory { final Map map = createMapFromResult(updateInfo); final String eventId = calculateEventId( - updateInfo.getTopicPath(), updateInfo.getTarget().getOriginalId().get(0), - updateInfo.getHighlightValueAsString()); + updateInfo.getTopicPath(), updateInfo.getTarget().getOriginalId(), updateInfo.getHighlightValueAsString()); res.setEventId(eventId); res.setProducerId(PRODUCER_ID); @@ -54,53 +49,31 @@ public class EventFactory { private static Map createMapFromResult(final UpdateInfo updateInfo) { final Map map = new HashMap<>(); - final Result source = updateInfo.getSource(); - final Result target = updateInfo.getTarget(); + final OpenaireBrokerResult source = updateInfo.getSource(); + final OpenaireBrokerResult target = updateInfo.getTarget(); - final List collectedFrom = target.getCollectedfrom(); - if (collectedFrom.size() == 1) { - map.put("target_datasource_id", collectedFrom.get(0).getKey()); - map.put("target_datasource_name", collectedFrom.get(0).getValue()); - } + map.put("target_datasource_id", target.getCollectedFromId()); + map.put("target_datasource_name", target.getCollectedFromName()); - final List ids = target.getOriginalId(); - if (ids.size() > 0) { - map.put("target_publication_id", ids.get(0)); - } + map.put("target_publication_id", target.getOriginalId()); - final List titles = target.getTitle(); + final List titles = target.getTitles(); if (titles.size() > 0) { map.put("target_publication_title", titles.get(0)); } - final long date = parseDateTolong(target.getDateofacceptance().getValue()); + final long date = parseDateTolong(target.getPublicationdate()); if (date > 0) { map.put("target_dateofacceptance", date); } - final List subjects = target.getSubject(); - if (subjects.size() > 
0) { - map - .put( - "target_publication_subject_list", - subjects.stream().map(StructuredProperty::getValue).collect(Collectors.toList())); - } - - final List authors = target.getAuthor(); - if (authors.size() > 0) { - map - .put( - "target_publication_author_list", - authors.stream().map(Author::getFullname).collect(Collectors.toList())); - } + map.put("target_publication_subject_list", target.getSubjects()); + map.put("target_publication_author_list", target.getCreators()); // PROVENANCE INFO map.put("trust", updateInfo.getTrust()); - final List sourceCollectedFrom = source.getCollectedfrom(); - if (sourceCollectedFrom.size() == 1) { - map.put("provenance_datasource_id", sourceCollectedFrom.get(0).getKey()); - map.put("provenance_datasource_name", sourceCollectedFrom.get(0).getValue()); - } + map.put("provenance_datasource_id", source.getCollectedFromId()); + map.put("provenance_datasource_name", source.getCollectedFromName()); map.put("provenance_publication_id_list", source.getOriginalId()); return map; diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/GenerateEventsApplication.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/GenerateEventsApplication.java index ecf4e3eff..c1f12f43c 100644 --- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/GenerateEventsApplication.java +++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/GenerateEventsApplication.java @@ -3,15 +3,9 @@ package eu.dnetlib.dhp.broker.oa; import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Map; import java.util.Optional; -import java.util.stream.Collectors; import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.tuple.Pair; import org.apache.spark.SparkConf; import org.apache.spark.api.java.function.MapFunction; import org.apache.spark.sql.Dataset; @@ -24,40 +18,20 @@ import org.slf4j.LoggerFactory; import com.fasterxml.jackson.databind.ObjectMapper; +import eu.dnetlib.broker.objects.OpenaireBrokerResult; import eu.dnetlib.dhp.application.ArgumentApplicationParser; import eu.dnetlib.dhp.broker.model.Event; -import eu.dnetlib.dhp.broker.model.EventFactory; -import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher; -import eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets.EnrichMissingDatasetIsReferencedBy; -import eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets.EnrichMissingDatasetIsRelatedTo; -import eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets.EnrichMissingDatasetIsSupplementedBy; -import eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets.EnrichMissingDatasetIsSupplementedTo; -import eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets.EnrichMissingDatasetReferences; -import eu.dnetlib.dhp.broker.oa.matchers.relatedProjects.EnrichMissingProject; -import eu.dnetlib.dhp.broker.oa.matchers.relatedProjects.EnrichMoreProject; -import eu.dnetlib.dhp.broker.oa.matchers.relatedPublications.EnrichMissingPublicationIsReferencedBy; -import eu.dnetlib.dhp.broker.oa.matchers.relatedPublications.EnrichMissingPublicationIsRelatedTo; -import eu.dnetlib.dhp.broker.oa.matchers.relatedPublications.EnrichMissingPublicationIsSupplementedBy; -import eu.dnetlib.dhp.broker.oa.matchers.relatedPublications.EnrichMissingPublicationIsSupplementedTo; -import eu.dnetlib.dhp.broker.oa.matchers.relatedPublications.EnrichMissingPublicationReferences; -import 
eu.dnetlib.dhp.broker.oa.matchers.relatedSoftware.EnrichMissingSoftware; -import eu.dnetlib.dhp.broker.oa.matchers.relatedSoftware.EnrichMoreSoftware; -import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMissingAbstract; -import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMissingAuthorOrcid; -import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMissingOpenAccess; -import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMissingPid; -import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMissingPublicationDate; -import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMissingSubject; -import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMoreOpenAccess; -import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMorePid; -import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMoreSubject; import eu.dnetlib.dhp.broker.oa.util.BrokerConstants; +import eu.dnetlib.dhp.broker.oa.util.ConversionUtils; +import eu.dnetlib.dhp.broker.oa.util.EventFinder; import eu.dnetlib.dhp.broker.oa.util.EventGroup; -import eu.dnetlib.dhp.broker.oa.util.ResultAggregator; -import eu.dnetlib.dhp.broker.oa.util.ResultGroup; -import eu.dnetlib.dhp.broker.oa.util.UpdateInfo; +import eu.dnetlib.dhp.broker.oa.util.aggregators.simple.ResultAggregator; +import eu.dnetlib.dhp.broker.oa.util.aggregators.simple.ResultGroup; +import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.OpenaireBrokerResultAggregator; +import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedEntityFactory; +import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedProject; import eu.dnetlib.dhp.common.HdfsSupport; -import eu.dnetlib.dhp.schema.oaf.OafEntity; +import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct; import eu.dnetlib.dhp.schema.oaf.Project; import eu.dnetlib.dhp.schema.oaf.Publication; import eu.dnetlib.dhp.schema.oaf.Relation; @@ -72,40 +46,6 @@ public class GenerateEventsApplication { private static final Logger log = LoggerFactory.getLogger(GenerateEventsApplication.class); - // Simple Matchers - private static final UpdateMatcher enrichMissingAbstract = new EnrichMissingAbstract(); - private static final UpdateMatcher enrichMissingAuthorOrcid = new EnrichMissingAuthorOrcid(); - private static final UpdateMatcher enrichMissingOpenAccess = new EnrichMissingOpenAccess(); - private static final UpdateMatcher enrichMissingPid = new EnrichMissingPid(); - private static final UpdateMatcher enrichMissingPublicationDate = new EnrichMissingPublicationDate(); - private static final UpdateMatcher enrichMissingSubject = new EnrichMissingSubject(); - private static final UpdateMatcher enrichMoreOpenAccess = new EnrichMoreOpenAccess(); - private static final UpdateMatcher enrichMorePid = new EnrichMorePid(); - private static final UpdateMatcher enrichMoreSubject = new EnrichMoreSubject(); - - // Advanced matchers - private static final UpdateMatcher>, ?> enrichMissingProject = new EnrichMissingProject(); - private static final UpdateMatcher>, ?> enrichMoreProject = new EnrichMoreProject(); - - private static final UpdateMatcher>, ?> enrichMissingSoftware = new EnrichMissingSoftware(); - private static final UpdateMatcher>, ?> enrichMoreSoftware = new EnrichMoreSoftware(); - - private static final UpdateMatcher>, ?> enrichMisissingPublicationIsRelatedTo = new EnrichMissingPublicationIsRelatedTo(); - private static final UpdateMatcher>, ?> enrichMissingPublicationIsReferencedBy = new EnrichMissingPublicationIsReferencedBy(); - private static final UpdateMatcher>, ?> enrichMissingPublicationReferences = new 
EnrichMissingPublicationReferences();
-	private static final UpdateMatcher<Pair<Result, List<Publication>>, ?> enrichMissingPublicationIsSupplementedTo = new EnrichMissingPublicationIsSupplementedTo();
-	private static final UpdateMatcher<Pair<Result, List<Publication>>, ?> enrichMissingPublicationIsSupplementedBy = new EnrichMissingPublicationIsSupplementedBy();
-
-	private static final UpdateMatcher<Pair<Result, List<eu.dnetlib.dhp.schema.oaf.Dataset>>, ?> enrichMisissingDatasetIsRelatedTo = new EnrichMissingDatasetIsRelatedTo();
-	private static final UpdateMatcher<Pair<Result, List<eu.dnetlib.dhp.schema.oaf.Dataset>>, ?> enrichMissingDatasetIsReferencedBy = new EnrichMissingDatasetIsReferencedBy();
-	private static final UpdateMatcher<Pair<Result, List<eu.dnetlib.dhp.schema.oaf.Dataset>>, ?> enrichMissingDatasetReferences = new EnrichMissingDatasetReferences();
-	private static final UpdateMatcher<Pair<Result, List<eu.dnetlib.dhp.schema.oaf.Dataset>>, ?> enrichMissingDatasetIsSupplementedTo = new EnrichMissingDatasetIsSupplementedTo();
-	private static final UpdateMatcher<Pair<Result, List<eu.dnetlib.dhp.schema.oaf.Dataset>>, ?> enrichMissingDatasetIsSupplementedBy = new EnrichMissingDatasetIsSupplementedBy();
-
-	// Aggregators
-	private static final TypedColumn<Tuple2<Result, Relation>, ResultGroup> resultAggrTypedColumn = new ResultAggregator()
-		.toColumn();
-
 	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
 
 	public static void main(final String[] args) throws Exception {
@@ -113,7 +53,7 @@ public class GenerateEventsApplication {
 			IOUtils
 				.toString(
 					GenerateEventsApplication.class
-						.getResourceAsStream("/eu/dnetlib/dhp/oa/graph/merge_claims_parameters.json")));
+						.getResourceAsStream("/eu/dnetlib/dhp/broker/oa/generate_broker_events.json")));
 		parser.parseArgument(args);
 
 		final Boolean isSparkSessionManaged = Optional
@@ -135,6 +75,8 @@ public class GenerateEventsApplication {
 		log.info("dedupConfigProfileId: {}", dedupConfigProfileId);
 
 		final SparkConf conf = new SparkConf();
+		conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
+		conf.registerKryoClasses(BrokerConstants.getModelClasses());
 
 		final DedupConfig dedupConfig = loadDedupConfig(isLookupUrl, dedupConfigProfileId);
 
@@ -142,17 +84,16 @@ public class GenerateEventsApplication {
 
 			removeOutputDir(spark, eventsPath);
 
-			final Dataset<Event> all = spark.emptyDataset(Encoders.kryo(Event.class));
-
-			for (final Class<? extends Result> r1 : BrokerConstants.RESULT_CLASSES) {
-				all.union(generateSimpleEvents(spark, graphPath, r1, dedupConfig));
-
-				for (final Class<? extends Result> r2 : BrokerConstants.RESULT_CLASSES) {
-					all.union(generateRelationEvents(spark, graphPath, r1, r2, dedupConfig));
-				}
-			}
-
-			all.write().mode(SaveMode.Overwrite).json(eventsPath);
+			spark
+				.emptyDataset(Encoders.kryo(Event.class))
+				.union(generateEvents(spark, graphPath, Publication.class, dedupConfig))
+				.union(generateEvents(spark, graphPath, eu.dnetlib.dhp.schema.oaf.Dataset.class, dedupConfig))
+				.union(generateEvents(spark, graphPath, Software.class, dedupConfig))
+				.union(generateEvents(spark, graphPath, OtherResearchProduct.class, dedupConfig))
+				.write()
+				.mode(SaveMode.Overwrite)
+				.option("compression", "gzip")
+				.json(eventsPath);
 		});
 	}
 
@@ -161,203 +102,86 @@ public class GenerateEventsApplication {
 		HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration());
 	}
 
-	private static Dataset<Event> generateSimpleEvents(final SparkSession spark,
+	private static <SRC extends Result> Dataset<Event> generateEvents(
+		final SparkSession spark,
 		final String graphPath,
-		final Class<? extends Result> resultClazz,
+		final Class<SRC> sourceClass,
 		final DedupConfig dedupConfig) {
 
-		final Dataset<Result> results = readPath(
-			spark, graphPath + "/" + resultClazz.getSimpleName().toLowerCase(), Result.class)
-			.filter(r -> r.getDataInfo().getDeletedbyinference());
+		final Dataset<OpenaireBrokerResult> results = expandResultsWithRelations(spark, graphPath, sourceClass);
 
 		final Dataset<Relation> mergedRels = readPath(spark, graphPath + "/relation", Relation.class)
 			.filter(r -> r.getRelClass().equals(BrokerConstants.IS_MERGED_IN_CLASS));
 
+		final TypedColumn<Tuple2<OpenaireBrokerResult, Relation>, ResultGroup> aggr = new ResultAggregator()
+			.toColumn();
+
 		return results
-			.joinWith(mergedRels, results.col("id").equalTo(mergedRels.col("source")), "inner")
-			.groupByKey((MapFunction<Tuple2<Result, Relation>, String>) t -> t._2.getTarget(), Encoders.STRING())
-			.agg(resultAggrTypedColumn)
+			.joinWith(mergedRels, results.col("result.id").equalTo(mergedRels.col("source")), "inner")
+			.groupByKey(
+				(MapFunction<Tuple2<OpenaireBrokerResult, Relation>, String>) t -> t._2.getTarget(), Encoders.STRING())
+			.agg(aggr)
 			.map((MapFunction<Tuple2<String, ResultGroup>, ResultGroup>) t -> t._2, Encoders.kryo(ResultGroup.class))
 			.filter(ResultGroup::isValid)
 			.map(
-				(MapFunction<ResultGroup, EventGroup>) g -> GenerateEventsApplication
-					.generateSimpleEvents(g, dedupConfig),
+				(MapFunction<ResultGroup, EventGroup>) g -> EventFinder.generateEvents(g, dedupConfig),
 				Encoders.kryo(EventGroup.class))
 			.flatMap(group -> group.getData().iterator(), Encoders.kryo(Event.class));
 	}
 
-	private static EventGroup generateSimpleEvents(final ResultGroup results, final DedupConfig dedupConfig) {
-		final List<UpdateInfo<?>> list = new ArrayList<>();
-
-		for (final Result target : results.getData()) {
-			list.addAll(enrichMissingAbstract.searchUpdatesForRecord(target, results.getData(), dedupConfig));
-			list.addAll(enrichMissingAuthorOrcid.searchUpdatesForRecord(target, results.getData(), dedupConfig));
-			list.addAll(enrichMissingOpenAccess.searchUpdatesForRecord(target, results.getData(), dedupConfig));
-			list.addAll(enrichMissingPid.searchUpdatesForRecord(target, results.getData(), dedupConfig));
-			list.addAll(enrichMissingPublicationDate.searchUpdatesForRecord(target, results.getData(), dedupConfig));
-			list.addAll(enrichMissingSubject.searchUpdatesForRecord(target, results.getData(), dedupConfig));
-			list.addAll(enrichMoreOpenAccess.searchUpdatesForRecord(target, results.getData(), dedupConfig));
-			list.addAll(enrichMorePid.searchUpdatesForRecord(target, results.getData(), dedupConfig));
-			list.addAll(enrichMoreSubject.searchUpdatesForRecord(target, results.getData(), dedupConfig));
-		}
-
-		final EventGroup events = new EventGroup();
-		list.stream().map(EventFactory::newBrokerEvent).forEach(events::addElement);
-		return events;
-	}
-
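Note on the rewrite of the main loop above: Spark Datasets are immutable, so the removed code's bare calls to all.union(...) discarded every result and always wrote an empty dataset. A minimal sketch of the pitfall (illustrative names; `spark` is the active SparkSession, `more` any Dataset<Event>):

	Dataset<Event> all = spark.emptyDataset(Encoders.kryo(Event.class));
	all.union(more);       // no-op as written: the returned Dataset is discarded, `all` stays empty
	all = all.union(more); // effective: reassign, or chain the unions as the new code does
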
-	private static Dataset<Event> generateRelationEvents(
+	private static <SRC extends Result> Dataset<OpenaireBrokerResult> expandResultsWithRelations(
 		final SparkSession spark,
 		final String graphPath,
-		final Class<? extends Result> sourceClass,
-		final Class<? extends Result> targetClass,
-		final DedupConfig dedupConfig) {
-
-		final Dataset<Result> sources = readPath(
-			spark, graphPath + "/" + sourceClass.getSimpleName().toLowerCase(), Result.class)
-			.filter(r -> r.getDataInfo().getDeletedbyinference());
-
-		final Dataset<? extends Result> targets = readPath(
-			spark, graphPath + "/" + sourceClass.getSimpleName().toLowerCase(), targetClass);
-
-		final Dataset<Relation> mergedRels = readPath(spark, graphPath + "/relation", Relation.class)
-			.filter(r -> r.getRelClass().equals(BrokerConstants.IS_MERGED_IN_CLASS));
+		final Class<SRC> sourceClass) {
+		final Dataset<Project> projects = readPath(spark, graphPath + "/project", Project.class);
+		final Dataset<eu.dnetlib.dhp.schema.oaf.Dataset> datasets = readPath(
+			spark, graphPath + "/dataset", eu.dnetlib.dhp.schema.oaf.Dataset.class);
+		final Dataset<Software> softwares = readPath(spark, graphPath + "/software", Software.class);
+		final Dataset<Publication> publications = readPath(spark, graphPath + "/publication", Publication.class);
 
 		final Dataset<Relation> rels = readPath(spark, graphPath + "/relation", Relation.class)
-			.filter(r -> !r.getRelClass().equals(BrokerConstants.IS_MERGED_IN_CLASS));
+			.filter(r -> !r.getRelClass().equals(BrokerConstants.IS_MERGED_IN_CLASS))
+			.cache();
 
-		final Dataset<ResultGroup> duplicates = sources
-			.joinWith(mergedRels, sources.col("id").equalTo(rels.col("source")), "inner")
-			.groupByKey((MapFunction<Tuple2<Result, Relation>, String>) t -> t._2.getTarget(), Encoders.STRING())
-			.agg(resultAggrTypedColumn)
-			.map((MapFunction<Tuple2<String, ResultGroup>, ResultGroup>) t -> t._2, Encoders.kryo(ResultGroup.class))
-			.filter(ResultGroup::isValid);
+		final Dataset<OpenaireBrokerResult> r0 = readPath(
+			spark, graphPath + "/" + sourceClass.getSimpleName().toLowerCase(), Result.class)
+			.filter(r -> !r.getDataInfo().getDeletedbyinference())
+			.map(ConversionUtils::oafResultToBrokerResult, Encoders.kryo(OpenaireBrokerResult.class));
 
-		if (targetClass == Project.class) {
-			// TODO join using: generateProjectsEvents
-		} else if (targetClass == Software.class) {
-			// TODO join using: generateSoftwareEvents
-		} else if (targetClass == Publication.class) {
-			// TODO join using: generatePublicationRelatedEvents
-		} else if (targetClass == eu.dnetlib.dhp.schema.oaf.Dataset.class) {
-			// TODO join using: generateDatasetRelatedEvents
-		}
+		final Dataset<OpenaireBrokerResult> r1 = join(r0, rels, relatedEntities(projects, rels, RelatedProject.class));
+		final Dataset<OpenaireBrokerResult> r2 = join(
+			r1, rels, relatedEntities(softwares, rels, eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedSoftware.class));
+		final Dataset<OpenaireBrokerResult> r3 = join(
+			r2, rels, relatedEntities(datasets, rels, eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedDataset.class));
+		final Dataset<OpenaireBrokerResult> r4 = join(
+			r3, rels, relatedEntities(publications, rels, eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedPublication.class));
 
-		return null;
+		return r4;
 	}
 
-	private List<Event> generateProjectsEvents(final Collection<Pair<Result, List<Project>>> childrenWithProjects,
-		final DedupConfig dedupConfig) {
-		final List<UpdateInfo<?>> list = new ArrayList<>();
-
-		for (final Pair<Result, List<Project>> target : childrenWithProjects) {
-			list.addAll(enrichMissingProject.searchUpdatesForRecord(target, childrenWithProjects, dedupConfig));
-			list.addAll(enrichMoreProject.searchUpdatesForRecord(target, childrenWithProjects, dedupConfig));
-		}
-
-		return list.stream().map(EventFactory::newBrokerEvent).collect(Collectors.toList());
+	private static <T, RT> Dataset<RT> relatedEntities(final Dataset<T> targets,
+		final Dataset<Relation> rels,
+		final Class<RT> clazz) {
+		return rels
+			.joinWith(targets, targets.col("id").equalTo(rels.col("target")), "inner")
+			.map(
+				t -> RelatedEntityFactory.newRelatedEntity(t._1.getSource(), t._1.getRelType(), t._2, clazz),
+				Encoders.kryo(clazz));
 	}
 
-	private List<Event> generateSoftwareEvents(final Collection<Pair<Result, List<Software>>> childrenWithSoftwares,
-		final DedupConfig dedupConfig) {
-		final List<UpdateInfo<?>> list = new ArrayList<>();
+	private static <T> Dataset<OpenaireBrokerResult> join(final Dataset<OpenaireBrokerResult> sources,
+		final Dataset<Relation> rels,
+		final Dataset<T> typedRels) {
 
-		for (final Pair<Result, List<Software>> target : childrenWithSoftwares) {
-			list.addAll(enrichMissingSoftware.searchUpdatesForRecord(target, childrenWithSoftwares, dedupConfig));
-			list.addAll(enrichMoreSoftware.searchUpdatesForRecord(target, childrenWithSoftwares, dedupConfig));
-		}
-		return list.stream().map(EventFactory::newBrokerEvent).collect(Collectors.toList());
-	}
-
-	private List<Event> generatePublicationRelatedEvents(final String relType,
-		final Collection<Pair<Result, Map<String, List<Publication>>>> childrenWithRels,
-		final DedupConfig dedupConfig) {
-
-		final List<UpdateInfo<?>> list = new ArrayList<>();
-
-		final List<Pair<Result, List<Publication>>> cleanedChildrens = childrenWithRels
-			.stream()
-			.filter(p -> p.getRight().containsKey(relType))
-			.map(p -> Pair.of(p.getLeft(), p.getRight().get(relType)))
-			.filter(p -> p.getRight().size() > 0)
-			.collect(Collectors.toList());
-
-		for (final Pair<Result, List<Publication>> target : cleanedChildrens) {
-			if (relType.equals("isRelatedTo")) {
-				list
-					.addAll(
-						enrichMisissingPublicationIsRelatedTo
-							.searchUpdatesForRecord(target, cleanedChildrens, dedupConfig));
-			} else if (relType.equals("references")) {
-				list
-					.addAll(
-						enrichMissingPublicationReferences
-							.searchUpdatesForRecord(target, cleanedChildrens, dedupConfig));
-			} else if (relType.equals("isReferencedBy")) {
-				list
-					.addAll(
-						enrichMissingPublicationIsReferencedBy
-							.searchUpdatesForRecord(target, cleanedChildrens, dedupConfig));
-			} else if (relType.equals("isSupplementedTo")) {
-				list
-					.addAll(
-						enrichMissingPublicationIsSupplementedTo
-							.searchUpdatesForRecord(target, cleanedChildrens, dedupConfig));
-			} else if (relType.equals("isSupplementedBy")) {
-				list
-					.addAll(
-						enrichMissingPublicationIsSupplementedBy
							.searchUpdatesForRecord(target, cleanedChildrens, dedupConfig));
-			}
-		}
-
-		return list.stream().map(EventFactory::newBrokerEvent).collect(Collectors.toList());
-
-	}
-
-	private List<Event> generateDatasetRelatedEvents(final String relType,
-		final Collection<Pair<Result, Map<String, List<eu.dnetlib.dhp.schema.oaf.Dataset>>>> childrenWithRels,
-		final DedupConfig dedupConfig) {
-
-		final List<UpdateInfo<?>> list = new ArrayList<>();
-
-		final List<Pair<Result, List<eu.dnetlib.dhp.schema.oaf.Dataset>>> cleanedChildrens = childrenWithRels
-			.stream()
-			.filter(p -> p.getRight().containsKey(relType))
-			.map(p -> Pair.of(p.getLeft(), p.getRight().get(relType)))
-			.filter(p -> p.getRight().size() > 0)
-			.collect(Collectors.toList());
-
-		for (final Pair<Result, List<eu.dnetlib.dhp.schema.oaf.Dataset>> target : cleanedChildrens) {
-			if (relType.equals("isRelatedTo")) {
-				list
-					.addAll(
-						enrichMisissingDatasetIsRelatedTo
-							.searchUpdatesForRecord(target, cleanedChildrens, dedupConfig));
-			} else if (relType.equals("references")) {
-				list
-					.addAll(
-						enrichMissingDatasetReferences.searchUpdatesForRecord(target, cleanedChildrens, dedupConfig));
-			} else if (relType.equals("isReferencedBy")) {
-				list
-					.addAll(
-						enrichMissingDatasetIsReferencedBy
-							.searchUpdatesForRecord(target, cleanedChildrens, dedupConfig));
-			} else if (relType.equals("isSupplementedTo")) {
-				list
-					.addAll(
-						enrichMissingDatasetIsSupplementedTo
-							.searchUpdatesForRecord(target, cleanedChildrens, dedupConfig));
-			} else if (relType.equals("isSupplementedBy")) {
-				list
-					.addAll(
-						enrichMissingDatasetIsSupplementedBy
-							.searchUpdatesForRecord(target, cleanedChildrens, dedupConfig));
-			}
-		}
-
-		return list.stream().map(EventFactory::newBrokerEvent).collect(Collectors.toList());
+		final TypedColumn<Tuple2<OpenaireBrokerResult, T>, OpenaireBrokerResult> aggr = new OpenaireBrokerResultAggregator<T>()
+			.toColumn();
+
+		return sources
+			.joinWith(typedRels, sources.col("result.id").equalTo(typedRels.col("source")), "left_outer")
+			.groupByKey(
+				(MapFunction<Tuple2<OpenaireBrokerResult, T>, String>) t -> t._1.getOpenaireId(), Encoders.STRING())
+			.agg(aggr)
+			.map(t -> t._2, Encoders.kryo(OpenaireBrokerResult.class));
 	}
 
 	public static <R> Dataset<R> readPath(
@@ -386,7 +210,6 @@ public class GenerateEventsApplication {
 		// dedupConfig.getWf().setConfigurationId("???");
 
 		return dedupConfig;
-
 	}
 }
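Taken together, expandResultsWithRelations and the two generic helpers above implement a left-outer enrichment: every broker result is grouped with its typed relations, and results with no relations at all survive unchanged. A sketch of the composition (assuming RelatedSoftware sits alongside RelatedProject in the aggregators.withRels package, as the corrected joins above do):

	// enrich r0 with its related projects, then with its related software;
	// the left_outer join preserves results that have no relations
	final Dataset<RelatedProject> relProjects = relatedEntities(projects, rels, RelatedProject.class);
	final Dataset<RelatedSoftware> relSoftwares = relatedEntities(softwares, rels, RelatedSoftware.class);
	final Dataset<OpenaireBrokerResult> enriched = join(join(r0, rels, relProjects), rels, relSoftwares);
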
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/UpdateMatcher.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/UpdateMatcher.java
index 95d43ae68..13aeefb2f 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/UpdateMatcher.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/UpdateMatcher.java
@@ -6,30 +6,45 @@
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.function.BiConsumer;
+import java.util.function.Function;
 
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.lang3.StringUtils;
 
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
+import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
-import eu.dnetlib.dhp.schema.oaf.Field;
 import eu.dnetlib.pace.config.DedupConfig;
 
-public abstract class UpdateMatcher<K, T> {
+public abstract class UpdateMatcher<T> {
 
 	private final boolean multipleUpdate;
+	private final Function<T, Topic> topicFunction;
+	private final BiConsumer<OpenaireBrokerResult, T> compileHighlightFunction;
+	private final Function<T, String> highlightToStringFunction;
 
-	public UpdateMatcher(final boolean multipleUpdate) {
+	public UpdateMatcher(final boolean multipleUpdate, final Function<T, Topic> topicFunction,
+		final BiConsumer<OpenaireBrokerResult, T> compileHighlightFunction,
+		final Function<T, String> highlightToStringFunction) {
 		this.multipleUpdate = multipleUpdate;
+		this.topicFunction = topicFunction;
+		this.compileHighlightFunction = compileHighlightFunction;
+		this.highlightToStringFunction = highlightToStringFunction;
 	}
 
-	public Collection<UpdateInfo<T>> searchUpdatesForRecord(final K res, final Collection<K> others,
+	public Collection<UpdateInfo<T>> searchUpdatesForRecord(final OpenaireBrokerResult res,
+		final Collection<OpenaireBrokerResult> others,
 		final DedupConfig dedupConfig) {
 
 		final Map<String, UpdateInfo<T>> infoMap = new HashMap<>();
 
-		for (final K source : others) {
+		for (final OpenaireBrokerResult source : others) {
 			if (source != res) {
-				for (final UpdateInfo<T> info : findUpdates(source, res, dedupConfig)) {
+				for (final T hl : findDifferences(source, res)) {
+					final Topic topic = getTopicFunction().apply(hl);
+					final UpdateInfo<T> info = new UpdateInfo<>(topic, hl, source, res, getCompileHighlightFunction(),
+						getHighlightToStringFunction(), dedupConfig);
 					final String s = DigestUtils.md5Hex(info.getHighlightValueAsString());
 					if (!infoMap.containsKey(s) || infoMap.get(s).getTrust() < info.getTrust()) {
 					} else {
@@ -53,14 +68,30 @@
 		}
 	}
 
-	protected abstract List<UpdateInfo<T>> findUpdates(K source, K target, DedupConfig dedupConfig);
+	protected abstract List<T> findDifferences(OpenaireBrokerResult source, OpenaireBrokerResult target);
 
-	protected static boolean isMissing(final List<Field<String>> list) {
-		return list == null || list.isEmpty() || StringUtils.isBlank(list.get(0).getValue());
+	protected static boolean isMissing(final List<String> list) {
+		return list == null || list.isEmpty() || StringUtils.isBlank(list.get(0));
 	}
 
-	protected boolean isMissing(final Field<String> field) {
-		return field == null || StringUtils.isBlank(field.getValue());
+	protected boolean isMissing(final String field) {
+		return StringUtils.isBlank(field);
+	}
+
+	public boolean isMultipleUpdate() {
+		return multipleUpdate;
+	}
+
+	public Function<T, Topic> getTopicFunction() {
+		return topicFunction;
+	}
+
+	public BiConsumer<OpenaireBrokerResult, T> getCompileHighlightFunction() {
+		return compileHighlightFunction;
+	}
+
+	public Function<T, String> getHighlightToStringFunction() {
+		return highlightToStringFunction;
 	}
 }
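With the refactoring above, all per-topic behaviour lives in the three functions handed to super(..), and a concrete matcher reduces to constructor wiring plus findDifferences. A minimal hypothetical example (class name invented for illustration; imports omitted; the accessors are the ones used by the matchers later in this patch):

	// Hypothetical matcher: proposes the source's abstract when the target has none.
	public class EnrichMissingSummary extends UpdateMatcher<String> {

		public EnrichMissingSummary() {
			super(false,
				s -> Topic.ENRICH_MISSING_ABSTRACT,     // topicFunction: highlight -> Topic
				(res, s) -> res.getAbstracts().add(s),  // compileHighlightFunction: applies the highlight to the payload
				s -> s);                                // highlightToStringFunction: key used for trust-based dedup
		}

		@Override
		protected List<String> findDifferences(final OpenaireBrokerResult source, final OpenaireBrokerResult target) {
			return target.getAbstracts().isEmpty() ? source.getAbstracts() : new ArrayList<>();
		}
	}
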
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/AbstractEnrichMissingDataset.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/AbstractEnrichMissingDataset.java
index 3cf7b18f9..7a58f986b 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/AbstractEnrichMissingDataset.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/AbstractEnrichMissingDataset.java
@@ -5,62 +5,41 @@
 import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;
 
-import org.apache.commons.lang3.tuple.Pair;
-
+import eu.dnetlib.broker.objects.Dataset;
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
-import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
-import eu.dnetlib.dhp.schema.oaf.Dataset;
-import eu.dnetlib.dhp.schema.oaf.Result;
-import eu.dnetlib.pace.config.DedupConfig;
 
 public abstract class AbstractEnrichMissingDataset
-	extends UpdateMatcher<Pair<Result, List<Dataset>>, eu.dnetlib.broker.objects.Dataset> {
-
-	private final Topic topic;
+	extends UpdateMatcher<Dataset> {
 
 	public AbstractEnrichMissingDataset(final Topic topic) {
-		super(true);
-		this.topic = topic;
+		super(true,
+			rel -> topic,
+			(p, rel) -> p.getDatasets().add(rel),
+			rel -> rel.getOriginalId());
 	}
 
+	protected abstract boolean filterByType(String relType);
+
 	@Override
-	protected final List<UpdateInfo<eu.dnetlib.broker.objects.Dataset>> findUpdates(
-		final Pair<Result, List<Dataset>> source,
-		final Pair<Result, List<Dataset>> target,
-		final DedupConfig dedupConfig) {
+	protected final List<Dataset> findDifferences(final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {
 
 		final Set<String> existingDatasets = target
-			.getRight()
+			.getDatasets()
 			.stream()
-			.map(Dataset::getId)
+			.filter(rel -> filterByType(rel.getRelType()))
+			.map(Dataset::getOriginalId)
 			.collect(Collectors.toSet());
 
 		return source
-			.getRight()
+			.getDatasets()
 			.stream()
-			.filter(d -> !existingDatasets.contains(d.getId()))
-			.map(ConversionUtils::oafDatasetToBrokerDataset)
-			.map(i -> generateUpdateInfo(i, source, target, dedupConfig))
+			.filter(rel -> filterByType(rel.getRelType()))
+			.filter(d -> !existingDatasets.contains(d.getOriginalId()))
 			.collect(Collectors.toList());
 	}
 
-	protected final UpdateInfo<eu.dnetlib.broker.objects.Dataset> generateUpdateInfo(
-		final eu.dnetlib.broker.objects.Dataset highlightValue,
-		final Pair<Result, List<Dataset>> source,
-		final Pair<Result, List<Dataset>> target,
-		final DedupConfig dedupConfig) {
-		return new UpdateInfo<>(
-			getTopic(),
-			highlightValue, source.getLeft(), target.getLeft(),
-			(p, rel) -> p.getDatasets().add(rel),
-			rel -> rel.getInstances().get(0).getUrl(),
-			dedupConfig);
-	}
-
-	public Topic getTopic() {
-		return topic;
-	}
 }
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetIsReferencedBy.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetIsReferencedBy.java
index 74ce761f4..21786687e 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetIsReferencedBy.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetIsReferencedBy.java
@@ -9,4 +9,9 @@ public class EnrichMissingDatasetIsReferencedBy extends AbstractEnrichMissingDat
 		super(Topic.ENRICH_MISSING_DATASET_IS_REFERENCED_BY);
 	}
 
+	@Override
+	protected boolean filterByType(final String relType) {
+		return relType.equals("isReferencedBy");
+	}
+
 }
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetIsRelatedTo.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetIsRelatedTo.java
index 05a891059..0f3739434 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetIsRelatedTo.java
+++ 
b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetIsRelatedTo.java @@ -9,4 +9,9 @@ public class EnrichMissingDatasetIsRelatedTo extends AbstractEnrichMissingDatase super(Topic.ENRICH_MISSING_DATASET_IS_RELATED_TO); } + @Override + protected boolean filterByType(final String relType) { + return relType.equals("isRelatedTo"); + } + } diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetIsSupplementedBy.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetIsSupplementedBy.java index 23bd68fa1..cde227fee 100644 --- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetIsSupplementedBy.java +++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetIsSupplementedBy.java @@ -9,4 +9,9 @@ public class EnrichMissingDatasetIsSupplementedBy extends AbstractEnrichMissingD super(Topic.ENRICH_MISSING_DATASET_IS_SUPPLEMENTED_BY); } + @Override + protected boolean filterByType(final String relType) { + return relType.equals("isSupplementedBy"); + } + } diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetIsSupplementedTo.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetIsSupplementedTo.java index 03160b6f0..750165ff5 100644 --- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetIsSupplementedTo.java +++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetIsSupplementedTo.java @@ -9,4 +9,9 @@ public class EnrichMissingDatasetIsSupplementedTo extends AbstractEnrichMissingD super(Topic.ENRICH_MISSING_DATASET_IS_SUPPLEMENTED_TO); } + @Override + protected boolean filterByType(final String relType) { + return relType.equals("isSupplementedTo"); + } + } diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetReferences.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetReferences.java index bf1df053d..b1c0afe16 100644 --- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetReferences.java +++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedDatasets/EnrichMissingDatasetReferences.java @@ -9,4 +9,9 @@ public class EnrichMissingDatasetReferences extends AbstractEnrichMissingDataset super(Topic.ENRICH_MISSING_DATASET_REFERENCES); } + @Override + protected boolean filterByType(final String relType) { + return relType.equals("references"); + } + } diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedProjects/EnrichMissingProject.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedProjects/EnrichMissingProject.java index 22817a25d..fa5fde725 100644 --- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedProjects/EnrichMissingProject.java +++ 
b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedProjects/EnrichMissingProject.java @@ -1,54 +1,29 @@ package eu.dnetlib.dhp.broker.oa.matchers.relatedProjects; -import java.util.Arrays; +import java.util.ArrayList; import java.util.List; -import java.util.stream.Collectors; - -import org.apache.commons.lang3.tuple.Pair; +import eu.dnetlib.broker.objects.OpenaireBrokerResult; +import eu.dnetlib.broker.objects.Project; import eu.dnetlib.dhp.broker.model.Topic; import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher; -import eu.dnetlib.dhp.broker.oa.util.ConversionUtils; -import eu.dnetlib.dhp.broker.oa.util.UpdateInfo; -import eu.dnetlib.dhp.schema.oaf.Project; -import eu.dnetlib.dhp.schema.oaf.Result; -import eu.dnetlib.pace.config.DedupConfig; -public class EnrichMissingProject - extends UpdateMatcher>, eu.dnetlib.broker.objects.Project> { +public class EnrichMissingProject extends UpdateMatcher { public EnrichMissingProject() { - super(true); + super(true, + prj -> Topic.ENRICH_MISSING_PROJECT, + (p, prj) -> p.getProjects().add(prj), + prj -> prj.getFunder() + "::" + prj.getFundingProgram() + prj.getCode()); } @Override - protected List> findUpdates(final Pair> source, - final Pair> target, - final DedupConfig dedupConfig) { - - if (source.getRight().isEmpty()) { - return Arrays.asList(); + protected List findDifferences(final OpenaireBrokerResult source, final OpenaireBrokerResult target) { + if (target.getProjects().isEmpty()) { + return source.getProjects(); } else { - return target - .getRight() - .stream() - .map(ConversionUtils::oafProjectToBrokerProject) - .map(p -> generateUpdateInfo(p, source, target, dedupConfig)) - .collect(Collectors.toList()); + return new ArrayList<>(); } } - - public UpdateInfo generateUpdateInfo( - final eu.dnetlib.broker.objects.Project highlightValue, - final Pair> source, - final Pair> target, - final DedupConfig dedupConfig) { - return new UpdateInfo<>( - Topic.ENRICH_MISSING_PROJECT, - highlightValue, source.getLeft(), target.getLeft(), - (p, prj) -> p.getProjects().add(prj), - prj -> prj.getFunder() + "::" + prj.getFundingProgram() + prj.getCode(), dedupConfig); - } - } diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedProjects/EnrichMoreProject.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedProjects/EnrichMoreProject.java index 016bdd283..ca63aeb49 100644 --- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedProjects/EnrichMoreProject.java +++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedProjects/EnrichMoreProject.java @@ -5,52 +5,39 @@ import java.util.List; import java.util.Set; import java.util.stream.Collectors; -import org.apache.commons.lang3.tuple.Pair; - +import eu.dnetlib.broker.objects.OpenaireBrokerResult; +import eu.dnetlib.broker.objects.Project; import eu.dnetlib.dhp.broker.model.Topic; import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher; -import eu.dnetlib.dhp.broker.oa.util.ConversionUtils; -import eu.dnetlib.dhp.broker.oa.util.UpdateInfo; -import eu.dnetlib.dhp.schema.oaf.Project; -import eu.dnetlib.dhp.schema.oaf.Result; -import eu.dnetlib.pace.config.DedupConfig; -public class EnrichMoreProject extends UpdateMatcher>, eu.dnetlib.broker.objects.Project> { +public class EnrichMoreProject extends UpdateMatcher { public EnrichMoreProject() { - super(true); + super(true, + prj -> Topic.ENRICH_MORE_PROJECT, + (p, prj) 
-> p.getProjects().add(prj), + prj -> projectAsString(prj)); + } + + private static String projectAsString(final Project prj) { + return prj.getFunder() + "::" + prj.getFundingProgram() + "::" + prj.getCode(); } @Override - protected List> findUpdates(final Pair> source, - final Pair> target, - final DedupConfig dedupConfig) { + protected List findDifferences(final OpenaireBrokerResult source, + final OpenaireBrokerResult target) { - final Set existingProjects = source - .getRight() + final Set existingProjects = target + .getProjects() .stream() - .map(Project::getId) + .map(EnrichMoreProject::projectAsString) .collect(Collectors.toSet()); - return target - .getRight() + return source + .getProjects() .stream() - .filter(p -> !existingProjects.contains(p.getId())) - .map(ConversionUtils::oafProjectToBrokerProject) - .map(p -> generateUpdateInfo(p, source, target, dedupConfig)) + .filter(p -> !existingProjects.contains(projectAsString(p))) .collect(Collectors.toList()); } - public UpdateInfo generateUpdateInfo( - final eu.dnetlib.broker.objects.Project highlightValue, - final Pair> source, - final Pair> target, - final DedupConfig dedupConfig) { - return new UpdateInfo<>( - Topic.ENRICH_MORE_PROJECT, - highlightValue, source.getLeft(), target.getLeft(), - (p, prj) -> p.getProjects().add(prj), - prj -> prj.getFunder() + "::" + prj.getFundingProgram() + prj.getCode(), dedupConfig); - } - } diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/AbstractEnrichMissingPublication.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/AbstractEnrichMissingPublication.java index ec575e68d..300863949 100644 --- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/AbstractEnrichMissingPublication.java +++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/AbstractEnrichMissingPublication.java @@ -5,61 +5,41 @@ import java.util.List; import java.util.Set; import java.util.stream.Collectors; -import org.apache.commons.lang3.tuple.Pair; - +import eu.dnetlib.broker.objects.OpenaireBrokerResult; +import eu.dnetlib.broker.objects.Publication; import eu.dnetlib.dhp.broker.model.Topic; import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher; -import eu.dnetlib.dhp.broker.oa.util.ConversionUtils; -import eu.dnetlib.dhp.broker.oa.util.UpdateInfo; -import eu.dnetlib.dhp.schema.oaf.Publication; -import eu.dnetlib.dhp.schema.oaf.Result; -import eu.dnetlib.pace.config.DedupConfig; -public abstract class AbstractEnrichMissingPublication - extends UpdateMatcher>, eu.dnetlib.broker.objects.Publication> { - - private final Topic topic; +public abstract class AbstractEnrichMissingPublication extends UpdateMatcher { public AbstractEnrichMissingPublication(final Topic topic) { - super(true); - this.topic = topic; + super(true, + rel -> topic, + (p, rel) -> p.getPublications().add(rel), + rel -> rel.getOriginalId()); + } + protected abstract boolean filterByType(String relType); + @Override - protected final List> findUpdates( - final Pair> source, - final Pair> target, - final DedupConfig dedupConfig) { + protected final List findDifferences( + final OpenaireBrokerResult source, + final OpenaireBrokerResult target) { final Set existingPublications = target - .getRight() + .getPublications() .stream() - .map(Publication::getId) + .filter(rel -> filterByType(rel.getRelType())) + .map(Publication::getOriginalId) 
.collect(Collectors.toSet()); return source - .getRight() + .getPublications() .stream() - .filter(d -> !existingPublications.contains(d.getId())) - .map(ConversionUtils::oafResultToBrokerPublication) - .map(i -> generateUpdateInfo(i, source, target, dedupConfig)) + .filter(rel -> filterByType(rel.getRelType())) + .filter(p -> !existingPublications.contains(p.getOriginalId())) .collect(Collectors.toList()); - } - protected final UpdateInfo generateUpdateInfo( - final eu.dnetlib.broker.objects.Publication highlightValue, - final Pair> source, - final Pair> target, - final DedupConfig dedupConfig) { - return new UpdateInfo<>( - getTopic(), - highlightValue, source.getLeft(), target.getLeft(), - (p, rel) -> p.getPublications().add(rel), - rel -> rel.getInstances().get(0).getUrl(), dedupConfig); - } - - public Topic getTopic() { - return topic; - } } diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationIsReferencedBy.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationIsReferencedBy.java index 73fa8a45f..eebb5c1a6 100644 --- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationIsReferencedBy.java +++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationIsReferencedBy.java @@ -9,4 +9,8 @@ public class EnrichMissingPublicationIsReferencedBy extends AbstractEnrichMissin super(Topic.ENRICH_MISSING_PUBLICATION_IS_REFERENCED_BY); } + @Override + protected boolean filterByType(final String relType) { + return relType.equals("isReferencedBy"); + } } diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationIsRelatedTo.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationIsRelatedTo.java index 361ea3b34..a8aa550d4 100644 --- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationIsRelatedTo.java +++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationIsRelatedTo.java @@ -9,4 +9,9 @@ public class EnrichMissingPublicationIsRelatedTo extends AbstractEnrichMissingPu super(Topic.ENRICH_MISSING_PUBLICATION_IS_RELATED_TO); } + @Override + protected boolean filterByType(final String relType) { + return relType.equals("isRelatedTo"); + } + } diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationIsSupplementedBy.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationIsSupplementedBy.java index 7e8863b1e..762ac942e 100644 --- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationIsSupplementedBy.java +++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationIsSupplementedBy.java @@ -9,4 +9,8 @@ public class EnrichMissingPublicationIsSupplementedBy extends AbstractEnrichMiss super(Topic.ENRICH_MISSING_PUBLICATION_IS_SUPPLEMENTED_BY); } + @Override + protected boolean filterByType(final String relType) { + return 
relType.equals("isSupplementedBy");
+	}
 }
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationIsSupplementedTo.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationIsSupplementedTo.java
index dc4e51377..fc7196a01 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationIsSupplementedTo.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationIsSupplementedTo.java
@@ -9,4 +9,9 @@ public class EnrichMissingPublicationIsSupplementedTo extends AbstractEnrichMiss
 		super(Topic.ENRICH_MISSING_PUBLICATION_IS_SUPPLEMENTED_TO);
 	}
 
+	@Override
+	protected boolean filterByType(final String relType) {
+		return relType.equals("isSupplementedTo");
+	}
+
 }
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationReferences.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationReferences.java
index 5198098bc..da1994454 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationReferences.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedPublications/EnrichMissingPublicationReferences.java
@@ -9,4 +9,9 @@ public class EnrichMissingPublicationReferences extends AbstractEnrichMissingPub
 		super(Topic.ENRICH_MISSING_PUBLICATION_REFERENCES);
 	}
 
+	@Override
+	protected boolean filterByType(final String relType) {
+		return relType.equals("references");
+	}
+
 }
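Each relation-type subclass above is now just a Topic plus a one-line filterByType override, so supporting a further relation type follows the same template. A hypothetical example (the "isVersionOf" relType and the reused Topic constant are illustrative only, not part of this patch):

	public class EnrichMissingPublicationIsVersionOf extends AbstractEnrichMissingPublication {

		public EnrichMissingPublicationIsVersionOf() {
			// a real matcher would declare its own Topic constant
			super(Topic.ENRICH_MISSING_PUBLICATION_IS_RELATED_TO);
		}

		@Override
		protected boolean filterByType(final String relType) {
			return relType.equals("isVersionOf");
		}
	}
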
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedSoftware/EnrichMissingSoftware.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedSoftware/EnrichMissingSoftware.java
index 699d546ec..76ae061e6 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedSoftware/EnrichMissingSoftware.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedSoftware/EnrichMissingSoftware.java
@@ -1,55 +1,33 @@
 
 package eu.dnetlib.dhp.broker.oa.matchers.relatedSoftware;
 
-import java.util.Arrays;
+import java.util.ArrayList;
 import java.util.List;
-import java.util.stream.Collectors;
-
-import org.apache.commons.lang3.tuple.Pair;
 
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
-import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
-import eu.dnetlib.dhp.schema.oaf.Result;
-import eu.dnetlib.dhp.schema.oaf.Software;
-import eu.dnetlib.pace.config.DedupConfig;
 
 public class EnrichMissingSoftware
-	extends UpdateMatcher<Pair<Result, List<Software>>, eu.dnetlib.broker.objects.Software> {
+	extends UpdateMatcher<eu.dnetlib.broker.objects.Software> {
 
 	public EnrichMissingSoftware() {
-		super(true);
+		super(true,
+			s -> Topic.ENRICH_MISSING_SOFTWARE,
+			(p, s) -> p.getSoftwares().add(s),
+			s -> s.getName());
 	}
 
 	@Override
-	protected List<UpdateInfo<eu.dnetlib.broker.objects.Software>> findUpdates(
-		final Pair<Result, List<Software>> source,
-		final Pair<Result, List<Software>> target,
-		final DedupConfig dedupConfig) {
+	protected List<eu.dnetlib.broker.objects.Software> findDifferences(
+		final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {
 
-		if (source.getRight().isEmpty()) {
-			return Arrays.asList();
+		if (target.getSoftwares().isEmpty()) {
+			return source.getSoftwares();
 		} else {
-			return target
-				.getRight()
-				.stream()
-				.map(ConversionUtils::oafSoftwareToBrokerSoftware)
-				.map(p -> generateUpdateInfo(p, source, target, dedupConfig))
-				.collect(Collectors.toList());
+			return new ArrayList<>();
 		}
 	}
 
-	public UpdateInfo<eu.dnetlib.broker.objects.Software> generateUpdateInfo(
-		final eu.dnetlib.broker.objects.Software highlightValue,
-		final Pair<Result, List<Software>> source,
-		final Pair<Result, List<Software>> target,
-		final DedupConfig dedupConfig) {
-		return new UpdateInfo<>(
-			Topic.ENRICH_MISSING_SOFTWARE,
-			highlightValue, source.getLeft(), target.getLeft(),
-			(p, s) -> p.getSoftwares().add(s),
-			s -> s.getName(), dedupConfig);
-	}
-
 }
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedSoftware/EnrichMoreSoftware.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedSoftware/EnrichMoreSoftware.java
index 45631df20..ebd421b8e 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedSoftware/EnrichMoreSoftware.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/relatedSoftware/EnrichMoreSoftware.java
@@ -5,54 +5,36 @@
 import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;
 
-import org.apache.commons.lang3.tuple.Pair;
-
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
+import eu.dnetlib.broker.objects.Software;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
-import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
-import eu.dnetlib.dhp.schema.oaf.Result;
-import eu.dnetlib.dhp.schema.oaf.Software;
-import eu.dnetlib.pace.config.DedupConfig;
 
-public class EnrichMoreSoftware
-	extends UpdateMatcher<Pair<Result, List<Software>>, eu.dnetlib.broker.objects.Software> {
+public class EnrichMoreSoftware extends UpdateMatcher<Software> {
 
 	public EnrichMoreSoftware() {
-		super(true);
+		super(true,
+			s -> Topic.ENRICH_MORE_SOFTWARE,
+			(p, s) -> p.getSoftwares().add(s),
+			s -> s.getName());
 	}
 
 	@Override
-	protected List<UpdateInfo<eu.dnetlib.broker.objects.Software>> findUpdates(
-		final Pair<Result, List<Software>> source,
-		final Pair<Result, List<Software>> target,
-		final DedupConfig dedupConfig) {
+	protected List<Software> findDifferences(
+		final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {
 
-		final Set<String> existingSoftwares = source
-			.getRight()
+		final Set<String> existingSoftwares = target
+			.getSoftwares()
 			.stream()
-			.map(Software::getId)
+			.map(Software::getName)
 			.collect(Collectors.toSet());
 
-		return target
-			.getRight()
+		return source
+			.getSoftwares()
 			.stream()
-			.filter(p -> !existingSoftwares.contains(p.getId()))
-			.map(ConversionUtils::oafSoftwareToBrokerSoftware)
-			.map(p -> generateUpdateInfo(p, source, target, dedupConfig))
+			.filter(p -> !existingSoftwares.contains(p.getName()))
 			.collect(Collectors.toList());
 	}
 
-	public UpdateInfo<eu.dnetlib.broker.objects.Software> generateUpdateInfo(
-		final eu.dnetlib.broker.objects.Software highlightValue,
-		final Pair<Result, List<Software>> source,
-		final Pair<Result, List<Software>> target,
-		final DedupConfig dedupConfig) {
-		return new UpdateInfo<>(
-			Topic.ENRICH_MORE_SOFTWARE,
-			highlightValue, source.getLeft(), target.getLeft(),
-			(p, s) -> p.getSoftwares().add(s),
-			s -> s.getName(), dedupConfig);
-	}
-
 }
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingAbstract.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingAbstract.java
index 7dc340b3c..b2cbbce2c 100644
--- 
a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingAbstract.java +++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingAbstract.java @@ -5,37 +5,26 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import eu.dnetlib.broker.objects.OpenaireBrokerResult; import eu.dnetlib.dhp.broker.model.Topic; import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher; -import eu.dnetlib.dhp.broker.oa.util.UpdateInfo; -import eu.dnetlib.dhp.schema.oaf.Result; -import eu.dnetlib.pace.config.DedupConfig; -public class EnrichMissingAbstract extends UpdateMatcher { +public class EnrichMissingAbstract extends UpdateMatcher { public EnrichMissingAbstract() { - super(false); + super(false, + s -> Topic.ENRICH_MISSING_ABSTRACT, + (p, s) -> p.getAbstracts().add(s), + s -> s); } @Override - protected List> findUpdates(final Result source, final Result target, - final DedupConfig dedupConfig) { - if (isMissing(target.getDescription()) && !isMissing(source.getDescription())) { - return Arrays - .asList(generateUpdateInfo(source.getDescription().get(0).getValue(), source, target, dedupConfig)); + protected List findDifferences(final OpenaireBrokerResult source, final OpenaireBrokerResult target) { + if (isMissing(target.getAbstracts()) && !isMissing(source.getAbstracts())) { + return Arrays.asList(source.getAbstracts().get(0)); + } else { + return new ArrayList<>(); } - return new ArrayList<>(); - } - - public UpdateInfo generateUpdateInfo(final String highlightValue, - final Result source, - final Result target, - final DedupConfig dedupConfig) { - return new UpdateInfo<>( - Topic.ENRICH_MISSING_ABSTRACT, - highlightValue, source, target, - (p, s) -> p.getAbstracts().add(s), - s -> s, dedupConfig); } } diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingAuthorOrcid.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingAuthorOrcid.java index 7a1677ae2..c4b96e67b 100644 --- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingAuthorOrcid.java +++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingAuthorOrcid.java @@ -1,40 +1,43 @@ package eu.dnetlib.dhp.broker.oa.matchers.simple; -import java.util.Arrays; import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; -import org.apache.commons.lang3.tuple.Pair; +import org.apache.commons.lang3.StringUtils; +import eu.dnetlib.broker.objects.Author; +import eu.dnetlib.broker.objects.OpenaireBrokerResult; import eu.dnetlib.dhp.broker.model.Topic; import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher; -import eu.dnetlib.dhp.broker.oa.util.UpdateInfo; -import eu.dnetlib.dhp.schema.oaf.Result; -import eu.dnetlib.pace.config.DedupConfig; -public class EnrichMissingAuthorOrcid extends UpdateMatcher> { +public class EnrichMissingAuthorOrcid extends UpdateMatcher { public EnrichMissingAuthorOrcid() { - super(true); + super(true, + aut -> Topic.ENRICH_MISSING_AUTHOR_ORCID, + (p, aut) -> p.getCreators().add(aut), + aut -> aut.getOrcid()); } @Override - protected List>> findUpdates(final Result source, final Result target, - final DedupConfig dedupConfig) { - // TODO - // return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f)); - return Arrays.asList(); - } + protected List findDifferences(final OpenaireBrokerResult 
source, + final OpenaireBrokerResult target) { + + final Set existingOrcids = target + .getCreators() + .stream() + .map(Author::getOrcid) + .filter(StringUtils::isNotBlank) + .collect(Collectors.toSet()); + + return source + .getCreators() + .stream() + .filter(a -> StringUtils.isNotBlank(a.getOrcid())) + .filter(a -> !existingOrcids.contains(a.getOrcid())) + .collect(Collectors.toList()); - public UpdateInfo> generateUpdateInfo(final Pair highlightValue, - final Result source, - final Result target, - final DedupConfig dedupConfig) { - return new UpdateInfo<>( - Topic.ENRICH_MISSING_AUTHOR_ORCID, - highlightValue, source, target, - (p, pair) -> p.getCreators().add(pair.getLeft() + " - ORCID: " + pair.getRight()), - pair -> pair.getLeft() + "::" + pair.getRight(), - dedupConfig); } } diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingOpenAccess.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingOpenAccess.java index d14490ba8..e870cf1fa 100644 --- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingOpenAccess.java +++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingOpenAccess.java @@ -6,27 +6,27 @@ import java.util.List; import java.util.stream.Collectors; import eu.dnetlib.broker.objects.Instance; +import eu.dnetlib.broker.objects.OpenaireBrokerResult; import eu.dnetlib.dhp.broker.model.Topic; import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher; import eu.dnetlib.dhp.broker.oa.util.BrokerConstants; -import eu.dnetlib.dhp.broker.oa.util.ConversionUtils; -import eu.dnetlib.dhp.broker.oa.util.UpdateInfo; -import eu.dnetlib.dhp.schema.oaf.Result; -import eu.dnetlib.pace.config.DedupConfig; -public class EnrichMissingOpenAccess extends UpdateMatcher { +public class EnrichMissingOpenAccess extends UpdateMatcher { public EnrichMissingOpenAccess() { - super(true); + super(true, + i -> Topic.ENRICH_MISSING_OA_VERSION, + (p, i) -> p.getInstances().add(i), + Instance::getUrl); } @Override - protected List> findUpdates(final Result source, final Result target, - final DedupConfig dedupConfig) { + protected List findDifferences(final OpenaireBrokerResult source, + final OpenaireBrokerResult target) { final long count = target - .getInstance() + .getInstances() .stream() - .map(i -> i.getAccessright().getClassid()) + .map(Instance::getLicense) .filter(right -> right.equals(BrokerConstants.OPEN_ACCESS)) .count(); @@ -35,24 +35,10 @@ public class EnrichMissingOpenAccess extends UpdateMatcher { } return source - .getInstance() + .getInstances() .stream() - .filter(i -> i.getAccessright().getClassid().equals(BrokerConstants.OPEN_ACCESS)) - .map(ConversionUtils::oafInstanceToBrokerInstances) - .flatMap(List::stream) - .map(i -> generateUpdateInfo(i, source, target, dedupConfig)) + .filter(i -> i.getLicense().equals(BrokerConstants.OPEN_ACCESS)) .collect(Collectors.toList()); } - public UpdateInfo generateUpdateInfo(final Instance highlightValue, - final Result source, - final Result target, - final DedupConfig dedupConfig) { - return new UpdateInfo<>( - Topic.ENRICH_MISSING_OA_VERSION, - highlightValue, source, target, - (p, i) -> p.getInstances().add(i), - Instance::getUrl, dedupConfig); - } - } diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingPid.java 
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingPid.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingPid.java
index 20303ec1b..cc72d9fa9 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingPid.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingPid.java
@@ -5,44 +5,33 @@
 import java.util.Arrays;
 import java.util.List;
 import java.util.stream.Collectors;
 
-import eu.dnetlib.broker.objects.Pid;
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
+import eu.dnetlib.broker.objects.TypedValue;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
-import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
-import eu.dnetlib.dhp.schema.oaf.Result;
-import eu.dnetlib.pace.config.DedupConfig;
 
-public class EnrichMissingPid extends UpdateMatcher<Result, Pid> {
+public class EnrichMissingPid extends UpdateMatcher<TypedValue> {
 
 	public EnrichMissingPid() {
-		super(true);
+		super(true,
+			pid -> Topic.ENRICH_MISSING_PID,
+			(p, pid) -> p.getPids().add(pid),
+			pid -> pid.getType() + "::" + pid.getValue());
 	}
 
 	@Override
-	protected List<UpdateInfo<Pid>> findUpdates(final Result source, final Result target,
-		final DedupConfig dedupConfig) {
-		final long count = target.getPid().size();
+	protected List<TypedValue> findDifferences(final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {
+		final long count = target.getPids().size();
 
 		if (count > 0) {
 			return Arrays.asList();
 		}
 
 		return source
-			.getPid()
+			.getPids()
 			.stream()
-			.map(ConversionUtils::oafPidToBrokerPid)
-			.map(i -> generateUpdateInfo(i, source, target, dedupConfig))
 			.collect(Collectors.toList());
 	}
 
-	public UpdateInfo<Pid> generateUpdateInfo(final Pid highlightValue, final Result source, final Result target,
-		final DedupConfig dedupConfig) {
-		return new UpdateInfo<>(
-			Topic.ENRICH_MISSING_PID,
-			highlightValue, source, target,
-			(p, pid) -> p.getPids().add(pid),
-			pid -> pid.getType() + "::" + pid.getValue(), dedupConfig);
-	}
-
 }
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingPublicationDate.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingPublicationDate.java
index e1de8ce4d..ed8c26b5a 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingPublicationDate.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingPublicationDate.java
@@ -5,37 +5,28 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
-import eu.dnetlib.dhp.schema.oaf.Result;
-import eu.dnetlib.pace.config.DedupConfig;
 
-public class EnrichMissingPublicationDate extends UpdateMatcher<Result, String> {
+public class EnrichMissingPublicationDate extends UpdateMatcher<String> {
 
 	public EnrichMissingPublicationDate() {
-		super(false);
+		super(false,
+			date -> Topic.ENRICH_MISSING_PUBLICATION_DATE,
+			(p, date) -> p.setPublicationdate(date),
+			s -> s);
 	}
 
 	@Override
-	protected List<UpdateInfo<String>> findUpdates(final Result source, final Result target,
-		final DedupConfig dedupConfig) {
-		if (isMissing(target.getDateofacceptance()) && !isMissing(source.getDateofacceptance())) {
-			return Arrays
-				.asList(generateUpdateInfo(source.getDateofacceptance().getValue(), source, target, dedupConfig));
-		}
-		return new ArrayList<>();
-	}
+	protected List<String> findDifferences(final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {
 
-	public UpdateInfo<String> generateUpdateInfo(final String highlightValue,
-		final Result source,
-		final Result target,
-		final DedupConfig dedupConfig) {
-		return new UpdateInfo<>(
-			Topic.ENRICH_MISSING_PUBLICATION_DATE,
-			highlightValue, source, target,
-			(p, date) -> p.setPublicationdate(date),
-			s -> s, dedupConfig);
+		if (isMissing(target.getPublicationdate()) && !isMissing(source.getPublicationdate())) {
+			return Arrays.asList(source.getPublicationdate());
+		} else {
+			return new ArrayList<>();
+		}
 	}
 }
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingSubject.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingSubject.java
index c51f8991c..07b1fa41a 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingSubject.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMissingSubject.java
@@ -5,52 +5,38 @@
 import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;
 
-import org.apache.commons.lang3.tuple.Pair;
-
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
+import eu.dnetlib.broker.objects.TypedValue;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
-import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
-import eu.dnetlib.dhp.schema.oaf.Qualifier;
-import eu.dnetlib.dhp.schema.oaf.Result;
-import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
-import eu.dnetlib.pace.config.DedupConfig;
 
-public class EnrichMissingSubject extends UpdateMatcher<Result, Pair<String, String>> {
+public class EnrichMissingSubject extends UpdateMatcher<TypedValue> {
 
 	public EnrichMissingSubject() {
-		super(true);
+		super(true,
+			s -> Topic.fromPath("ENRICH/MISSING/SUBJECT/" + s.getType()),
+			(p, s) -> p.getSubjects().add(s),
+			s -> subjectAsString(s));
 	}
 
 	@Override
-	protected List<UpdateInfo<Pair<String, String>>> findUpdates(final Result source, final Result target,
-		final DedupConfig dedupConfig) {
-		final Set<String> existingTypes = target
-			.getSubject()
+	protected List<TypedValue> findDifferences(final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {
+		final Set<String> existingSubject = target
+			.getSubjects()
 			.stream()
-			.map(StructuredProperty::getQualifier)
-			.map(Qualifier::getClassid)
+			.map(s -> subjectAsString(s))
 			.collect(Collectors.toSet());
 
 		return source
-			.getPid()
+			.getSubjects()
 			.stream()
-			.filter(pid -> !existingTypes.contains(pid.getQualifier().getClassid()))
-			.map(ConversionUtils::oafSubjectToPair)
-			.map(i -> generateUpdateInfo(i, source, target, dedupConfig))
+			.filter(s -> !existingSubject.contains(subjectAsString(s)))
 			.collect(Collectors.toList());
 	}
 
-	public UpdateInfo<Pair<String, String>> generateUpdateInfo(final Pair<String, String> highlightValue,
-		final Result source,
-		final Result target,
-		final DedupConfig dedupConfig) {
-
-		return new UpdateInfo<>(
-			Topic.fromPath("ENRICH/MISSING/SUBJECT/" + highlightValue.getLeft()),
-			highlightValue, source, target,
-			(p, pair) -> p.getSubjects().add(pair.getRight()),
-			pair -> pair.getLeft() + "::" + pair.getRight(), dedupConfig);
+	private static String subjectAsString(final TypedValue s) {
+		return s.getType() + "::" + s.getValue();
 	}
 }
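The new findDifferences contract is easy to exercise in isolation. A hedged test sketch for EnrichMissingPid follows (the test class itself is hypothetical; it assumes, as the aggregator code later in this patch does, that a fresh OpenaireBrokerResult exposes initialised, mutable collections):

    package eu.dnetlib.dhp.broker.oa.matchers.simple;

    import static org.junit.jupiter.api.Assertions.assertEquals;
    import static org.junit.jupiter.api.Assertions.assertTrue;

    import org.junit.jupiter.api.Test;

    import eu.dnetlib.broker.objects.OpenaireBrokerResult;
    import eu.dnetlib.broker.objects.TypedValue;

    public class EnrichMissingPidTest {

        @Test
        public void testFindDifferences() {
            final EnrichMissingPid matcher = new EnrichMissingPid();

            // the source record carries a pid, the target record has none
            final OpenaireBrokerResult source = new OpenaireBrokerResult();
            source.getPids().add(new TypedValue().setType("doi").setValue("10.1234/abc"));
            final OpenaireBrokerResult target = new OpenaireBrokerResult();

            // one difference is proposed ...
            assertEquals(1, matcher.findDifferences(source, target).size());

            // ... but none once the target already has any pid at all
            target.getPids().add(new TypedValue().setType("doi").setValue("10.1234/xyz"));
            assertTrue(matcher.findDifferences(source, target).isEmpty());
        }
    }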
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMoreOpenAccess.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMoreOpenAccess.java
index 2ac04fd12..bfef3ee4f 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMoreOpenAccess.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMoreOpenAccess.java
@@ -6,51 +6,36 @@
 import java.util.Set;
 import java.util.stream.Collectors;
 
 import eu.dnetlib.broker.objects.Instance;
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
 import eu.dnetlib.dhp.broker.oa.util.BrokerConstants;
-import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
-import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
-import eu.dnetlib.dhp.schema.oaf.Result;
-import eu.dnetlib.pace.config.DedupConfig;
 
-public class EnrichMoreOpenAccess extends UpdateMatcher<Result, Instance> {
+public class EnrichMoreOpenAccess extends UpdateMatcher<Instance> {
 
 	public EnrichMoreOpenAccess() {
-		super(true);
+		super(true,
+			i -> Topic.ENRICH_MORE_OA_VERSION,
+			(p, i) -> p.getInstances().add(i),
+			Instance::getUrl);
 	}
 
 	@Override
-	protected List<UpdateInfo<Instance>> findUpdates(final Result source, final Result target,
-		final DedupConfig dedupConfig) {
+	protected List<Instance> findDifferences(final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {
 		final Set<String> urls = target
-			.getInstance()
+			.getInstances()
 			.stream()
-			.filter(i -> i.getAccessright().getClassid().equals(BrokerConstants.OPEN_ACCESS))
+			.filter(i -> i.getLicense().equals(BrokerConstants.OPEN_ACCESS))
 			.map(i -> i.getUrl())
-			.flatMap(List::stream)
 			.collect(Collectors.toSet());
 
 		return source
-			.getInstance()
+			.getInstances()
 			.stream()
-			.filter(i -> i.getAccessright().getClassid().equals(BrokerConstants.OPEN_ACCESS))
-			.map(ConversionUtils::oafInstanceToBrokerInstances)
-			.flatMap(List::stream)
+			.filter(i -> i.getLicense().equals(BrokerConstants.OPEN_ACCESS))
 			.filter(i -> !urls.contains(i.getUrl()))
-			.map(i -> generateUpdateInfo(i, source, target, dedupConfig))
 			.collect(Collectors.toList());
 	}
 
-	public UpdateInfo<Instance> generateUpdateInfo(final Instance highlightValue,
-		final Result source,
-		final Result target,
-		final DedupConfig dedupConfig) {
-		return new UpdateInfo<>(
-			Topic.ENRICH_MORE_OA_VERSION,
-			highlightValue, source, target,
-			(p, i) -> p.getInstances().add(i),
-			Instance::getUrl, dedupConfig);
-	}
-
 }
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMorePid.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMorePid.java
index e4bf5d2c2..d1f2e6022 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMorePid.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMorePid.java
@@ -5,45 +5,37 @@
 import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;
 
-import eu.dnetlib.broker.objects.Pid;
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
+import eu.dnetlib.broker.objects.TypedValue;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
-import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
-import eu.dnetlib.dhp.schema.oaf.Result;
-import eu.dnetlib.pace.config.DedupConfig;
 
-public class EnrichMorePid extends UpdateMatcher<Result, Pid> {
+public class EnrichMorePid extends UpdateMatcher<TypedValue> {
 
 	public EnrichMorePid() {
-		super(true);
+		super(true,
+			pid -> Topic.ENRICH_MORE_PID,
+			(p, pid) -> p.getPids().add(pid),
+			pid -> pidAsString(pid));
 	}
 
 	@Override
-	protected List<UpdateInfo<Pid>> findUpdates(final Result source, final Result target,
-		final DedupConfig dedupConfig) {
+	protected List<TypedValue> findDifferences(final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {
 		final Set<String> existingPids = target
-			.getPid()
+			.getPids()
 			.stream()
-			.map(pid -> pid.getQualifier().getClassid() + "::" + pid.getValue())
+			.map(pid -> pidAsString(pid))
 			.collect(Collectors.toSet());
 
 		return source
-			.getPid()
+			.getPids()
 			.stream()
-			.filter(pid -> !existingPids.contains(pid.getQualifier().getClassid() + "::" + pid.getValue()))
-			.map(ConversionUtils::oafPidToBrokerPid)
-			.map(i -> generateUpdateInfo(i, source, target, dedupConfig))
+			.filter(pid -> !existingPids.contains(pidAsString(pid)))
 			.collect(Collectors.toList());
 	}
 
-	public UpdateInfo<Pid> generateUpdateInfo(final Pid highlightValue, final Result source, final Result target,
-		final DedupConfig dedupConfig) {
-		return new UpdateInfo<>(
-			Topic.ENRICH_MORE_PID,
-			highlightValue, source, target,
-			(p, pid) -> p.getPids().add(pid),
-			pid -> pid.getType() + "::" + pid.getValue(), dedupConfig);
+	private static String pidAsString(final TypedValue pid) {
+		return pid.getType() + "::" + pid.getValue();
 	}
-
 }
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMoreSubject.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMoreSubject.java
index d6e607c31..39225e8ab 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMoreSubject.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/matchers/simple/EnrichMoreSubject.java
@@ -5,49 +5,37 @@
 import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;
 
-import org.apache.commons.lang3.tuple.Pair;
-
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
+import eu.dnetlib.broker.objects.TypedValue;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
-import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
-import eu.dnetlib.dhp.schema.oaf.Result;
-import eu.dnetlib.pace.config.DedupConfig;
 
-public class EnrichMoreSubject extends UpdateMatcher<Result, Pair<String, String>> {
+public class EnrichMoreSubject extends UpdateMatcher<TypedValue> {
 
 	public EnrichMoreSubject() {
-		super(true);
+		super(true,
+			s -> Topic.fromPath("ENRICH/MORE/SUBJECT/" + s.getType()),
+			(p, s) -> p.getSubjects().add(s),
+			s -> subjectAsString(s));
 	}
 
 	@Override
-	protected List<UpdateInfo<Pair<String, String>>> findUpdates(final Result source, final Result target,
-		final DedupConfig dedupConfig) {
+	protected List<TypedValue> findDifferences(final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {
 		final Set<String> existingSubjects = target
-			.getSubject()
+			.getSubjects()
 			.stream()
-			.map(pid -> pid.getQualifier().getClassid() + "::" + pid.getValue())
+			.map(pid -> subjectAsString(pid))
 			.collect(Collectors.toSet());
 
 		return source
-			.getPid()
+			.getPids()
 			.stream()
-			.filter(pid -> !existingSubjects.contains(pid.getQualifier().getClassid() + "::" + pid.getValue()))
-			.map(ConversionUtils::oafSubjectToPair)
-			.map(i -> generateUpdateInfo(i, source, target, dedupConfig))
+			.filter(s -> !existingSubjects.contains(subjectAsString(s)))
 			.collect(Collectors.toList());
 	}
 
-	public UpdateInfo<Pair<String, String>> generateUpdateInfo(final Pair<String, String> highlightValue,
-		final Result source,
-		final Result target,
-		final DedupConfig dedupConfig) {
-
-		return new UpdateInfo<>(
-			Topic.fromPath("ENRICH/MORE/SUBJECT/" + highlightValue.getLeft()),
-			highlightValue, source, target,
-			(p, pair) -> p.getSubjects().add(pair.getRight()),
-			pair -> pair.getLeft() + "::" + pair.getRight(), dedupConfig);
+	private static String subjectAsString(final TypedValue s) {
+		return s.getType() + "::" + s.getValue();
 	}
-
 }
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/BrokerConstants.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/BrokerConstants.java
index 0665c69dd..49c46c7f0 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/BrokerConstants.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/BrokerConstants.java
@@ -2,13 +2,12 @@
 package eu.dnetlib.dhp.broker.oa.util;
 
 import java.util.Arrays;
-import java.util.List;
+import java.util.HashSet;
+import java.util.Set;
 
-import eu.dnetlib.dhp.schema.oaf.Dataset;
-import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct;
-import eu.dnetlib.dhp.schema.oaf.Publication;
-import eu.dnetlib.dhp.schema.oaf.Result;
-import eu.dnetlib.dhp.schema.oaf.Software;
+import eu.dnetlib.dhp.broker.model.Event;
+import eu.dnetlib.dhp.broker.oa.util.aggregators.simple.ResultGroup;
+import eu.dnetlib.dhp.schema.common.ModelSupport;
 
 public class BrokerConstants {
 
@@ -18,7 +17,11 @@ public class BrokerConstants {
 	public static final float MIN_TRUST = 0.25f;
 	public static final float MAX_TRUST = 1.00f;
 
-	public static final List<Class<? extends Result>> RESULT_CLASSES = Arrays
-		.asList(Publication.class, Dataset.class, Software.class, OtherResearchProduct.class);
+	public static Class<?>[] getModelClasses() {
+		final Set<Class<?>> list = new HashSet<>();
+		list.addAll(Arrays.asList(ModelSupport.getOafModelClasses()));
+		list.addAll(Arrays.asList(ResultGroup.class, Event.class));
+		return list.toArray(new Class[] {});
+	}
 
 }
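Replacing the static RESULT_CLASSES list with getModelClasses() returning a plain Class<?>[] strongly suggests the classes are meant for kryo registration. A sketch of the likely wiring follows; the session-building code and class name are illustrative, not part of this patch:

    package eu.dnetlib.dhp.broker.oa;

    import org.apache.spark.SparkConf;
    import org.apache.spark.sql.SparkSession;

    import eu.dnetlib.dhp.broker.oa.util.BrokerConstants;

    public class KryoRegistrationExample {

        public static void main(final String[] args) {
            // register the broker/OAF model classes with kryo before building the session
            final SparkConf conf = new SparkConf()
                .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
                .registerKryoClasses(BrokerConstants.getModelClasses());

            final SparkSession spark = SparkSession.builder().config(conf).getOrCreate();
            spark.stop();
        }
    }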
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/ConversionUtils.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/ConversionUtils.java
index 2b39115b1..d04ef45a0 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/ConversionUtils.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/ConversionUtils.java
@@ -13,7 +13,8 @@
 import org.dom4j.DocumentHelper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import eu.dnetlib.broker.objects.Pid;
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
+import eu.dnetlib.broker.objects.TypedValue;
 import eu.dnetlib.dhp.schema.oaf.Author;
 import eu.dnetlib.dhp.schema.oaf.Dataset;
 import eu.dnetlib.dhp.schema.oaf.ExternalReference;
@@ -41,8 +42,8 @@
 		}).collect(Collectors.toList());
 	}
 
-	public static Pid oafPidToBrokerPid(final StructuredProperty sp) {
-		return sp != null ? new Pid()
+	public static TypedValue oafPidToBrokerPid(final StructuredProperty sp) {
+		return sp != null ? new TypedValue()
 			.setValue(sp.getValue())
 			.setType(sp.getQualifier().getClassid()) : null;
 	}
@@ -54,7 +55,7 @@
 	public static final eu.dnetlib.broker.objects.Dataset oafDatasetToBrokerDataset(final Dataset d) {
 		return d != null ? new eu.dnetlib.broker.objects.Dataset()
 			.setOriginalId(d.getOriginalId().get(0))
-			.setTitles(structPropList(d.getTitle()))
+			.setTitle(structPropValue(d.getTitle()))
 			.setPids(d.getPid().stream().map(ConversionUtils::oafPidToBrokerPid).collect(Collectors.toList()))
 			.setInstances(
 				d
@@ -63,26 +64,46 @@
 					.map(ConversionUtils::oafInstanceToBrokerInstances)
 					.flatMap(List::stream)
 					.collect(Collectors.toList()))
-			.setCollectedFrom(d.getCollectedfrom().stream().map(KeyValue::getValue).collect(Collectors.toList()))
+			.setCollectedFrom(d.getCollectedfrom().stream().map(KeyValue::getValue).findFirst().orElse(null))
 			: null;
 	}
 
-	public static final eu.dnetlib.broker.objects.Publication oafResultToBrokerPublication(final Result result) {
+	public static eu.dnetlib.broker.objects.Publication oafPublicationToBrokerPublication(final Publication p) {
+		return p != null ? new eu.dnetlib.broker.objects.Publication()
+			.setOriginalId(p.getOriginalId().get(0))
+			.setTitle(structPropValue(p.getTitle()))
+			.setPids(p.getPid().stream().map(ConversionUtils::oafPidToBrokerPid).collect(Collectors.toList()))
+			.setInstances(
+				p
+					.getInstance()
+					.stream()
+					.map(ConversionUtils::oafInstanceToBrokerInstances)
+					.flatMap(List::stream)
+					.collect(Collectors.toList()))
+			.setCollectedFrom(p.getCollectedfrom().stream().map(KeyValue::getValue).findFirst().orElse(null))
+			: null;
+	}
 
-		return result != null ? new eu.dnetlib.broker.objects.Publication()
+	public static final OpenaireBrokerResult oafResultToBrokerResult(final Result result) {
+
+		return result != null ? new OpenaireBrokerResult()
+			.setOpenaireId(result.getId())
 			.setOriginalId(result.getOriginalId().get(0))
+			.setTypology(result.getResulttype().getClassid())
 			.setTitles(structPropList(result.getTitle()))
 			.setAbstracts(fieldList(result.getDescription()))
 			.setLanguage(result.getLanguage().getClassid())
-			.setSubjects(structPropList(result.getSubject()))
-			.setCreators(result.getAuthor().stream().map(Author::getFullname).collect(Collectors.toList()))
-			.setPublicationdate(result.getDateofcollection())
+			.setSubjects(structPropTypedList(result.getSubject()))
+			.setCreators(
+				result.getAuthor().stream().map(ConversionUtils::oafAuthorToBrokerAuthor).collect(Collectors.toList()))
+			.setPublicationdate(result.getDateofacceptance().getValue())
 			.setPublisher(fieldValue(result.getPublisher()))
 			.setEmbargoenddate(fieldValue(result.getEmbargoenddate()))
 			.setContributor(fieldList(result.getContributor()))
 			.setJournal(
 				result instanceof Publication ? oafJournalToBrokerJournal(((Publication) result).getJournal()) : null)
-			.setCollectedFrom(result.getCollectedfrom().stream().map(KeyValue::getValue).collect(Collectors.toList()))
+			.setCollectedFromId(result.getCollectedfrom().stream().map(KeyValue::getKey).findFirst().orElse(null))
+			.setCollectedFromName(result.getCollectedfrom().stream().map(KeyValue::getValue).findFirst().orElse(null))
 			.setPids(result.getPid().stream().map(ConversionUtils::oafPidToBrokerPid).collect(Collectors.toList()))
 			.setInstances(
 				result
@@ -100,6 +121,30 @@
 			: null;
 	}
 
+	private static List<TypedValue> structPropTypedList(final List<StructuredProperty> list) {
+		return list
+			.stream()
+			.map(
+				p -> new TypedValue()
+					.setValue(p.getValue())
+					.setType(p.getQualifier().getClassid()))
+			.collect(Collectors.toList());
+	}
+
+	private static eu.dnetlib.broker.objects.Author oafAuthorToBrokerAuthor(final Author author) {
+		return author != null ? new eu.dnetlib.broker.objects.Author()
+			.setFullname(author.getFullname())
+			.setOrcid(
+				author
+					.getPid()
+					.stream()
+					.filter(pid -> pid.getQualifier().getClassid().equalsIgnoreCase("orcid"))
+					.map(pid -> pid.getValue())
+					.findFirst()
+					.orElse(null))
+			: null;
+	}
+
 	private static eu.dnetlib.broker.objects.Journal oafJournalToBrokerJournal(final Journal journal) {
 		return journal != null ? new eu.dnetlib.broker.objects.Journal()
 			.setName(journal.getName())
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/EventFinder.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/EventFinder.java
new file mode 100644
index 000000000..4c20ac5ca
--- /dev/null
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/EventFinder.java
@@ -0,0 +1,86 @@
+
+package eu.dnetlib.dhp.broker.oa.util;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
+import eu.dnetlib.dhp.broker.model.EventFactory;
+import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
+import eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets.EnrichMissingDatasetIsReferencedBy;
+import eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets.EnrichMissingDatasetIsRelatedTo;
+import eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets.EnrichMissingDatasetIsSupplementedBy;
+import eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets.EnrichMissingDatasetIsSupplementedTo;
+import eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets.EnrichMissingDatasetReferences;
+import eu.dnetlib.dhp.broker.oa.matchers.relatedProjects.EnrichMissingProject;
+import eu.dnetlib.dhp.broker.oa.matchers.relatedProjects.EnrichMoreProject;
+import eu.dnetlib.dhp.broker.oa.matchers.relatedPublications.EnrichMissingPublicationIsReferencedBy;
+import eu.dnetlib.dhp.broker.oa.matchers.relatedPublications.EnrichMissingPublicationIsRelatedTo;
+import eu.dnetlib.dhp.broker.oa.matchers.relatedPublications.EnrichMissingPublicationIsSupplementedBy;
+import eu.dnetlib.dhp.broker.oa.matchers.relatedPublications.EnrichMissingPublicationIsSupplementedTo;
+import eu.dnetlib.dhp.broker.oa.matchers.relatedPublications.EnrichMissingPublicationReferences;
+import eu.dnetlib.dhp.broker.oa.matchers.relatedSoftware.EnrichMissingSoftware;
+import eu.dnetlib.dhp.broker.oa.matchers.relatedSoftware.EnrichMoreSoftware;
+import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMissingAbstract;
+import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMissingAuthorOrcid;
+import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMissingOpenAccess;
+import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMissingPid;
+import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMissingPublicationDate;
+import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMissingSubject;
+import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMoreOpenAccess;
+import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMorePid;
+import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMoreSubject;
+import eu.dnetlib.dhp.broker.oa.util.aggregators.simple.ResultGroup;
+import eu.dnetlib.pace.config.DedupConfig;
+
+public class EventFinder {
+
+	private static List<UpdateMatcher<?>> matchers = new ArrayList<>();
+	static {
+		matchers.add(new EnrichMissingAbstract());
+		matchers.add(new EnrichMissingAuthorOrcid());
+		matchers.add(new EnrichMissingOpenAccess());
+		matchers.add(new EnrichMissingPid());
+		matchers.add(new EnrichMissingPublicationDate());
+		matchers.add(new EnrichMissingSubject());
+		matchers.add(new EnrichMoreOpenAccess());
+		matchers.add(new EnrichMorePid());
+		matchers.add(new EnrichMoreSubject());
+
+		// Advanced matchers
+		matchers.add(new EnrichMissingProject());
+		matchers.add(new EnrichMoreProject());
+		matchers.add(new EnrichMissingSoftware());
+		matchers.add(new EnrichMoreSoftware());
+		matchers.add(new EnrichMissingPublicationIsRelatedTo());
+		matchers.add(new EnrichMissingPublicationIsReferencedBy());
+		matchers.add(new EnrichMissingPublicationReferences());
+		matchers.add(new EnrichMissingPublicationIsSupplementedTo());
+		matchers.add(new EnrichMissingPublicationIsSupplementedBy());
+		matchers.add(new EnrichMissingDatasetIsRelatedTo());
+		matchers.add(new EnrichMissingDatasetIsReferencedBy());
+		matchers.add(new EnrichMissingDatasetReferences());
+		matchers.add(new EnrichMissingDatasetIsSupplementedTo());
+		matchers.add(new EnrichMissingDatasetIsSupplementedBy());
+		matchers.add(new EnrichMissingAbstract());
+	}
+
+	public static EventGroup generateEvents(final ResultGroup results, final DedupConfig dedupConfig) {
+		final List<UpdateInfo<?>> list = new ArrayList<>();
+
+		for (final OpenaireBrokerResult target : results.getData()) {
+			for (final UpdateMatcher<?> matcher : matchers) {
+				list.addAll(matcher.searchUpdatesForRecord(target, results.getData(), dedupConfig));
+			}
+		}
+
+		return asEventGroup(list);
+	}
+
+	private static EventGroup asEventGroup(final List<UpdateInfo<?>> list) {
+		final EventGroup events = new EventGroup();
+		list.stream().map(EventFactory::newBrokerEvent).forEach(events::addElement);
+		return events;
+	}
+
+}
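A sketch of how EventFinder would be driven once the dedup phase has grouped duplicate records; the wrapper class and variable names are illustrative (ResultGroup.addElement returns this, see the aggregators below):

    import eu.dnetlib.broker.objects.OpenaireBrokerResult;
    import eu.dnetlib.dhp.broker.oa.util.EventFinder;
    import eu.dnetlib.dhp.broker.oa.util.EventGroup;
    import eu.dnetlib.dhp.broker.oa.util.aggregators.simple.ResultGroup;
    import eu.dnetlib.pace.config.DedupConfig;

    public class EventFinderUsageExample {

        // target and duplicate stand for two records that dedup grouped together
        static EventGroup eventsFor(final OpenaireBrokerResult target,
            final OpenaireBrokerResult duplicate,
            final DedupConfig dedupConfig) {
            final ResultGroup group = new ResultGroup();
            group.addElement(target).addElement(duplicate);
            return EventFinder.generateEvents(group, dedupConfig);
        }
    }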
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/UpdateInfo.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/UpdateInfo.java
index 893aa2827..82d017864 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/UpdateInfo.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/UpdateInfo.java
@@ -1,7 +1,6 @@
 
 package eu.dnetlib.dhp.broker.oa.util;
 
-import java.util.List;
 import java.util.function.BiConsumer;
 import java.util.function.Function;
 
@@ -10,13 +9,11 @@
 import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 
+import eu.dnetlib.broker.objects.Instance;
 import eu.dnetlib.broker.objects.OpenAireEventPayload;
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
 import eu.dnetlib.broker.objects.Provenance;
-import eu.dnetlib.broker.objects.Publication;
 import eu.dnetlib.dhp.broker.model.Topic;
-import eu.dnetlib.dhp.schema.oaf.Instance;
-import eu.dnetlib.dhp.schema.oaf.KeyValue;
-import eu.dnetlib.dhp.schema.oaf.Result;
 import eu.dnetlib.pace.config.DedupConfig;
 import eu.dnetlib.pace.model.MapDocument;
 import eu.dnetlib.pace.tree.support.TreeProcessor;
@@ -28,11 +25,11 @@ public final class UpdateInfo<T> {
 
 	private final T highlightValue;
 
-	private final Result source;
+	private final OpenaireBrokerResult source;
 
-	private final Result target;
+	private final OpenaireBrokerResult target;
 
-	private final BiConsumer<Publication, T> compileHighlight;
+	private final BiConsumer<OpenaireBrokerResult, T> compileHighlight;
 
 	private final Function<T, String> highlightToString;
 
@@ -40,8 +37,9 @@
 
 	private static final Logger log = LoggerFactory.getLogger(UpdateInfo.class);
 
-	public UpdateInfo(final Topic topic, final T highlightValue, final Result source, final Result target,
-		final BiConsumer<Publication, T> compileHighlight,
+	public UpdateInfo(final Topic topic, final T highlightValue, final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target,
+		final BiConsumer<OpenaireBrokerResult, T> compileHighlight,
 		final Function<T, String> highlightToString,
 		final DedupConfig dedupConfig) {
 		this.topic = topic;
@@ -57,15 +55,16 @@
 		return highlightValue;
 	}
 
-	public Result getSource() {
+	public OpenaireBrokerResult getSource() {
 		return source;
 	}
 
-	public Result getTarget() {
+	public OpenaireBrokerResult getTarget() {
 		return target;
 	}
 
-	private float calculateTrust(final DedupConfig dedupConfig, final Result r1, final Result r2) {
+	private float calculateTrust(final DedupConfig dedupConfig, final OpenaireBrokerResult r1,
+		final OpenaireBrokerResult r2) {
 		try {
 			final ObjectMapper objectMapper = new ObjectMapper();
 			final MapDocument doc1 = MapDocumentUtil
@@ -101,24 +100,18 @@
 
 	public OpenAireEventPayload asBrokerPayload() {
 
-		final Publication p = ConversionUtils.oafResultToBrokerPublication(getSource());
-		compileHighlight.accept(p, getHighlightValue());
+		compileHighlight.accept(target, getHighlightValue());
 
-		final Publication hl = new Publication();
+		final OpenaireBrokerResult hl = new OpenaireBrokerResult();
 		compileHighlight.accept(hl, getHighlightValue());
 
-		final String provId = getSource().getOriginalId().stream().findFirst().orElse(null);
-		final String provRepo = getSource()
-			.getCollectedfrom()
-			.stream()
-			.map(KeyValue::getValue)
-			.findFirst()
-			.orElse(null);
+		final String provId = getSource().getOriginalId();
+		final String provRepo = getSource().getCollectedFromName();
+
 		final String provUrl = getSource()
-			.getInstance()
+			.getInstances()
 			.stream()
 			.map(Instance::getUrl)
-			.flatMap(List::stream)
 			.findFirst()
 			.orElse(null);
 		;
@@ -126,7 +119,7 @@
 		final Provenance provenance = new Provenance().setId(provId).setRepositoryName(provRepo).setUrl(provUrl);
 
 		return new OpenAireEventPayload()
-			.setPublication(p)
+			.setPublication(target)
 			.setHighlight(hl)
 			.setTrust(trust)
 			.setProvenance(provenance);
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/ResultAggregator.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/simple/ResultAggregator.java
similarity index 76%
rename from dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/ResultAggregator.java
rename to dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/simple/ResultAggregator.java
index 475c76814..dabe2bb4d 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/ResultAggregator.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/simple/ResultAggregator.java
@@ -1,15 +1,15 @@
 
-package eu.dnetlib.dhp.broker.oa.util;
+package eu.dnetlib.dhp.broker.oa.util.aggregators.simple;
 
 import org.apache.spark.sql.Encoder;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.expressions.Aggregator;
 
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
 import eu.dnetlib.dhp.schema.oaf.Relation;
-import eu.dnetlib.dhp.schema.oaf.Result;
 import scala.Tuple2;
 
-public class ResultAggregator extends Aggregator<Tuple2<Result, Relation>, ResultGroup, ResultGroup> {
+public class ResultAggregator extends Aggregator<Tuple2<OpenaireBrokerResult, Relation>, ResultGroup, ResultGroup> {
 
 	/**
 	 *
@@ -22,7 +22,7 @@ public class ResultAggregator extends Aggregator<Tuple2<Result, Relation>, Resul
 	}
 
 	@Override
-	public ResultGroup reduce(final ResultGroup group, final Tuple2<Result, Relation> t) {
+	public ResultGroup reduce(final ResultGroup group, final Tuple2<OpenaireBrokerResult, Relation> t) {
 		return group.addElement(t._1);
 	}
 
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/ResultGroup.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/simple/ResultGroup.java
similarity index 60%
rename from dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/ResultGroup.java
rename to dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/simple/ResultGroup.java
index 2be673db0..4308224a5 100644
--- a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/ResultGroup.java
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/simple/ResultGroup.java
@@ -1,11 +1,11 @@
 
-package eu.dnetlib.dhp.broker.oa.util;
+package eu.dnetlib.dhp.broker.oa.util.aggregators.simple;
 
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
 
-import eu.dnetlib.dhp.schema.oaf.Result;
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
 
 public class ResultGroup implements Serializable {
 
@@ -14,13 +14,13 @@
 	 */
 	private static final long serialVersionUID = -3360828477088669296L;
 
-	private final List<Result> data = new ArrayList<>();
+	private final List<OpenaireBrokerResult> data = new ArrayList<>();
 
-	public List<Result> getData() {
+	public List<OpenaireBrokerResult> getData() {
 		return data;
 	}
 
-	public ResultGroup addElement(final Result elem) {
+	public ResultGroup addElement(final OpenaireBrokerResult elem) {
 		data.add(elem);
 		return this;
 	}
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/withRels/OpenaireBrokerResultAggregator.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/withRels/OpenaireBrokerResultAggregator.java
new file mode 100644
index 000000000..b44fbe367
--- /dev/null
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/withRels/OpenaireBrokerResultAggregator.java
@@ -0,0 +1,69 @@
+
+package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;
+
+import org.apache.spark.sql.Encoder;
+import org.apache.spark.sql.Encoders;
+import org.apache.spark.sql.expressions.Aggregator;
+
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
+import scala.Tuple2;
+
+public class OpenaireBrokerResultAggregator<T>
+	extends Aggregator<Tuple2<OpenaireBrokerResult, T>, OpenaireBrokerResult, OpenaireBrokerResult> {
+
+	/**
+	 *
+	 */
+	private static final long serialVersionUID = -3687878788861013488L;
+
+	@Override
+	public OpenaireBrokerResult zero() {
+		return new OpenaireBrokerResult();
+	}
+
+	@Override
+	public OpenaireBrokerResult finish(final OpenaireBrokerResult g) {
+		return g;
+	}
+
+	@Override
+	public OpenaireBrokerResult reduce(final OpenaireBrokerResult g, final Tuple2<OpenaireBrokerResult, T> t) {
+		if (g.getOriginalId() == null) {
+			return t._1;
+		} else if (t._2 instanceof RelatedSoftware) {
+			g.getSoftwares().add(((RelatedSoftware) t._2).getRelSoftware());
+		} else if (t._2 instanceof RelatedDataset) {
+			g.getDatasets().add(((RelatedDataset) t._2).getRelDataset());
+		} else if (t._2 instanceof RelatedPublication) {
+			g.getPublications().add(((RelatedPublication) t._2).getRelPublication());
+		} else if (t._2 instanceof RelatedProject) {
+			g.getProjects().add(((RelatedProject) t._2).getRelProject());
+		}
+		return g;
+
+	}
+
+	@Override
+	public OpenaireBrokerResult merge(final OpenaireBrokerResult g1, final OpenaireBrokerResult g2) {
+		if (g1.getOriginalId() != null) {
+			g1.getSoftwares().addAll(g2.getSoftwares());
+			g1.getDatasets().addAll(g2.getDatasets());
+			g1.getPublications().addAll(g2.getPublications());
+			g1.getProjects().addAll(g2.getProjects());
+			return g1;
+		} else {
+			return g2;
+		}
+	}
+
+	@Override
+	public Encoder<OpenaireBrokerResult> bufferEncoder() {
+		return Encoders.kryo(OpenaireBrokerResult.class);
+	}
+
+	@Override
+	public Encoder<OpenaireBrokerResult> outputEncoder() {
+		return Encoders.kryo(OpenaireBrokerResult.class);
+	}
+
+}
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/withRels/RelatedDataset.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/withRels/RelatedDataset.java
new file mode 100644
index 000000000..fcf1b89b1
--- /dev/null
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/withRels/RelatedDataset.java
@@ -0,0 +1,36 @@
+
+package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;
+
+import java.io.Serializable;
+
+import eu.dnetlib.broker.objects.Dataset;
+
+public class RelatedDataset implements Serializable {
+
+	/**
+	 *
+	 */
+	private static final long serialVersionUID = 774487705184038324L;
+	private final String source;
+	private final String relType;
+	private final Dataset relDataset;
+
+	public RelatedDataset(final String source, final String relType, final Dataset relDataset) {
+		this.source = source;
+		this.relType = relType;
+		this.relDataset = relDataset;
+	}
+
+	public String getSource() {
+		return source;
+	}
+
+	public String getRelType() {
+		return relType;
+	}
+
+	public Dataset getRelDataset() {
+		return relDataset;
+	}
+
+}
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/withRels/RelatedEntityFactory.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/withRels/RelatedEntityFactory.java
new file mode 100644
index 000000000..c60d4f141
--- /dev/null
+++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/withRels/RelatedEntityFactory.java
@@ -0,0 +1,34 @@
+
+package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;
+
+import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
+import eu.dnetlib.dhp.schema.oaf.Dataset;
+import eu.dnetlib.dhp.schema.oaf.Project;
+import eu.dnetlib.dhp.schema.oaf.Publication;
+import eu.dnetlib.dhp.schema.oaf.Software;
+
+public class RelatedEntityFactory {
+
+	@SuppressWarnings("unchecked")
+	public static <RT, T> RT newRelatedEntity(final String sourceId,
+		final String relType,
+		final T target,
+		final Class<RT> clazz) {
+
+		if (clazz == RelatedProject.class) {
+			return (RT) new RelatedProject(sourceId, relType,
+				ConversionUtils.oafProjectToBrokerProject((Project) target));
+		} else if (clazz == RelatedSoftware.class) {
+			return (RT) new RelatedSoftware(sourceId, relType,
+				ConversionUtils.oafSoftwareToBrokerSoftware((Software) target));
+		} else if (clazz == RelatedDataset.class) {
+			return (RT) new RelatedDataset(sourceId, relType,
+				ConversionUtils.oafDatasetToBrokerDataset((Dataset) target));
+		} else if (clazz == RelatedPublication.class) {
+			return (RT) new RelatedPublication(sourceId, relType,
+				ConversionUtils.oafPublicationToBrokerPublication((Publication) target));
+		} else {
+			return null;
+		}
+	}
+}
diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/withRels/RelatedProject.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/withRels/RelatedProject.java
new file mode 100644
index 000000000..233041c09
--- /dev/null
+++
b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/withRels/RelatedProject.java @@ -0,0 +1,37 @@ + +package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels; + +import java.io.Serializable; + +import eu.dnetlib.broker.objects.Project; + +public class RelatedProject implements Serializable { + + /** + * + */ + private static final long serialVersionUID = 4941437626549329870L; + + private final String source; + private final String relType; + private final Project relProject; + + public RelatedProject(final String source, final String relType, final Project relProject) { + this.source = source; + this.relType = relType; + this.relProject = relProject; + } + + public String getSource() { + return source; + } + + public String getRelType() { + return relType; + } + + public Project getRelProject() { + return relProject; + } + +} diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/withRels/RelatedPublication.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/withRels/RelatedPublication.java new file mode 100644 index 000000000..80b92462d --- /dev/null +++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/withRels/RelatedPublication.java @@ -0,0 +1,37 @@ + +package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels; + +import java.io.Serializable; + +import eu.dnetlib.broker.objects.Publication; + +public class RelatedPublication implements Serializable { + + /** + * + */ + private static final long serialVersionUID = 9021609640411395128L; + + private final String source; + private final String relType; + private final Publication relPublication; + + public RelatedPublication(final String source, final String relType, final Publication relPublication) { + this.source = source; + this.relType = relType; + this.relPublication = relPublication; + } + + public String getSource() { + return source; + } + + public String getRelType() { + return relType; + } + + public Publication getRelPublication() { + return relPublication; + } + +} diff --git a/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/withRels/RelatedSoftware.java b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/withRels/RelatedSoftware.java new file mode 100644 index 000000000..13f1f4290 --- /dev/null +++ b/dhp-workflows/dhp-broker-events/src/main/java/eu/dnetlib/dhp/broker/oa/util/aggregators/withRels/RelatedSoftware.java @@ -0,0 +1,36 @@ + +package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels; + +import java.io.Serializable; + +import eu.dnetlib.broker.objects.Software; + +public class RelatedSoftware implements Serializable { + + /** + * + */ + private static final long serialVersionUID = 7573383356943300157L; + private final String source; + private final String relType; + private final Software relSoftware; + + public RelatedSoftware(final String source, final String relType, final Software relSoftware) { + this.source = source; + this.relType = relType; + this.relSoftware = relSoftware; + } + + public String getSource() { + return source; + } + + public String getRelType() { + return relType; + } + + public Software getRelSoftware() { + return relSoftware; + } + +} diff --git a/dhp-workflows/dhp-broker-events/src/main/resources/eu/dnetlib/dhp/broker/oa/generate_all/oozie_app/config-default.xml 
b/dhp-workflows/dhp-broker-events/src/main/resources/eu/dnetlib/dhp/broker/oa/generate_all/oozie_app/config-default.xml
new file mode 100644
index 000000000..2e0ed9aee
--- /dev/null
+++ b/dhp-workflows/dhp-broker-events/src/main/resources/eu/dnetlib/dhp/broker/oa/generate_all/oozie_app/config-default.xml
@@ -0,0 +1,18 @@
+<configuration>
+    <property>
+        <name>jobTracker</name>
+        <value>yarnRM</value>
+    </property>
+    <property>
+        <name>nameNode</name>
+        <value>hdfs://nameservice1</value>
+    </property>
+    <property>
+        <name>oozie.use.system.libpath</name>
+        <value>true</value>
+    </property>
+    <property>
+        <name>oozie.action.sharelib.for.spark</name>
+        <value>spark2</value>
+    </property>
+</configuration>
\ No newline at end of file
diff --git a/dhp-workflows/dhp-broker-events/src/main/resources/eu/dnetlib/dhp/broker/oa/generate_all/oozie_app/workflow.xml b/dhp-workflows/dhp-broker-events/src/main/resources/eu/dnetlib/dhp/broker/oa/generate_all/oozie_app/workflow.xml
new file mode 100644
index 000000000..da573ae9c
--- /dev/null
+++ b/dhp-workflows/dhp-broker-events/src/main/resources/eu/dnetlib/dhp/broker/oa/generate_all/oozie_app/workflow.xml
@@ -0,0 +1,99 @@
+<workflow-app xmlns="uri:oozie:workflow:0.5" name="generate_broker_events">
+    <parameters>
+        <property>
+            <name>graphInputPath</name>
+            <description>the path where the graph is stored</description>
+        </property>
+        <property>
+            <name>eventsOutputPath</name>
+            <description>the path where the the events will be stored</description>
+        </property>
+        <property>
+            <name>isLookupUrl</name>
+            <description>the address of the lookUp service</description>
+        </property>
+        <property>
+            <name>dedupConfProfId</name>
+            <description>the id of a valid Dedup Configuration Profile</description>
+        </property>
+        <property>
+            <name>sparkDriverMemory</name>
+            <description>memory for driver process</description>
+        </property>
+        <property>
+            <name>sparkExecutorMemory</name>
+            <description>memory for individual executor</description>
+        </property>
+        <property>
+            <name>sparkExecutorCores</name>
+            <description>number of cores used by single executor</description>
+        </property>
+        <property>
+            <name>oozieActionShareLibForSpark2</name>
+            <description>oozie action sharelib for spark 2.*</description>
+        </property>
+        <property>
+            <name>spark2ExtraListeners</name>
+            <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
+            <description>spark 2.* extra listeners classname</description>
+        </property>
+        <property>
+            <name>spark2SqlQueryExecutionListeners</name>
+            <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
+            <description>spark 2.* sql query execution listeners classname</description>
+        </property>
+        <property>
+            <name>spark2YarnHistoryServerAddress</name>
+            <description>spark 2.* yarn history server address</description>
+        </property>
+        <property>
+            <name>spark2EventLogDir</name>
+            <description>spark 2.* event log dir location</description>
+        </property>
+    </parameters>
+
+    <global>
+        <job-tracker>${jobTracker}</job-tracker>
+        <name-node>${nameNode}</name-node>
+        <configuration>
+            <property>
+                <name>mapreduce.job.queuename</name>
+                <value>${queueName}</value>
+            </property>
+            <property>
+                <name>oozie.launcher.mapred.job.queue.name</name>
+                <value>${oozieLauncherQueueName}</value>
+            </property>
+            <property>
+                <name>oozie.action.sharelib.for.spark</name>
+                <value>${oozieActionShareLibForSpark2}</value>
+            </property>
+        </configuration>
+    </global>
+
+    <start to="generate_events"/>
+
+    <kill name="Kill">
+        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+
+    <action name="generate_events">
+        <spark xmlns="uri:oozie:spark-action:0.2">
+            <class>eu.dnetlib.dhp.broker.oa.GenerateEventsApplication</class>
+            <arg>--graphPath</arg><arg>${graphInputPath}</arg>
+            <arg>--eventsPath</arg><arg>${eventsOutputPath}</arg>
+            <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
+            <arg>--dedupConfProfile</arg><arg>${dedupConfProfId}</arg>
+        </spark>
+        <ok to="End"/>
+        <error to="Kill"/>
+    </action>
+
+    <end name="End"/>
+</workflow-app>
\ No newline at end of file
diff --git a/dhp-workflows/dhp-broker-events/src/main/resources/eu/dnetlib/dhp/broker/oa/generate_broker_events.json b/dhp-workflows/dhp-broker-events/src/main/resources/eu/dnetlib/dhp/broker/oa/generate_broker_events.json
new file mode 100644
index 000000000..6ab6d9a2d
--- /dev/null
+++ b/dhp-workflows/dhp-broker-events/src/main/resources/eu/dnetlib/dhp/broker/oa/generate_broker_events.json
@@ -0,0 +1,26 @@
+[
+  {
+    "paramName": "g",
+    "paramLongName": "graphPath",
+    "paramDescription": "the path where there the graph is stored",
+    "paramRequired": true
+  },
+  {
+    "paramName": "o",
+    "paramLongName": "eventsPath",
+    "paramDescription": "the path where the generated events will be stored",
+    "paramRequired": true
+  },
+  {
+    "paramName": "lu",
+    "paramLongName": "isLookupUrl",
+    "paramDescription": "the address of the ISLookUpService",
+    "paramRequired": true
+  },
+  {
+    "paramName": "d",
+    "paramLongName": "dedupConfProfile",
+    "paramDescription": "the id of a valid Dedup Configuration Profile",
+    "paramRequired": true
+  }
+]
diff --git a/dhp-workflows/dhp-dedup-openaire/pom.xml b/dhp-workflows/dhp-dedup-openaire/pom.xml
index
44cf9e67c..1f5f2620e 100644 --- a/dhp-workflows/dhp-dedup-openaire/pom.xml +++ b/dhp-workflows/dhp-dedup-openaire/pom.xml @@ -3,7 +3,7 @@ dhp-workflows eu.dnetlib.dhp - 1.2.2-SNAPSHOT + 1.2.3-SNAPSHOT 4.0.0 dhp-dedup-openaire diff --git a/dhp-workflows/dhp-dedup-openaire/src/test/resources/eu/dnetlib/dhp/dedup/json/publication_merge.json b/dhp-workflows/dhp-dedup-openaire/src/test/resources/eu/dnetlib/dhp/dedup/json/publication_merge.json index 28548c532..ae688e746 100644 --- a/dhp-workflows/dhp-dedup-openaire/src/test/resources/eu/dnetlib/dhp/dedup/json/publication_merge.json +++ b/dhp-workflows/dhp-dedup-openaire/src/test/resources/eu/dnetlib/dhp/dedup/json/publication_merge.json @@ -1,3 +1,3 @@ -{"context": [], "dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "sysimport:crosswalk:datasetarchive", "classname": "sysimport:crosswalk:datasetarchive", "schemename": "dnet:provenanceActions", "schemeid": "dnet:provenanceActions"}, "inferred": false, "inferenceprovenance": "dedup-similarity-result-levenstein", "invisible": false, "trust": "0.95"}, "resourcetype": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "pid": [], "contributor": [], "resulttype": {"classid": "publication", "classname": "publication", "schemename": "dnet:result_typologies", "schemeid": "dnet:result_typologies"}, "relevantdate": [], "collectedfrom": [{"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "Journal.fi", "key": "10|openaire____::6eef8049d0feedc089ee009abca55e35"}], "id": "50|a89337edbe55::4930db9e954866d70916cbfba9f81f97", "subject": [], "instance": [{"refereed": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "hostedby": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "Journal.fi", "key": "10|openaire____::6eef8049d0feedc089ee009abca55e35"}, "processingchargeamount": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "license": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "url": [], "distributionlocation": "", "processingchargecurrency": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "dateofacceptance": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "2016-01-01"}, "collectedfrom": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": 
false, "trust": ""}, "value": "Journal.fi", "key": "10|openaire____::6eef8049d0feedc089ee009abca55e35"}, "accessright": {"classid": "OPEN", "classname": "Open Access", "schemename": "dnet:access_modes", "schemeid": "dnet:access_modes"}, "instancetype": {"classid": "0001", "classname": "Article", "schemename": "dnet:dataCite_resource", "schemeid": "dnet:dataCite_resource"}}], "embargoenddate": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "lastupdatetimestamp": 0, "author": [{"surname": "Go\\u0308tz", "name": "Julia", "pid": [], "rank": 5, "affiliation": [], "fullname": "G\\u00f6tz, Julia"}, {"surname": "Wolff", "name": "Stephan", "pid": [], "rank": 6, "affiliation": [], "fullname": "Wolff, Stephan"}, {"surname": "Jansen", "name": "Olav", "pid": [], "rank": 7, "affiliation": [], "fullname": "Jansen, Olav"}, {"surname": "Dressler", "name": "Dirk", "pid": [{"qualifier": {"classid": "ORCID", "classname": "ORCID"}, "value": "0000-0000-0656-9999"},{"qualifier": {"classid": "id", "classname": "id"}, "value": "987654321"}], "rank": 8, "affiliation": [], "fullname": "Dressler, Dirk"}, {"surname": "Schneider", "name": "Susanne A.", "pid": [], "rank": 9, "affiliation": [], "fullname": "Schneider, Susanne A."}], "source": [], "dateofcollection": "2019-11-05T14:49:22.351Z", "fulltext": [], "dateoftransformation": "2019-11-05T16:10:58.988Z", "description": [], "format": [], "journal": {"issnPrinted": "1459-6067", "conferencedate": "", "conferenceplace": "", "name": "Agricultural and Food Science", "edition": "", "iss": "3", "sp": "", "vol": "27", "dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "issnOnline": "1795-1895", "ep": "", "issnLinking": ""}, "coverage": [], "publisher": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "language": {"classid": "eng", "classname": "English", "schemename": "dnet:languages", "schemeid": "dnet:languages"}, "bestaccessright": {"classid": "OPEN", "classname": "Open Access", "schemename": "dnet:access_modes", "schemeid": "dnet:access_modes"}, "country": [], "extraInfo": [], "originalId": [], "dateofacceptance": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "2018-09-30"}, "title": [{"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "qualifier": {"classid": "main title", "classname": "main title", "schemename": "dnet:dataCite_title", "schemeid": "dnet:dataCite_title"}, "value": "Altered brain activation in a reversal learning task unmasks adaptive changes in cognitive control in writer's cramp"}]} -{"context": [], "dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "sysimport:crosswalk:repository", "classname": "sysimport:crosswalk:repository", "schemename": "dnet:provenanceActions", "schemeid": 
"dnet:provenanceActions"}, "inferred": true, "inferenceprovenance": "dedup-similarity-result-levenstein", "invisible": false, "trust": "0.9"}, "resourcetype": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "pid": [{"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "qualifier": {"classid": "doi", "classname": "doi", "schemename": "dnet:pid_types", "schemeid": "dnet:pid_types"}, "value": "10.1016/j.nicl.2015.11.006"}], "contributor": [], "resulttype": {"classid": "publication", "classname": "publication", "schemename": "dnet:result_typologies", "schemeid": "dnet:result_typologies"}, "relevantdate": [], "collectedfrom": [{"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "BASE (Open Access Aggregator)", "key": "10|openaire____::df45502607927471ecf8a6ae83683ff5"}], "id": "50|base_oa_____::0968af610a356656706657e4f234b340", "subject": [], "instance": [{"refereed": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "hostedby": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "NeuroImage: Clinical", "key": "10|doajarticles::0c0e74daa5d95504eade9c81ebbd5b8a"}, "processingchargeamount": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "license": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "http://creativecommons.org/licenses/by-nc-nd/4.0/"}, "url": ["http://dx.doi.org/10.1016/j.nicl.2015.11.006"], "distributionlocation": "", "processingchargecurrency": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "dateofacceptance": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "2016-01-01"}, "collectedfrom": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "BASE (Open Access Aggregator)", "key": "10|openaire____::df45502607927471ecf8a6ae83683ff5"}, "accessright": {"classid": "OPEN", "classname": "Open Access", "schemename": "dnet:access_modes", "schemeid": "dnet:access_modes"}, "instancetype": {"classid": "0001", "classname": "Article", "schemename": "dnet:publication_resource", "schemeid": "dnet:publication_resource"}}], "embargoenddate": {"dataInfo": 
{"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "lastupdatetimestamp": 0, "author": [{"surname": "Zeuner", "name": "Kirsten E.", "pid": [], "rank": 1, "affiliation": [], "fullname": "Zeuner, Kirsten E."}, {"surname": "Knutzen", "name": "Arne", "pid": [], "rank": 2, "affiliation": [], "fullname": "Knutzen, Arne"}, {"surname": "Granert", "name": "Oliver", "pid": [{"qualifier": {"classid": "ORCID", "classname": "ORCID"}, "value": "0000-0002-0656-1023"}], "rank": 3, "affiliation": [], "fullname": "Granert, Oliver"}, {"surname": "Sablowsky", "name": "Simone", "pid": [], "rank": 4, "affiliation": [], "fullname": "Sablowsky, Simone"}, {"surname": "Go\\u0308tz", "name": "Julia", "pid": [], "rank": 5, "affiliation": [], "fullname": "G\\u00f6tz, Julia"}, {"surname": "Wolff", "name": "Stephan", "pid": [], "rank": 6, "affiliation": [], "fullname": "Wolff, Stephan"}, {"surname": "Jansen", "name": "Olav", "pid": [], "rank": 7, "affiliation": [], "fullname": "Jansen, Olav"}, {"surname": "Dressler", "name": "Dirk", "pid": [], "rank": 8, "affiliation": [], "fullname": "Dressler, Dirk"}, {"surname": "Schneider", "name": "Susanne A.", "pid": [], "rank": 9, "affiliation": [], "fullname": "Schneider, Susanne A."}, {"surname": "Klein", "name": "Christine", "pid": [], "rank": 10, "affiliation": [], "fullname": "Klein, Christine"}, {"surname": "Deuschl", "name": "Gu\\u0308nther", "pid": [], "rank": 11, "affiliation": [], "fullname": "Deuschl, G\\u00fcnther"}, {"surname": "Eimeren", "name": "Thilo", "pid": [], "rank": 12, "affiliation": [], "fullname": "van Eimeren, Thilo"}, {"surname": "Witt", "name": "Karsten", "pid": [], "rank": 13, "affiliation": [], "fullname": "Witt, Karsten"}], "source": [], "dateofcollection": "2017-07-27T19:04:09.131Z", "fulltext": [], "dateoftransformation": "2019-01-23T10:15:19.582Z", "description": [], "format": [], "journal": {"issnPrinted": "2213-1582", "conferencedate": "", "conferenceplace": "", "name": "NeuroImage: Clinical", "edition": "", "iss": "", "sp": "63", "vol": "10", "dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "issnOnline": "", "ep": "70", "issnLinking": ""}, "coverage": [], "publisher": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "Elsevier BV"}, "language": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "bestaccessright": {"classid": "OPEN", "classname": "Open Access", "schemename": "dnet:access_modes", "schemeid": "dnet:access_modes"}, "country": [{"classid": "IT", "classname": "Italy", "schemeid": "dnet:countries", "schemename": "dnet:countries"}], "extraInfo": [], "originalId": ["10.1016/j.nicl.2015.11.006"], "dateofacceptance": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "2016-01-01"}, "title": [{"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", 
"invisible": false, "trust": ""}, "qualifier": {"classid": "main title", "classname": "main title", "schemename": "dnet:dataCite_title", "schemeid": "dnet:dataCite_title"}, "value": "Altered brain activation in a reversal learning task unmasks adaptive changes in cognitive control in writer's cramp"}]} -{"context": [], "dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "sysimport:crosswalk:datasetarchive", "classname": "sysimport:crosswalk:datasetarchive", "schemename": "dnet:provenanceActions", "schemeid": "dnet:provenanceActions"}, "inferred": true, "inferenceprovenance": "dedup-similarity-result-levenstein", "invisible": false, "trust": "0.9"}, "resourcetype": {"classid": "0004", "classname": "Conference object", "schemename": "dnet:dataCite_resource", "schemeid": "dnet:dataCite_resource"}, "pid": [], "contributor": [], "resulttype": {"classid": "publication", "classname": "publication", "schemename": "dnet:result_typologies", "schemeid": "dnet:result_typologies"}, "relevantdate": [], "collectedfrom": [{"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "CRIS UNS (Current Research Information System University of Novi Sad)", "key": "10|CRIS_UNS____::f66f1bd369679b5b077dcdf006089556"}], "id": "50|CrisUnsNoviS::9f9d014eea45dab432cab636c4c9cf39", "subject": [], "instance": [{"refereed": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "hostedby": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "CRIS UNS (Current Research Information System University of Novi Sad)", "key": "10|CRIS_UNS____::f66f1bd369679b5b077dcdf006089556"}, "processingchargeamount": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "license": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "url": ["https://www.cris.uns.ac.rs/record.jsf?recordId=113444&source=OpenAIRE&language=en"], "distributionlocation": "", "processingchargecurrency": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "dateofacceptance": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "2019-01-01"}, "collectedfrom": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "CRIS UNS (Current Research Information System University of Novi Sad)", "key": 
"10|CRIS_UNS____::f66f1bd369679b5b077dcdf006089556"}, "accessright": {"classid": "UNKNOWN", "classname": "UNKNOWN", "schemename": "dnet:access_modes", "schemeid": "dnet:access_modes"}, "instancetype": {"classid": "0004", "classname": "Conference object", "schemename": "dnet:dataCite_resource", "schemeid": "dnet:dataCite_resource"}}], "embargoenddate": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "lastupdatetimestamp": 0, "author": [{"surname": "Zeuner", "name": "Kirsten E.", "pid": [], "rank": 1, "affiliation": [], "fullname": "Zeuner, Kirsten E."}, {"surname": "Knutzen", "name": "Arne", "pid": [], "rank": 2, "affiliation": [], "fullname": "Knutzen, Arne"}, {"surname": "Granert", "name": "Oliver", "pid": [{"qualifier": {"classid": "ORCID", "classname": "ORCID"}, "value": "0000-0002-0656-1023"}, {"qualifier": {"classid": "pubmed", "classname": "pubmed"}, "value": "pubmed.it"}], "rank": 3, "affiliation": [], "fullname": "Granert, Oliver"}, {"surname": "Sablowsky", "name": "Simone", "pid": [{"qualifier": {"classid": "id", "classname": "id"}, "value": "12345678"}], "rank": 4, "affiliation": [], "fullname": "Sablowsky, Simone"}, {"surname": "Go\\u0308tz", "name": "Julia", "pid": [], "rank": 5, "affiliation": [], "fullname": "G\\u00f6tz, Julia"}, {"surname": "Wolff", "name": "Stephan", "pid": [], "rank": 6, "affiliation": [], "fullname": "Wolff, Stephan"}, {"surname": "Jansen", "name": "Olav", "pid": [{"qualifier": {"classid": "ORCID", "classname": "ORCID"}, "value": "0000-0000-0656-1023"},{"qualifier": {"classid": "id", "classname": "id"}, "value": "987654321"}], "rank": 7, "affiliation": [], "fullname": "Jansen, Olav"}, {"surname": "Dressler", "name": "Dirk", "pid": [], "rank": 8, "affiliation": [], "fullname": "Dressler, Dirk"}, {"surname": "Schneider", "name": "Susanne A.", "pid": [], "rank": 9, "affiliation": [], "fullname": "Schneider, Susanne A."}], "source": [], "dateofcollection": "2020-03-10T15:05:38.685Z", "fulltext": [], "dateoftransformation": "2020-03-11T20:11:13.15Z", "description": [], "format": [], "journal": {"issnPrinted": "", "conferencedate": "", "conferenceplace": "", "name": "", "edition": "", "iss": "", "sp": "", "vol": "", "dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "issnOnline": "", "ep": "", "issnLinking": ""}, "coverage": [], "publisher": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "language": {"classid": "en", "classname": "en", "schemename": "dnet:languages", "schemeid": "dnet:languages"}, "bestaccessright": {"classid": "UNKNOWN", "classname": "not available", "schemename": "dnet:access_modes", "schemeid": "dnet:access_modes"}, "country": [{"classid": "FI", "classname": "Finland", "schemeid": "dnet:countries", "schemename": "dnet:countries"}], "extraInfo": [], "originalId": ["(BISIS)113444", "https://www.cris.uns.ac.rs/record.jsf?recordId=113444&source=OpenAIRE&language=en"], "dateofacceptance": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, 
"inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "2016-01-01"}, "title": [{"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "qualifier": {"classid": "test title", "classname": "test title", "schemename": "dnet:dataCite_title", "schemeid": "dnet:dataCite_title"}, "value": "Antichains of copies of ultrahomogeneous structures"}]} \ No newline at end of file +{"context": [], "dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "sysimport:crosswalk:datasetarchive", "classname": "sysimport:crosswalk:datasetarchive", "schemename": "dnet:provenanceActions", "schemeid": "dnet:provenanceActions"}, "inferred": false, "inferenceprovenance": "dedup-similarity-result-levenstein", "invisible": false, "trust": "0.95"}, "resourcetype": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "pid": [], "contributor": [], "resulttype": {"classid": "publication", "classname": "publication", "schemename": "dnet:result_typologies", "schemeid": "dnet:result_typologies"}, "relevantdate": [], "collectedfrom": [{"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "Journal.fi", "key": "10|openaire____::6eef8049d0feedc089ee009abca55e35"}], "id": "50|a89337edbe55::4930db9e954866d70916cbfba9f81f97", "subject": [], "instance": [{"refereed": null, "hostedby": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "Journal.fi", "key": "10|openaire____::6eef8049d0feedc089ee009abca55e35"}, "processingchargeamount": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "license": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "url": [], "distributionlocation": "", "processingchargecurrency": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "dateofacceptance": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "2016-01-01"}, "collectedfrom": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "Journal.fi", "key": "10|openaire____::6eef8049d0feedc089ee009abca55e35"}, "accessright": {"classid": "OPEN", "classname": "Open Access", "schemename": "dnet:access_modes", "schemeid": "dnet:access_modes"}, "instancetype": {"classid": "0001", "classname": "Article", "schemename": "dnet:dataCite_resource", "schemeid": "dnet:dataCite_resource"}}], 
"embargoenddate": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "lastupdatetimestamp": 0, "author": [{"surname": "Go\\u0308tz", "name": "Julia", "pid": [], "rank": 5, "affiliation": [], "fullname": "G\\u00f6tz, Julia"}, {"surname": "Wolff", "name": "Stephan", "pid": [], "rank": 6, "affiliation": [], "fullname": "Wolff, Stephan"}, {"surname": "Jansen", "name": "Olav", "pid": [], "rank": 7, "affiliation": [], "fullname": "Jansen, Olav"}, {"surname": "Dressler", "name": "Dirk", "pid": [{"qualifier": {"classid": "ORCID", "classname": "ORCID"}, "value": "0000-0000-0656-9999"},{"qualifier": {"classid": "id", "classname": "id"}, "value": "987654321"}], "rank": 8, "affiliation": [], "fullname": "Dressler, Dirk"}, {"surname": "Schneider", "name": "Susanne A.", "pid": [], "rank": 9, "affiliation": [], "fullname": "Schneider, Susanne A."}], "source": [], "dateofcollection": "2019-11-05T14:49:22.351Z", "fulltext": [], "dateoftransformation": "2019-11-05T16:10:58.988Z", "description": [], "format": [], "journal": {"issnPrinted": "1459-6067", "conferencedate": "", "conferenceplace": "", "name": "Agricultural and Food Science", "edition": "", "iss": "3", "sp": "", "vol": "27", "dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "issnOnline": "1795-1895", "ep": "", "issnLinking": ""}, "coverage": [], "publisher": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "language": {"classid": "eng", "classname": "English", "schemename": "dnet:languages", "schemeid": "dnet:languages"}, "bestaccessright": {"classid": "OPEN", "classname": "Open Access", "schemename": "dnet:access_modes", "schemeid": "dnet:access_modes"}, "country": [], "extraInfo": [], "originalId": [], "dateofacceptance": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "2018-09-30"}, "title": [{"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "qualifier": {"classid": "main title", "classname": "main title", "schemename": "dnet:dataCite_title", "schemeid": "dnet:dataCite_title"}, "value": "Altered brain activation in a reversal learning task unmasks adaptive changes in cognitive control in writer's cramp"}]} +{"context": [], "dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "sysimport:crosswalk:repository", "classname": "sysimport:crosswalk:repository", "schemename": "dnet:provenanceActions", "schemeid": "dnet:provenanceActions"}, "inferred": true, "inferenceprovenance": "dedup-similarity-result-levenstein", "invisible": false, "trust": "0.9"}, "resourcetype": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "pid": [{"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, 
"inferenceprovenance": "", "invisible": false, "trust": ""}, "qualifier": {"classid": "doi", "classname": "doi", "schemename": "dnet:pid_types", "schemeid": "dnet:pid_types"}, "value": "10.1016/j.nicl.2015.11.006"}], "contributor": [], "resulttype": {"classid": "publication", "classname": "publication", "schemename": "dnet:result_typologies", "schemeid": "dnet:result_typologies"}, "relevantdate": [], "collectedfrom": [{"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "BASE (Open Access Aggregator)", "key": "10|openaire____::df45502607927471ecf8a6ae83683ff5"}], "id": "50|base_oa_____::0968af610a356656706657e4f234b340", "subject": [], "instance": [{"refereed": null, "hostedby": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "NeuroImage: Clinical", "key": "10|doajarticles::0c0e74daa5d95504eade9c81ebbd5b8a"}, "processingchargeamount": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "license": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "http://creativecommons.org/licenses/by-nc-nd/4.0/"}, "url": ["http://dx.doi.org/10.1016/j.nicl.2015.11.006"], "distributionlocation": "", "processingchargecurrency": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "dateofacceptance": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "2016-01-01"}, "collectedfrom": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "BASE (Open Access Aggregator)", "key": "10|openaire____::df45502607927471ecf8a6ae83683ff5"}, "accessright": {"classid": "OPEN", "classname": "Open Access", "schemename": "dnet:access_modes", "schemeid": "dnet:access_modes"}, "instancetype": {"classid": "0001", "classname": "Article", "schemename": "dnet:publication_resource", "schemeid": "dnet:publication_resource"}}], "embargoenddate": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "lastupdatetimestamp": 0, "author": [{"surname": "Zeuner", "name": "Kirsten E.", "pid": [], "rank": 1, "affiliation": [], "fullname": "Zeuner, Kirsten E."}, {"surname": "Knutzen", "name": "Arne", "pid": [], "rank": 2, "affiliation": [], "fullname": "Knutzen, Arne"}, {"surname": "Granert", "name": "Oliver", "pid": [{"qualifier": {"classid": "ORCID", "classname": "ORCID"}, "value": "0000-0002-0656-1023"}], 
"rank": 3, "affiliation": [], "fullname": "Granert, Oliver"}, {"surname": "Sablowsky", "name": "Simone", "pid": [], "rank": 4, "affiliation": [], "fullname": "Sablowsky, Simone"}, {"surname": "Go\\u0308tz", "name": "Julia", "pid": [], "rank": 5, "affiliation": [], "fullname": "G\\u00f6tz, Julia"}, {"surname": "Wolff", "name": "Stephan", "pid": [], "rank": 6, "affiliation": [], "fullname": "Wolff, Stephan"}, {"surname": "Jansen", "name": "Olav", "pid": [], "rank": 7, "affiliation": [], "fullname": "Jansen, Olav"}, {"surname": "Dressler", "name": "Dirk", "pid": [], "rank": 8, "affiliation": [], "fullname": "Dressler, Dirk"}, {"surname": "Schneider", "name": "Susanne A.", "pid": [], "rank": 9, "affiliation": [], "fullname": "Schneider, Susanne A."}, {"surname": "Klein", "name": "Christine", "pid": [], "rank": 10, "affiliation": [], "fullname": "Klein, Christine"}, {"surname": "Deuschl", "name": "Gu\\u0308nther", "pid": [], "rank": 11, "affiliation": [], "fullname": "Deuschl, G\\u00fcnther"}, {"surname": "Eimeren", "name": "Thilo", "pid": [], "rank": 12, "affiliation": [], "fullname": "van Eimeren, Thilo"}, {"surname": "Witt", "name": "Karsten", "pid": [], "rank": 13, "affiliation": [], "fullname": "Witt, Karsten"}], "source": [], "dateofcollection": "2017-07-27T19:04:09.131Z", "fulltext": [], "dateoftransformation": "2019-01-23T10:15:19.582Z", "description": [], "format": [], "journal": {"issnPrinted": "2213-1582", "conferencedate": "", "conferenceplace": "", "name": "NeuroImage: Clinical", "edition": "", "iss": "", "sp": "63", "vol": "10", "dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "issnOnline": "", "ep": "70", "issnLinking": ""}, "coverage": [], "publisher": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "Elsevier BV"}, "language": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "bestaccessright": {"classid": "OPEN", "classname": "Open Access", "schemename": "dnet:access_modes", "schemeid": "dnet:access_modes"}, "country": [{"classid": "IT", "classname": "Italy", "schemeid": "dnet:countries", "schemename": "dnet:countries"}], "extraInfo": [], "originalId": ["10.1016/j.nicl.2015.11.006"], "dateofacceptance": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "2016-01-01"}, "title": [{"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "qualifier": {"classid": "main title", "classname": "main title", "schemename": "dnet:dataCite_title", "schemeid": "dnet:dataCite_title"}, "value": "Altered brain activation in a reversal learning task unmasks adaptive changes in cognitive control in writer's cramp"}]} +{"context": [], "dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "sysimport:crosswalk:datasetarchive", "classname": "sysimport:crosswalk:datasetarchive", "schemename": "dnet:provenanceActions", "schemeid": "dnet:provenanceActions"}, "inferred": true, "inferenceprovenance": 
"dedup-similarity-result-levenstein", "invisible": false, "trust": "0.9"}, "resourcetype": {"classid": "0004", "classname": "Conference object", "schemename": "dnet:dataCite_resource", "schemeid": "dnet:dataCite_resource"}, "pid": [], "contributor": [], "resulttype": {"classid": "publication", "classname": "publication", "schemename": "dnet:result_typologies", "schemeid": "dnet:result_typologies"}, "relevantdate": [], "collectedfrom": [{"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "CRIS UNS (Current Research Information System University of Novi Sad)", "key": "10|CRIS_UNS____::f66f1bd369679b5b077dcdf006089556"}], "id": "50|CrisUnsNoviS::9f9d014eea45dab432cab636c4c9cf39", "subject": [], "instance": [{"refereed": null, "hostedby": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "CRIS UNS (Current Research Information System University of Novi Sad)", "key": "10|CRIS_UNS____::f66f1bd369679b5b077dcdf006089556"}, "processingchargeamount": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "license": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "url": ["https://www.cris.uns.ac.rs/record.jsf?recordId=113444&source=OpenAIRE&language=en"], "distributionlocation": "", "processingchargecurrency": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "dateofacceptance": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "2019-01-01"}, "collectedfrom": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "CRIS UNS (Current Research Information System University of Novi Sad)", "key": "10|CRIS_UNS____::f66f1bd369679b5b077dcdf006089556"}, "accessright": {"classid": "UNKNOWN", "classname": "UNKNOWN", "schemename": "dnet:access_modes", "schemeid": "dnet:access_modes"}, "instancetype": {"classid": "0004", "classname": "Conference object", "schemename": "dnet:dataCite_resource", "schemeid": "dnet:dataCite_resource"}}], "embargoenddate": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "lastupdatetimestamp": 0, "author": [{"surname": "Zeuner", "name": "Kirsten E.", "pid": [], "rank": 1, "affiliation": [], "fullname": "Zeuner, Kirsten E."}, {"surname": "Knutzen", "name": "Arne", "pid": [], "rank": 2, "affiliation": [], "fullname": "Knutzen, Arne"}, 
{"surname": "Granert", "name": "Oliver", "pid": [{"qualifier": {"classid": "ORCID", "classname": "ORCID"}, "value": "0000-0002-0656-1023"}, {"qualifier": {"classid": "pubmed", "classname": "pubmed"}, "value": "pubmed.it"}], "rank": 3, "affiliation": [], "fullname": "Granert, Oliver"}, {"surname": "Sablowsky", "name": "Simone", "pid": [{"qualifier": {"classid": "id", "classname": "id"}, "value": "12345678"}], "rank": 4, "affiliation": [], "fullname": "Sablowsky, Simone"}, {"surname": "Go\\u0308tz", "name": "Julia", "pid": [], "rank": 5, "affiliation": [], "fullname": "G\\u00f6tz, Julia"}, {"surname": "Wolff", "name": "Stephan", "pid": [], "rank": 6, "affiliation": [], "fullname": "Wolff, Stephan"}, {"surname": "Jansen", "name": "Olav", "pid": [{"qualifier": {"classid": "ORCID", "classname": "ORCID"}, "value": "0000-0000-0656-1023"},{"qualifier": {"classid": "id", "classname": "id"}, "value": "987654321"}], "rank": 7, "affiliation": [], "fullname": "Jansen, Olav"}, {"surname": "Dressler", "name": "Dirk", "pid": [], "rank": 8, "affiliation": [], "fullname": "Dressler, Dirk"}, {"surname": "Schneider", "name": "Susanne A.", "pid": [], "rank": 9, "affiliation": [], "fullname": "Schneider, Susanne A."}], "source": [], "dateofcollection": "2020-03-10T15:05:38.685Z", "fulltext": [], "dateoftransformation": "2020-03-11T20:11:13.15Z", "description": [], "format": [], "journal": {"issnPrinted": "", "conferencedate": "", "conferenceplace": "", "name": "", "edition": "", "iss": "", "sp": "", "vol": "", "dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "issnOnline": "", "ep": "", "issnLinking": ""}, "coverage": [], "publisher": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": ""}, "language": {"classid": "en", "classname": "en", "schemename": "dnet:languages", "schemeid": "dnet:languages"}, "bestaccessright": {"classid": "UNKNOWN", "classname": "not available", "schemename": "dnet:access_modes", "schemeid": "dnet:access_modes"}, "country": [{"classid": "FI", "classname": "Finland", "schemeid": "dnet:countries", "schemename": "dnet:countries"}], "extraInfo": [], "originalId": ["(BISIS)113444", "https://www.cris.uns.ac.rs/record.jsf?recordId=113444&source=OpenAIRE&language=en"], "dateofacceptance": {"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "value": "2016-01-01"}, "title": [{"dataInfo": {"deletedbyinference": false, "provenanceaction": {"classid": "", "classname": "", "schemename": "", "schemeid": ""}, "inferred": false, "inferenceprovenance": "", "invisible": false, "trust": ""}, "qualifier": {"classid": "test title", "classname": "test title", "schemename": "dnet:dataCite_title", "schemeid": "dnet:dataCite_title"}, "value": "Antichains of copies of ultrahomogeneous structures"}]} \ No newline at end of file diff --git a/dhp-workflows/dhp-dedup-scholexplorer/pom.xml b/dhp-workflows/dhp-dedup-scholexplorer/pom.xml index 429c8a648..e9e11b417 100644 --- a/dhp-workflows/dhp-dedup-scholexplorer/pom.xml +++ b/dhp-workflows/dhp-dedup-scholexplorer/pom.xml @@ -3,7 +3,7 @@ dhp-workflows eu.dnetlib.dhp - 1.2.2-SNAPSHOT + 
1.2.3-SNAPSHOT 4.0.0 diff --git a/dhp-workflows/dhp-distcp/pom.xml b/dhp-workflows/dhp-distcp/pom.xml index 8454c29a4..5707ddfc5 100644 --- a/dhp-workflows/dhp-distcp/pom.xml +++ b/dhp-workflows/dhp-distcp/pom.xml @@ -3,7 +3,7 @@ dhp-workflows eu.dnetlib.dhp - 1.2.2-SNAPSHOT + 1.2.3-SNAPSHOT 4.0.0 diff --git a/dhp-workflows/dhp-doiboost/pom.xml b/dhp-workflows/dhp-doiboost/pom.xml index 168442942..39bb81ec1 100644 --- a/dhp-workflows/dhp-doiboost/pom.xml +++ b/dhp-workflows/dhp-doiboost/pom.xml @@ -1,10 +1,9 @@ - + dhp-workflows eu.dnetlib.dhp - 1.2.2-SNAPSHOT + 1.2.3-SNAPSHOT 4.0.0 diff --git a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/crossref/Crossref2Oaf.scala b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/crossref/Crossref2Oaf.scala index c1089ec28..f39dd5be8 100644 --- a/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/crossref/Crossref2Oaf.scala +++ b/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/crossref/Crossref2Oaf.scala @@ -166,8 +166,10 @@ case object Crossref2Oaf { val has_review = (json \ "relation" \"has-review" \ "id") - if(has_review != JNothing) - instance.setRefereed(asField("peerReviewed")) + if(has_review != JNothing) { + instance.setRefereed( + createQualifier("0001", "peerReviewed", "dnet:review_levels", "dnet:review_levels")) + } instance.setAccessright(getRestrictedQualifier()) diff --git a/dhp-workflows/dhp-enrichment/pom.xml b/dhp-workflows/dhp-enrichment/pom.xml index 2dc0f2436..e71a72f3e 100644 --- a/dhp-workflows/dhp-enrichment/pom.xml +++ b/dhp-workflows/dhp-enrichment/pom.xml @@ -3,7 +3,7 @@ dhp-workflows eu.dnetlib.dhp - 1.2.2-SNAPSHOT + 1.2.3-SNAPSHOT 4.0.0 diff --git a/dhp-workflows/dhp-graph-mapper/pom.xml b/dhp-workflows/dhp-graph-mapper/pom.xml index aee3d27c1..f650f1c17 100644 --- a/dhp-workflows/dhp-graph-mapper/pom.xml +++ b/dhp-workflows/dhp-graph-mapper/pom.xml @@ -3,7 +3,7 @@ dhp-workflows eu.dnetlib.dhp - 1.2.2-SNAPSHOT + 1.2.3-SNAPSHOT 4.0.0 diff --git a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/clean/CleanGraphSparkJob.java b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/clean/CleanGraphSparkJob.java new file mode 100644 index 000000000..b2c7152d5 --- /dev/null +++ b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/clean/CleanGraphSparkJob.java @@ -0,0 +1,109 @@ + +package eu.dnetlib.dhp.oa.graph.clean; + +import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession; + +import java.util.Optional; + +import org.apache.commons.io.IOUtils; +import org.apache.spark.SparkConf; +import org.apache.spark.api.java.function.MapFunction; +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Encoders; +import org.apache.spark.sql.SaveMode; +import org.apache.spark.sql.SparkSession; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.fasterxml.jackson.databind.ObjectMapper; + +import eu.dnetlib.dhp.application.ArgumentApplicationParser; +import eu.dnetlib.dhp.common.HdfsSupport; +import eu.dnetlib.dhp.oa.graph.raw.common.VocabularyGroup; +import eu.dnetlib.dhp.schema.oaf.*; +import eu.dnetlib.dhp.utils.ISLookupClientFactory; +import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService; + +public class CleanGraphSparkJob { + + private static final Logger log = LoggerFactory.getLogger(CleanGraphSparkJob.class); + + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + public static void main(String[] args) throws Exception { + + String jsonConfiguration = IOUtils 
+ .toString( + CleanGraphSparkJob.class + .getResourceAsStream( + "/eu/dnetlib/dhp/oa/graph/input_clean_graph_parameters.json")); + final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration); + parser.parseArgument(args); + + Boolean isSparkSessionManaged = Optional + .ofNullable(parser.get("isSparkSessionManaged")) + .map(Boolean::valueOf) + .orElse(Boolean.TRUE); + log.info("isSparkSessionManaged: {}", isSparkSessionManaged); + + String inputPath = parser.get("inputPath"); + log.info("inputPath: {}", inputPath); + + String outputPath = parser.get("outputPath"); + log.info("outputPath: {}", outputPath); + + String isLookupUrl = parser.get("isLookupUrl"); + log.info("isLookupUrl: {}", isLookupUrl); + + String graphTableClassName = parser.get("graphTableClassName"); + log.info("graphTableClassName: {}", graphTableClassName); + + Class<? extends Oaf> entityClazz = (Class<? extends Oaf>) Class.forName(graphTableClassName); + + final ISLookUpService isLookupService = ISLookupClientFactory.getLookUpService(isLookupUrl); + final VocabularyGroup vocs = VocabularyGroup.loadVocsFromIS(isLookupService); + + SparkConf conf = new SparkConf(); + runWithSparkSession( + conf, + isSparkSessionManaged, + spark -> { + removeOutputDir(spark, outputPath); + fixGraphTable(spark, vocs, inputPath, entityClazz, outputPath); + }); + } + + private static <T extends Oaf> void fixGraphTable( + SparkSession spark, + VocabularyGroup vocs, + String inputPath, + Class<T> clazz, + String outputPath) { + + final CleaningRuleMap mapping = CleaningRuleMap.create(vocs); + + readTableFromPath(spark, inputPath, clazz) + .map((MapFunction<T, T>) value -> OafCleaner.apply(value, mapping), Encoders.bean(clazz)) + .write() + .mode(SaveMode.Overwrite) + .option("compression", "gzip") + .json(outputPath); + } + + private static <T extends Oaf> Dataset<T> readTableFromPath( + SparkSession spark, String inputEntityPath, Class<T> clazz) { + + log.info("Reading Graph table from: {}", inputEntityPath); + return spark + .read() + .textFile(inputEntityPath) + .map( + (MapFunction<String, T>) value -> OBJECT_MAPPER.readValue(value, clazz), + Encoders.bean(clazz)); + } + + private static void removeOutputDir(SparkSession spark, String path) { + HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration()); + } + +}
diff --git a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/clean/CleaningRuleMap.java b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/clean/CleaningRuleMap.java new file mode 100644 index 000000000..8006f7300 --- /dev/null +++ b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/clean/CleaningRuleMap.java @@ -0,0 +1,40 @@ + +package eu.dnetlib.dhp.oa.graph.clean; + +import java.io.Serializable; +import java.util.HashMap; + +import eu.dnetlib.dhp.common.FunctionalInterfaceSupport.SerializableConsumer; +import eu.dnetlib.dhp.oa.graph.raw.common.VocabularyGroup; +import eu.dnetlib.dhp.schema.oaf.Qualifier; +import eu.dnetlib.dhp.schema.oaf.StructuredProperty; + +public class CleaningRuleMap extends HashMap<Class, SerializableConsumer<Object>> implements Serializable { + + /** + * Creates the mapping for the Oaf types subject to cleaning. + * + * @param vocabularies the vocabularies used to resolve term codes and synonyms + */ + public static CleaningRuleMap create(VocabularyGroup vocabularies) { + CleaningRuleMap mapping = new CleaningRuleMap(); + mapping.put(Qualifier.class, o -> { + Qualifier q = (Qualifier) o; + if (vocabularies.vocabularyExists(q.getSchemeid())) { + Qualifier newValue = vocabularies.lookup(q.getSchemeid(), q.getClassid()); + q.setClassid(newValue.getClassid()); + q.setClassname(newValue.getClassname()); + } + }); + mapping.put(StructuredProperty.class, o -> { + StructuredProperty sp = (StructuredProperty) o; + // TODO implement a policy + /* + * if (StringUtils.isBlank(sp.getValue())) { sp.setValue(null); sp.setQualifier(null); sp.setDataInfo(null); + * } + */ + }); + return mapping; + } + +}
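To make the extension point concrete: a minimal sketch of registering an additional rule on the map returned by CleaningRuleMap.create(vocs). It assumes a VocabularyGroup instance named vocs is in scope; the StructuredProperty body implements one possible version of the policy left above as a TODO and is purely illustrative, not the committed behaviour.

    // Sketch: replace the no-op StructuredProperty rule with one that drops blank values.
    // Rules must survive Spark serialization, hence the SerializableConsumer-typed map values.
    CleaningRuleMap mapping = CleaningRuleMap.create(vocs);
    mapping.put(StructuredProperty.class, o -> {
        StructuredProperty sp = (StructuredProperty) o;
        if (StringUtils.isBlank(sp.getValue())) { // org.apache.commons.lang3.StringUtils
            sp.setValue(null);
            sp.setQualifier(null);
            sp.setDataInfo(null);
        }
    });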
diff --git a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/clean/OafCleaner.java b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/clean/OafCleaner.java new file mode 100644 index 000000000..9ba153ba5 --- /dev/null +++ b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/clean/OafCleaner.java @@ -0,0 +1,82 @@ + +package eu.dnetlib.dhp.oa.graph.clean; + +import java.io.Serializable; +import java.lang.reflect.Field; +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; +import java.util.Objects; + +import eu.dnetlib.dhp.schema.oaf.Oaf; + +public class OafCleaner implements Serializable { + + public static <E extends Oaf> E apply(E oaf, CleaningRuleMap mapping) { + try { + navigate(oaf, mapping); + } catch (IllegalAccessException e) { + throw new RuntimeException(e); + } + return oaf; + } + + private static void navigate(Object o, CleaningRuleMap mapping) throws IllegalAccessException { + if (isPrimitive(o)) { + return; + } else if (isIterable(o.getClass())) { + for (final Object elem : (Iterable) o) { + navigate(elem, mapping); + } + } else if (hasMapping(o, mapping)) { + mapping.get(o.getClass()).accept(o); + } else { + for (final Field f : getAllFields(o.getClass())) { + f.setAccessible(true); + final Object val = f.get(o); + if (!isPrimitive(val) && hasMapping(val, mapping)) { + mapping.get(val.getClass()).accept(val); + } else { + navigate(f.get(o), mapping); + } + } + } + } + + private static boolean hasMapping(Object o, CleaningRuleMap mapping) { + return mapping.containsKey(o.getClass()); + } + + private static boolean isIterable(final Class<?> cl) { + return Iterable.class.isAssignableFrom(cl); + } + + private static boolean isPrimitive(Object o) { + return Objects.isNull(o) + || o.getClass().isPrimitive() + || o instanceof Class + || o instanceof Integer + || o instanceof Double + || o instanceof Float + || o instanceof Long + || o instanceof Boolean + || o instanceof String + || o instanceof Byte; + } + + private static List<Field> getAllFields(Class<?> clazz) { + return getAllFields(new LinkedList<>(), clazz); + } + + private static List<Field> getAllFields(List<Field> fields, Class<?> clazz) { + fields.addAll(Arrays.asList(clazz.getDeclaredFields())); + + final Class<?> superclass = clazz.getSuperclass(); + if (Objects.nonNull(superclass) && superclass.getPackage().equals(Oaf.class.getPackage())) { + getAllFields(fields, superclass); + } + + return fields; + } + +}
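A unit-test style sketch of the reflective cleaning pass implemented above: apply() delegates to navigate(), which walks every field of the entity graph and fires the registered consumer when it meets a mapped type. The vocabulary content is assumed (a "dnet:languages" vocabulary registered in vocs); entity and setter names come from the eu.dnetlib.dhp.schema.oaf model.

    // Sketch: a nested Qualifier is found by reflection and rewritten in place.
    Publication p = new Publication();
    Qualifier language = new Qualifier();
    language.setClassid("en"); // synonym code, as it may appear in raw records
    language.setSchemeid("dnet:languages");
    p.setLanguage(language);

    OafCleaner.apply(p, CleaningRuleMap.create(vocs));
    // The Qualifier rule fires because vocabularyExists("dnet:languages") holds, so
    // p.getLanguage() now carries the canonical classid/classname resolved via lookup().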
diff --git a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/AbstractMdRecordToOafMapper.java b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/AbstractMdRecordToOafMapper.java index ab1e89187..fc77950d0 100644 --- a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/AbstractMdRecordToOafMapper.java +++ b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/AbstractMdRecordToOafMapper.java @@ -63,6 +63,8 @@ public abstract class AbstractMdRecordToOafMapper { protected final VocabularyGroup vocs; + private final boolean invisible; + protected static final String DATACITE_SCHEMA_KERNEL_4 = "http://datacite.org/schema/kernel-4"; protected static final String DATACITE_SCHEMA_KERNEL_3 = "http://datacite.org/schema/kernel-3"; protected static final Qualifier ORCID_PID_TYPE = qualifier( @@ -85,8 +87,9 @@ public abstract class AbstractMdRecordToOafMapper { protected static final Qualifier MAIN_TITLE_QUALIFIER = qualifier( "main title", "main title", "dnet:dataCite_title", "dnet:dataCite_title"); - protected AbstractMdRecordToOafMapper(final VocabularyGroup vocs) { + protected AbstractMdRecordToOafMapper(final VocabularyGroup vocs, final boolean invisible) { this.vocs = vocs; + this.invisible = invisible; } public List<Oaf> processMdRecord(final String xml) { @@ -112,7 +115,7 @@ public abstract class AbstractMdRecordToOafMapper { return null; } - final DataInfo info = prepareDataInfo(doc); + final DataInfo info = prepareDataInfo(doc, invisible); + final long lastUpdateTimestamp = new Date().getTime(); return createOafs(doc, type, collectedFrom, hostedBy, info, lastUpdateTimestamp); @@ -510,11 +513,11 @@ public abstract class AbstractMdRecordToOafMapper { return oaiIProvenance(identifier, baseURL, metadataNamespace, altered, datestamp, harvestDate); } - protected DataInfo prepareDataInfo(final Document doc) { + protected DataInfo prepareDataInfo(final Document doc, final boolean invisible) { final Node n = doc.selectSingleNode("//oaf:datainfo"); if (n == null) { - return dataInfo(false, null, false, false, REPOSITORY_PROVENANCE_ACTIONS, "0.9"); + return dataInfo(false, null, false, invisible, REPOSITORY_PROVENANCE_ACTIONS, "0.9"); } final String paClassId = n.valueOf("./oaf:provenanceaction/@classid"); @@ -528,7 +531,7 @@ public abstract class AbstractMdRecordToOafMapper { final String trust = n.valueOf("./oaf:trust"); return dataInfo( - deletedbyinference, inferenceprovenance, inferred, false, + deletedbyinference, inferenceprovenance, inferred, invisible, qualifier(paClassId, paClassName, paSchemeId, paSchemeName), trust); }
diff --git a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/GenerateEntitiesApplication.java b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/GenerateEntitiesApplication.java index 8e5ba9cd1..3568dc52a 100644 --- a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/GenerateEntitiesApplication.java +++ b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/GenerateEntitiesApplication.java @@ -39,6 +39,8 @@ import eu.dnetlib.dhp.schema.oaf.Project; import eu.dnetlib.dhp.schema.oaf.Publication; import eu.dnetlib.dhp.schema.oaf.Relation; import eu.dnetlib.dhp.schema.oaf.Software; +import eu.dnetlib.dhp.utils.ISLookupClientFactory; +import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService; import scala.Tuple2; public class GenerateEntitiesApplication { @@ -71,7 +73,8 @@ public class GenerateEntitiesApplication { final String isLookupUrl = parser.get("isLookupUrl"); log.info("isLookupUrl: {}", isLookupUrl); - final VocabularyGroup vocs = VocabularyGroup.loadVocsFromIS(isLookupUrl); + final ISLookUpService isLookupService = ISLookupClientFactory.getLookUpService(isLookupUrl); + final VocabularyGroup vocs = VocabularyGroup.loadVocsFromIS(isLookupService); final SparkConf conf = new SparkConf(); runWithSparkSession(conf, isSparkSessionManaged, spark -> { @@ -137,10 +140,16 @@ public class GenerateEntitiesApplication { final String type = StringUtils.substringAfter(id, ":"); switch (type.toLowerCase()) { - case "native_oaf": - return new OafToOafMapper(vocs).processMdRecord(s); - case "native_odf": - return new OdfToOafMapper(vocs).processMdRecord(s); + case "oaf-store-cleaned": + case "oaf-store-claim": + return new OafToOafMapper(vocs, false).processMdRecord(s); + case "odf-store-cleaned": + case "odf-store-claim": + return new OdfToOafMapper(vocs, false).processMdRecord(s); + case "oaf-store-intersection": + return new OafToOafMapper(vocs, true).processMdRecord(s); + case "odf-store-intersection": + return new OdfToOafMapper(vocs, true).processMdRecord(s); case "datasource": return Arrays.asList(convertFromJson(s, Datasource.class)); case "organization":
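The new case labels correspond to the key emitted by MigrateMongoMdstoresApplication further below, which concatenates the mdstore format, layout and interpretation. A worked pairing, with illustrative values:

    // format="oaf", layout="store", interpretation="cleaned"
    String type = String.format("%s-%s-%s", "oaf", "store", "cleaned"); // -> "oaf-store-cleaned"
    // ...which the switch above routes to a visible mapping (invisible = false):
    List<Oaf> oafs = new OafToOafMapper(vocs, false).processMdRecord(s);
    // The "*-store-intersection" variants instead build the mapper with invisible = true,
    // so the resulting entities carry dataInfo.invisible = true.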
diff --git a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/MigrateDbEntitiesApplication.java b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/MigrateDbEntitiesApplication.java index 653027039..0bad89e9e 100644 --- a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/MigrateDbEntitiesApplication.java +++ b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/MigrateDbEntitiesApplication.java @@ -50,6 +50,8 @@ import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.google.common.collect.Lists; + import eu.dnetlib.dhp.application.ArgumentApplicationParser; import eu.dnetlib.dhp.common.DbClient; import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication; @@ -71,6 +73,7 @@ import eu.dnetlib.dhp.schema.oaf.Relation; import eu.dnetlib.dhp.schema.oaf.Result; import eu.dnetlib.dhp.schema.oaf.Software; import eu.dnetlib.dhp.schema.oaf.StructuredProperty; +import eu.dnetlib.dhp.utils.ISLookupClientFactory; public class MigrateDbEntitiesApplication extends AbstractMigrationApplication implements Closeable { @@ -151,7 +154,7 @@ public class MigrateDbEntitiesApplication i super(hdfsPath); this.dbClient = new DbClient(dbUrl, dbUser, dbPassword); this.lastUpdateTimestamp = new Date().getTime(); - this.vocs = VocabularyGroup.loadVocsFromIS(isLookupUrl); + this.vocs = VocabularyGroup.loadVocsFromIS(ISLookupClientFactory.getLookUpService(isLookupUrl)); } public void execute(final String sqlFile, final Function<ResultSet, List<Oaf>> producer) @@ -170,7 +173,7 @@ public class MigrateDbEntitiesApplication i final Datasource ds = new Datasource(); ds.setId(createOpenaireId(10, rs.getString("datasourceid"), true)); - ds.setOriginalId(Arrays.asList(rs.getString("datasourceid"))); + ds.setOriginalId(Arrays.asList((String[]) rs.getArray("identities").getArray())); ds .setCollectedfrom( listKeyValues(
diff --git a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/MigrateMongoMdstoresApplication.java b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/MigrateMongoMdstoresApplication.java index 00c1dc4bb..e7703bf72 100644 --- a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/MigrateMongoMdstoresApplication.java +++ b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/MigrateMongoMdstoresApplication.java @@ -26,8 +26,7 @@ public class MigrateMongoMdstoresApplication extends AbstractMigrationApplicatio IOUtils .toString( MigrateMongoMdstoresApplication.class - .getResourceAsStream( - "/eu/dnetlib/dhp/oa/graph/migrate_mongo_mstores_parameters.json"))); + .getResourceAsStream("/eu/dnetlib/dhp/oa/graph/migrate_mongo_mstores_parameters.json"))); parser.parseArgument(args); final String mongoBaseUrl = parser.get("mongoBaseUrl"); @@ -60,7 +59,7 @@ public class MigrateMongoMdstoresApplication extends
AbstractMigrationApplicatio final String currentColl = entry.getValue(); for (final String xml : mdstoreClient.listRecords(currentColl)) { - emit(xml, "native_" + format); + emit(xml, String.format("%s-%s-%s", format, layout, interpretation)); } } } diff --git a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/OafToOafMapper.java b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/OafToOafMapper.java index 6f91ce733..dea80fabd 100644 --- a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/OafToOafMapper.java +++ b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/OafToOafMapper.java @@ -4,13 +4,7 @@ package eu.dnetlib.dhp.oa.graph.raw; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.createOpenaireId; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.field; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty; -import static eu.dnetlib.dhp.schema.common.ModelConstants.DNET_ACCESS_MODES; -import static eu.dnetlib.dhp.schema.common.ModelConstants.DNET_LANGUAGES; -import static eu.dnetlib.dhp.schema.common.ModelConstants.DNET_PID_TYPES; -import static eu.dnetlib.dhp.schema.common.ModelConstants.DNET_PUBLICATION_RESOURCE; -import static eu.dnetlib.dhp.schema.common.ModelConstants.IS_RELATED_TO; -import static eu.dnetlib.dhp.schema.common.ModelConstants.PUBLICATION_DATASET; -import static eu.dnetlib.dhp.schema.common.ModelConstants.RESULT_RESULT; +import static eu.dnetlib.dhp.schema.common.ModelConstants.*; import java.util.ArrayList; import java.util.List; @@ -37,8 +31,8 @@ import eu.dnetlib.dhp.schema.oaf.StructuredProperty; public class OafToOafMapper extends AbstractMdRecordToOafMapper { - public OafToOafMapper(final VocabularyGroup vocs) { - super(vocs); + public OafToOafMapper(final VocabularyGroup vocs, final boolean invisible) { + super(vocs, invisible); } @Override @@ -139,7 +133,7 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper { instance .setAccessright(prepareQualifier(doc, "//oaf:accessrights", DNET_ACCESS_MODES)); instance.setLicense(field(doc.valueOf("//oaf:license"), info)); - instance.setRefereed(field(doc.valueOf("//oaf:refereed"), info)); + instance.setRefereed(prepareQualifier(doc, "//oaf:refereed", DNET_REVIEW_LEVELS)); instance .setProcessingchargeamount(field(doc.valueOf("//oaf:processingchargeamount"), info)); instance @@ -281,12 +275,12 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper { res .add( getRelation( - docId, otherId, RESULT_RESULT, PUBLICATION_DATASET, IS_RELATED_TO, collectedFrom, info, + docId, otherId, RESULT_RESULT, RELATIONSHIP, IS_RELATED_TO, collectedFrom, info, lastUpdateTimestamp)); res .add( getRelation( - otherId, docId, RESULT_RESULT, PUBLICATION_DATASET, IS_RELATED_TO, collectedFrom, info, + otherId, docId, RESULT_RESULT, RELATIONSHIP, IS_RELATED_TO, collectedFrom, info, lastUpdateTimestamp)); } } diff --git a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/OdfToOafMapper.java b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/OdfToOafMapper.java index bbd9442e1..62f8123bb 100644 --- a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/OdfToOafMapper.java +++ b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/OdfToOafMapper.java @@ -4,19 +4,7 @@ package eu.dnetlib.dhp.oa.graph.raw; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.createOpenaireId; import 
static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.field; import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty; -import static eu.dnetlib.dhp.schema.common.ModelConstants.DNET_ACCESS_MODES; -import static eu.dnetlib.dhp.schema.common.ModelConstants.DNET_DATA_CITE_DATE; -import static eu.dnetlib.dhp.schema.common.ModelConstants.DNET_DATA_CITE_RESOURCE; -import static eu.dnetlib.dhp.schema.common.ModelConstants.DNET_LANGUAGES; -import static eu.dnetlib.dhp.schema.common.ModelConstants.DNET_PID_TYPES; -import static eu.dnetlib.dhp.schema.common.ModelConstants.DNET_PUBLICATION_RESOURCE; -import static eu.dnetlib.dhp.schema.common.ModelConstants.HAS_PARTS; -import static eu.dnetlib.dhp.schema.common.ModelConstants.IS_PART_OF; -import static eu.dnetlib.dhp.schema.common.ModelConstants.IS_SUPPLEMENTED_BY; -import static eu.dnetlib.dhp.schema.common.ModelConstants.IS_SUPPLEMENT_TO; -import static eu.dnetlib.dhp.schema.common.ModelConstants.PART; -import static eu.dnetlib.dhp.schema.common.ModelConstants.RESULT_RESULT; -import static eu.dnetlib.dhp.schema.common.ModelConstants.SUPPLEMENT; +import static eu.dnetlib.dhp.schema.common.ModelConstants.*; import java.util.ArrayList; import java.util.Arrays; @@ -44,8 +32,8 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper { public static final String HTTP_DX_DOI_PREIFX = "http://dx.doi.org/"; - public OdfToOafMapper(final VocabularyGroup vocs) { - super(vocs); + public OdfToOafMapper(final VocabularyGroup vocs, final boolean invisible) { + super(vocs, invisible); } @Override @@ -129,7 +117,7 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper { instance .setAccessright(prepareQualifier(doc, "//oaf:accessrights", DNET_ACCESS_MODES)); instance.setLicense(field(doc.valueOf("//oaf:license"), info)); - instance.setRefereed(field(doc.valueOf("//oaf:refereed"), info)); + instance.setRefereed(prepareQualifier(doc, "//oaf:refereed", DNET_REVIEW_LEVELS)); instance.setProcessingchargeamount(field(doc.valueOf("//oaf:processingchargeamount"), info)); instance .setProcessingchargecurrency(field(doc.valueOf("//oaf:processingchargeamount/@currency"), info)); @@ -138,9 +126,16 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper { for (final Object o : doc.selectNodes("//datacite:alternateIdentifier[@alternateIdentifierType='URL']")) { url.add(((Node) o).getText().trim()); } + for (final Object o : doc + .selectNodes("//datacite:alternateIdentifier[@alternateIdentifierType='landingPage']")) { + url.add(((Node) o).getText().trim()); + } for (final Object o : doc.selectNodes("//datacite:identifier[@identifierType='URL']")) { url.add(((Node) o).getText().trim()); } + for (final Object o : doc.selectNodes("//datacite:identifier[@identifierType='landingPage']")) { + url.add(((Node) o).getText().trim()); + } for (final Object o : doc.selectNodes("//datacite:alternateIdentifier[@alternateIdentifierType='DOI']")) { url.add(HTTP_DX_DOI_PREIFX + ((Node) o).getText().trim()); } @@ -379,11 +374,13 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper { res .addAll( prepareListStructPropsWithValidQualifier( - doc, "//datacite:identifier[@identifierType != 'URL']", "@identifierType", DNET_PID_TYPES, info)); + doc, "//datacite:identifier[@identifierType != 'URL' and @identifierType != 'landingPage']", + "@identifierType", DNET_PID_TYPES, info)); res .addAll( prepareListStructPropsWithValidQualifier( - doc, "//datacite:alternateIdentifier[@alternateIdentifierType != 'URL']", + doc, + 
"//datacite:alternateIdentifier[@alternateIdentifierType != 'URL' and @alternateIdentifierType != 'landingPage']", "@alternateIdentifierType", DNET_PID_TYPES, info)); return res; } diff --git a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/common/OafMapperUtils.java b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/common/OafMapperUtils.java index 9beed2837..58f068943 100644 --- a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/common/OafMapperUtils.java +++ b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/common/OafMapperUtils.java @@ -60,6 +60,10 @@ public class OafMapperUtils { .collect(Collectors.toList()); } + public static Qualifier unknown(final String schemeid, final String schemename) { + return qualifier("UNKNOWN", "Unknown", schemeid, schemename); + } + public static Qualifier qualifier( final String classid, final String classname, diff --git a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/common/Vocabulary.java b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/common/Vocabulary.java index c955ee640..9bf198c8b 100644 --- a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/common/Vocabulary.java +++ b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/common/Vocabulary.java @@ -4,14 +4,29 @@ package eu.dnetlib.dhp.oa.graph.raw.common; import java.io.Serializable; import java.util.HashMap; import java.util.Map; +import java.util.Optional; + +import org.apache.commons.lang3.StringUtils; + +import com.google.common.collect.Maps; + +import eu.dnetlib.dhp.schema.oaf.Qualifier; public class Vocabulary implements Serializable { private final String id; private final String name; + /** + * Code to Term mappings for this Vocabulary. + */ private final Map terms = new HashMap<>(); + /** + * Synonym to Code mappings for this Vocabulary. 
+ */ + private final Map synonyms = Maps.newHashMap(); + public Vocabulary(final String id, final String name) { this.id = id; this.name = name; @@ -30,7 +45,7 @@ public class Vocabulary implements Serializable { } public VocabularyTerm getTerm(final String id) { - return terms.get(id.toLowerCase()); + return Optional.ofNullable(id).map(s -> s.toLowerCase()).map(s -> terms.get(s)).orElse(null); } protected void addTerm(final String id, final String name) { @@ -40,4 +55,32 @@ public class Vocabulary implements Serializable { protected boolean termExists(final String id) { return terms.containsKey(id.toLowerCase()); } + + protected void addSynonym(final String syn, final String termCode) { + synonyms.put(syn, termCode.toLowerCase()); + } + + public VocabularyTerm getTermBySynonym(final String syn) { + return getTerm(synonyms.get(syn.toLowerCase())); + } + + public Qualifier getTermAsQualifier(final String termId) { + if (StringUtils.isBlank(termId)) { + return OafMapperUtils.unknown(getId(), getName()); + } else if (termExists(termId)) { + final VocabularyTerm t = getTerm(termId); + return OafMapperUtils.qualifier(t.getId(), t.getName(), getId(), getName()); + } else { + return OafMapperUtils.qualifier(termId, termId, getId(), getName()); + } + } + + public Qualifier getSynonymAsQualifier(final String syn) { + return Optional + .ofNullable(getTermBySynonym(syn)) + .map(term -> getTermAsQualifier(term.getId())) + .orElse(null); + // .orElse(OafMapperUtils.unknown(getId(), getName())); + } + } diff --git a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/common/VocabularyGroup.java b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/common/VocabularyGroup.java index 43ed7f2d9..d9ff62596 100644 --- a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/common/VocabularyGroup.java +++ b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/oa/graph/raw/common/VocabularyGroup.java @@ -1,33 +1,38 @@ package eu.dnetlib.dhp.oa.graph.raw.common; -import java.io.IOException; import java.io.Serializable; -import java.util.HashMap; -import java.util.Map; +import java.util.*; +import java.util.stream.Collectors; -import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; -import eu.dnetlib.dhp.oa.graph.raw.GenerateEntitiesApplication; import eu.dnetlib.dhp.schema.oaf.Qualifier; -import eu.dnetlib.dhp.utils.ISLookupClientFactory; import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException; import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService; public class VocabularyGroup implements Serializable { - public static VocabularyGroup loadVocsFromIS(final String isLookupUrl) throws IOException, ISLookUpException { - final ISLookUpService isLookUpService = ISLookupClientFactory.getLookUpService(isLookupUrl); + public static final String VOCABULARIES_XQUERY = "for $x in collection('/db/DRIVER/VocabularyDSResources/VocabularyDSResourceType') \n" + + + "let $vocid := $x//VOCABULARY_NAME/@code\n" + + "let $vocname := $x//VOCABULARY_NAME/text()\n" + + "for $term in ($x//TERM)\n" + + "return concat($vocid,' @=@ ',$vocname,' @=@ ',$term/@code,' @=@ ',$term/@english_name)"; - final String xquery = IOUtils - .toString( - GenerateEntitiesApplication.class - .getResourceAsStream("/eu/dnetlib/dhp/oa/graph/xquery/load_vocabularies.xquery")); + public static final String VOCABULARY_SYNONYMS_XQUERY = "for $x in collection('/db/DRIVER/VocabularyDSResources/VocabularyDSResourceType')\n" + + + "let $vocid := 
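A usage sketch of the synonym support added above, assuming a vocabulary populated with a single term and synonym (the ids are illustrative). Note that addTerm/addSynonym are protected, so this would run from the same package, as the loader in VocabularyGroup does.

    Vocabulary languages = new Vocabulary("dnet:languages", "dnet:languages");
    languages.addTerm("eng", "English");
    languages.addSynonym("en", "eng"); // synonym keys are stored lowercased by the loader

    Qualifier q = languages.getSynonymAsQualifier("EN"); // lookups are case-insensitive
    // q.getClassid() -> "eng", q.getClassname() -> "English"
    Qualifier u = languages.getTermAsQualifier("");      // blank codes -> UNKNOWN/Unknown fallback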
$x//VOCABULARY_NAME/@code\n" + + "let $vocname := $x//VOCABULARY_NAME/text()\n" + + "for $term in ($x//TERM)\n" + + "for $syn in ($term//SYNONYM/@term)\n" + + "return concat($vocid,' @=@ ',$term/@code,' @=@ ', $syn)\n"; + + public static VocabularyGroup loadVocsFromIS(ISLookUpService isLookUpService) throws ISLookUpException { final VocabularyGroup vocs = new VocabularyGroup(); - for (final String s : isLookUpService.quickSearchProfile(xquery)) { + for (final String s : isLookUpService.quickSearchProfile(VOCABULARIES_XQUERY)) { final String[] arr = s.split("@=@"); if (arr.length == 4) { final String vocId = arr[0].trim(); @@ -40,6 +45,19 @@ public class VocabularyGroup implements Serializable { } vocs.addTerm(vocId, termId, termName); + // vocs.addSynonyms(vocId, termId, termId); + } + } + + for (final String s : isLookUpService.quickSearchProfile(VOCABULARY_SYNONYMS_XQUERY)) { + final String[] arr = s.split("@=@"); + if (arr.length == 3) { + final String vocId = arr[0].trim(); + final String termId = arr[1].trim(); + final String syn = arr[2].trim(); + + vocs.addSynonyms(vocId, termId, syn); + // vocs.addSynonyms(vocId, termId, termId); } } @@ -66,16 +84,37 @@ public class VocabularyGroup implements Serializable { } } - public Qualifier getTermAsQualifier(final String vocId, final String id) { - if (StringUtils.isBlank(id)) { - return OafMapperUtils.qualifier("UNKNOWN", "UNKNOWN", vocId, vocId); - } else if (termExists(vocId, id)) { - final Vocabulary v = vocs.get(vocId.toLowerCase()); - final VocabularyTerm t = v.getTerm(id); - return OafMapperUtils.qualifier(t.getId(), t.getName(), v.getId(), v.getName()); - } else { - return OafMapperUtils.qualifier(id, id, vocId, vocId); + public Set getTerms(String vocId) { + if (!vocabularyExists(vocId)) { + return new HashSet<>(); } + return vocs + .get(vocId.toLowerCase()) + .getTerms() + .values() + .stream() + .map(t -> t.getId()) + .collect(Collectors.toCollection(HashSet::new)); + } + + public Qualifier lookup(String vocId, String id) { + return Optional + .ofNullable(getSynonymAsQualifier(vocId, id)) + .orElse(getTermAsQualifier(vocId, id)); + } + + public Qualifier getTermAsQualifier(final String vocId, final String id) { + if (vocabularyExists(vocId)) { + return vocs.get(vocId.toLowerCase()).getTermAsQualifier(id); + } + return OafMapperUtils.qualifier(id, id, "", ""); + } + + public Qualifier getSynonymAsQualifier(final String vocId, final String syn) { + if (StringUtils.isBlank(vocId)) { + return OafMapperUtils.unknown("", ""); + } + return vocs.get(vocId.toLowerCase()).getSynonymAsQualifier(syn); } public boolean termExists(final String vocId, final String id) { @@ -86,4 +125,16 @@ public class VocabularyGroup implements Serializable { return vocs.containsKey(vocId.toLowerCase()); } + private void addSynonyms(final String vocId, final String termId, final String syn) { + String id = Optional + .ofNullable(vocId) + .map(s -> s.toLowerCase()) + .orElseThrow( + () -> new IllegalArgumentException(String.format("empty vocabulary id for [term:%s, synonym:%s]"))); + Optional + .ofNullable(vocs.get(id)) + .orElseThrow(() -> new IllegalArgumentException("missing vocabulary id: " + vocId)) + .addSynonym(syn.toLowerCase(), termId); + } + } diff --git a/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/clean/oozie_app/config-default.xml b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/clean/oozie_app/config-default.xml new file mode 100644 index 000000000..2e0ed9aee --- /dev/null +++ 
b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/clean/oozie_app/config-default.xml @@ -0,0 +1,18 @@ + + + jobTracker + yarnRM + + + nameNode + hdfs://nameservice1 + + + oozie.use.system.libpath + true + + + oozie.action.sharelib.for.spark + spark2 + + \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/clean/oozie_app/workflow.xml b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/clean/oozie_app/workflow.xml new file mode 100644 index 000000000..7329df29a --- /dev/null +++ b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/clean/oozie_app/workflow.xml @@ -0,0 +1,281 @@ + + + + + graphInputPath + the input path to read graph content + + + graphOutputPath + the target path to store cleaned graph + + + isLookupUrl + the address of the lookUp service + + + + sparkDriverMemory + memory for driver process + + + sparkExecutorMemory + memory for individual executor + + + sparkExecutorCores + number of cores used by single executor + + + oozieActionShareLibForSpark2 + oozie action sharelib for spark 2.* + + + spark2ExtraListeners + com.cloudera.spark.lineage.NavigatorAppListener + spark 2.* extra listeners classname + + + spark2SqlQueryExecutionListeners + com.cloudera.spark.lineage.NavigatorQueryListener + spark 2.* sql query execution listeners classname + + + spark2YarnHistoryServerAddress + spark 2.* yarn history server address + + + spark2EventLogDir + spark 2.* event log dir location + + + + + + + Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}] + + + + + + + + + + + + + + + + yarn + cluster + Clean publications + eu.dnetlib.dhp.oa.graph.clean.CleanGraphSparkJob + dhp-graph-mapper-${projectVersion}.jar + + --executor-cores=${sparkExecutorCores} + --executor-memory=${sparkExecutorMemory} + --driver-memory=${sparkDriverMemory} + --conf spark.extraListeners=${spark2ExtraListeners} + --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} + --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} + --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} + --conf spark.sql.shuffle.partitions=7680 + + --inputPath${graphInputPath}/publication + --outputPath${graphOutputPath}/publication + --graphTableClassNameeu.dnetlib.dhp.schema.oaf.Publication + --isLookupUrl${isLookupUrl} + + + + + + + + yarn + cluster + Clean datasets + eu.dnetlib.dhp.oa.graph.clean.CleanGraphSparkJob + dhp-graph-mapper-${projectVersion}.jar + + --executor-cores=${sparkExecutorCores} + --executor-memory=${sparkExecutorMemory} + --driver-memory=${sparkDriverMemory} + --conf spark.extraListeners=${spark2ExtraListeners} + --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} + --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} + --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} + --conf spark.sql.shuffle.partitions=7680 + + --inputPath${graphInputPath}/dataset + --outputPath${graphOutputPath}/dataset + --graphTableClassNameeu.dnetlib.dhp.schema.oaf.Dataset + --isLookupUrl${isLookupUrl} + + + + + + + + yarn + cluster + Clean otherresearchproducts + eu.dnetlib.dhp.oa.graph.clean.CleanGraphSparkJob + dhp-graph-mapper-${projectVersion}.jar + + --executor-cores=${sparkExecutorCores} + --executor-memory=${sparkExecutorMemory} + --driver-memory=${sparkDriverMemory} + --conf spark.extraListeners=${spark2ExtraListeners} + --conf 
spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} + --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} + --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} + --conf spark.sql.shuffle.partitions=7680 + + --inputPath${graphInputPath}/otherresearchproduct + --outputPath${graphOutputPath}/otherresearchproduct + --graphTableClassNameeu.dnetlib.dhp.schema.oaf.OtherResearchProduct + --isLookupUrl${isLookupUrl} + + + + + + + + yarn + cluster + Clean softwares + eu.dnetlib.dhp.oa.graph.clean.CleanGraphSparkJob + dhp-graph-mapper-${projectVersion}.jar + + --executor-cores=${sparkExecutorCores} + --executor-memory=${sparkExecutorMemory} + --driver-memory=${sparkDriverMemory} + --conf spark.extraListeners=${spark2ExtraListeners} + --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} + --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} + --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} + --conf spark.sql.shuffle.partitions=7680 + + --inputPath${graphInputPath}/software + --outputPath${graphOutputPath}/software + --graphTableClassNameeu.dnetlib.dhp.schema.oaf.Software + --isLookupUrl${isLookupUrl} + + + + + + + + yarn + cluster + Clean datasources + eu.dnetlib.dhp.oa.graph.clean.CleanGraphSparkJob + dhp-graph-mapper-${projectVersion}.jar + + --executor-cores=${sparkExecutorCores} + --executor-memory=${sparkExecutorMemory} + --driver-memory=${sparkDriverMemory} + --conf spark.extraListeners=${spark2ExtraListeners} + --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} + --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} + --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} + --conf spark.sql.shuffle.partitions=7680 + + --inputPath${graphInputPath}/datasource + --outputPath${graphOutputPath}/datasource + --graphTableClassNameeu.dnetlib.dhp.schema.oaf.Datasource + --isLookupUrl${isLookupUrl} + + + + + + + + yarn + cluster + Clean organizations + eu.dnetlib.dhp.oa.graph.clean.CleanGraphSparkJob + dhp-graph-mapper-${projectVersion}.jar + + --executor-cores=${sparkExecutorCores} + --executor-memory=${sparkExecutorMemory} + --driver-memory=${sparkDriverMemory} + --conf spark.extraListeners=${spark2ExtraListeners} + --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} + --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} + --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} + --conf spark.sql.shuffle.partitions=7680 + + --inputPath${graphInputPath}/organization + --outputPath${graphOutputPath}/organization + --graphTableClassNameeu.dnetlib.dhp.schema.oaf.Organization + --isLookupUrl${isLookupUrl} + + + + + + + + yarn + cluster + Clean projects + eu.dnetlib.dhp.oa.graph.clean.CleanGraphSparkJob + dhp-graph-mapper-${projectVersion}.jar + + --executor-cores=${sparkExecutorCores} + --executor-memory=${sparkExecutorMemory} + --driver-memory=${sparkDriverMemory} + --conf spark.extraListeners=${spark2ExtraListeners} + --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} + --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} + --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} + --conf spark.sql.shuffle.partitions=7680 + + --inputPath${graphInputPath}/project + --outputPath${graphOutputPath}/project + --graphTableClassNameeu.dnetlib.dhp.schema.oaf.Project + --isLookupUrl${isLookupUrl} + + + + + + + + yarn + cluster + Clean relations + 
eu.dnetlib.dhp.oa.graph.clean.CleanGraphSparkJob + dhp-graph-mapper-${projectVersion}.jar + + --executor-cores=${sparkExecutorCores} + --executor-memory=${sparkExecutorMemory} + --driver-memory=${sparkDriverMemory} + --conf spark.extraListeners=${spark2ExtraListeners} + --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners} + --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} + --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} + --conf spark.sql.shuffle.partitions=7680 + + --inputPath${graphInputPath}/relation + --outputPath${graphOutputPath}/relation + --graphTableClassNameeu.dnetlib.dhp.schema.oaf.Relation + --isLookupUrl${isLookupUrl} + + + + + + + + + \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/hive/oozie_app/lib/scripts/postprocessing.sql b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/hive/oozie_app/lib/scripts/postprocessing.sql index 6c49679cd..7bec2fe04 100644 --- a/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/hive/oozie_app/lib/scripts/postprocessing.sql +++ b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/hive/oozie_app/lib/scripts/postprocessing.sql @@ -1,10 +1,10 @@ DROP VIEW IF EXISTS ${hiveDbName}.result; CREATE VIEW IF NOT EXISTS result as - select id, dateofcollection, title, publisher, bestaccessright, datainfo, collectedfrom, pid, author, resulttype, language, country, subject, description, dateofacceptance, embargoenddate, resourcetype, context, instance from ${hiveDbName}.publication p + select id, dateofcollection, title, publisher, bestaccessright, datainfo, collectedfrom, pid, author, resulttype, language, country, subject, description, dateofacceptance, embargoenddate, resourcetype, context, externalreference, instance from ${hiveDbName}.publication p union all - select id, dateofcollection, title, publisher, bestaccessright, datainfo, collectedfrom, pid, author, resulttype, language, country, subject, description, dateofacceptance, embargoenddate, resourcetype, context, instance from ${hiveDbName}.dataset d + select id, dateofcollection, title, publisher, bestaccessright, datainfo, collectedfrom, pid, author, resulttype, language, country, subject, description, dateofacceptance, embargoenddate, resourcetype, context, externalreference, instance from ${hiveDbName}.dataset d union all - select id, dateofcollection, title, publisher, bestaccessright, datainfo, collectedfrom, pid, author, resulttype, language, country, subject, description, dateofacceptance, embargoenddate, resourcetype, context, instance from ${hiveDbName}.software s + select id, dateofcollection, title, publisher, bestaccessright, datainfo, collectedfrom, pid, author, resulttype, language, country, subject, description, dateofacceptance, embargoenddate, resourcetype, context, externalreference, instance from ${hiveDbName}.software s union all - select id, dateofcollection, title, publisher, bestaccessright, datainfo, collectedfrom, pid, author, resulttype, language, country, subject, description, dateofacceptance, embargoenddate, resourcetype, context, instance from ${hiveDbName}.otherresearchproduct o; + select id, dateofcollection, title, publisher, bestaccessright, datainfo, collectedfrom, pid, author, resulttype, language, country, subject, description, dateofacceptance, embargoenddate, resourcetype, context, externalreference, instance from ${hiveDbName}.otherresearchproduct o; diff --git 
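For orientation: the cleaning workflow above fans out into eight structurally identical Spark actions that differ only in the graph table sub-path and in the OAF model class passed as --graphTableClassName. Conceptually, each action hands CleanGraphSparkJob an argument vector like the ones printed by this sketch (the paths and the lookup URL are illustrative placeholders, not values from this change):

import java.util.Arrays;
import java.util.List;

public class CleanGraphArgsSketch {

	public static void main(final String[] args) {
		// illustrative placeholders for the workflow parameters
		final String graphInputPath = "/tmp/graph_raw";
		final String graphOutputPath = "/tmp/graph_clean";
		final String isLookupUrl = "http://localhost:8280/is/services/isLookUp";

		final List<String> tables = Arrays
			.asList(
				"publication", "dataset", "otherresearchproduct", "software",
				"datasource", "organization", "project", "relation");

		for (final String table : tables) {
			// mirrors the argument list of the corresponding clean_* action
			final String clazz = "eu.dnetlib.dhp.schema.oaf."
				+ ("otherresearchproduct".equals(table) ? "OtherResearchProduct"
					: table.substring(0, 1).toUpperCase() + table.substring(1));
			final String[] jobArgs = {
				"--inputPath", graphInputPath + "/" + table,
				"--outputPath", graphOutputPath + "/" + table,
				"--graphTableClassName", clazz,
				"--isLookupUrl", isLookupUrl
			};
			System.out.println(String.join(" ", jobArgs));
		}
	}
}

Keeping a single job class parameterised by the table class name is what lets the workflow clean every entity and relation table with one implementation.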
a/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/input_clean_graph_parameters.json b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/input_clean_graph_parameters.json new file mode 100644 index 000000000..9cfed1e91 --- /dev/null +++ b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/input_clean_graph_parameters.json @@ -0,0 +1,32 @@ +[ + { + "paramName": "issm", + "paramLongName": "isSparkSessionManaged", + "paramDescription": "when true will stop SparkSession after job execution", + "paramRequired": false + }, + { + "paramName": "in", + "paramLongName": "inputPath", + "paramDescription": "the path to the graph data dump to read", + "paramRequired": true + }, + { + "paramName": "out", + "paramLongName": "outputPath", + "paramDescription": "the path to store the output graph", + "paramRequired": true + }, + { + "paramName": "isu", + "paramLongName": "isLookupUrl", + "paramDescription": "url to the ISLookup Service", + "paramRequired": true + }, + { + "paramName": "class", + "paramLongName": "graphTableClassName", + "paramDescription": "class name modelling the graph table", + "paramRequired": true + } +] diff --git a/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/raw_all/oozie_app/workflow.xml b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/raw_all/oozie_app/workflow.xml index c2bea9f8a..9a7e36570 100644 --- a/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/raw_all/oozie_app/workflow.xml +++ b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/raw_all/oozie_app/workflow.xml @@ -210,6 +210,23 @@ --mdLayoutstore --mdInterpretationcleaned + + + + + + + + + + eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication + --hdfsPath${contentPath}/oaf_records_invisible + --mongoBaseUrl${mongoURL} + --mongoDb${mongoDb} + --mdFormatOAF + --mdLayoutstore + --mdInterpretationintersection + @@ -237,7 +254,7 @@ --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} - --sourcePaths${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims + --sourcePaths${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims,${contentPath}/oaf_records_invisible --targetPath${workingDir}/entities_claim --isLookupUrl${isLookupUrl} diff --git a/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/xquery/load_vocabularies_synonyms.xquery b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/xquery/load_vocabularies_synonyms.xquery new file mode 100644 index 000000000..f4f8cb45d --- /dev/null +++ b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/xquery/load_vocabularies_synonyms.xquery @@ -0,0 +1,6 @@ +for $x in collection('/db/DRIVER/VocabularyDSResources/VocabularyDSResourceType') +let $vocid := $x//VOCABULARY_NAME/@code +let $vocname := $x//VOCABULARY_NAME/text() + for $term in ($x//TERM) + for $syn in ($term//SYNONYM/@term) + return concat($vocid,' @=@ ',$term/@code,' @=@ ', $syn) diff --git a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/clean/CleaningFunctionTest.java b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/clean/CleaningFunctionTest.java new file mode 100644 index 000000000..1b21ce2d3 --- /dev/null +++ b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/clean/CleaningFunctionTest.java @@ 
-0,0 +1,106 @@ + +package eu.dnetlib.dhp.oa.graph.clean; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.lenient; + +import java.io.IOException; +import java.util.List; +import java.util.Set; +import java.util.stream.Stream; + +import org.apache.commons.io.IOUtils; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import com.fasterxml.jackson.databind.ObjectMapper; + +import eu.dnetlib.dhp.oa.graph.raw.common.VocabularyGroup; +import eu.dnetlib.dhp.schema.oaf.Publication; +import eu.dnetlib.dhp.schema.oaf.Qualifier; +import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException; +import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService; + +@ExtendWith(MockitoExtension.class) +public class CleaningFunctionTest { + + public static final ObjectMapper MAPPER = new ObjectMapper(); + + @Mock + private ISLookUpService isLookUpService; + + private VocabularyGroup vocabularies; + + private CleaningRuleMap mapping; + + @BeforeEach + public void setUp() throws ISLookUpException, IOException { + lenient().when(isLookUpService.quickSearchProfile(VocabularyGroup.VOCABULARIES_XQUERY)).thenReturn(vocs()); + lenient() + .when(isLookUpService.quickSearchProfile(VocabularyGroup.VOCABULARY_SYNONYMS_XQUERY)) + .thenReturn(synonyms()); + + vocabularies = VocabularyGroup.loadVocsFromIS(isLookUpService); + mapping = CleaningRuleMap.create(vocabularies); + } + + @Test + public void testCleaning() throws Exception { + + assertNotNull(vocabularies); + assertNotNull(mapping); + + String json = IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/graph/clean/result.json")); + Publication p_in = MAPPER.readValue(json, Publication.class); + + Publication p_out = OafCleaner.apply(p_in, mapping); + + assertNotNull(p_out); + + assertEquals("und", p_out.getLanguage().getClassid()); + assertEquals("Undetermined", p_out.getLanguage().getClassname()); + + assertEquals("0018", p_out.getInstance().get(0).getInstancetype().getClassid()); + assertEquals("Annotation", p_out.getInstance().get(0).getInstancetype().getClassname()); + + assertEquals("CLOSED", p_out.getInstance().get(0).getAccessright().getClassid()); + assertEquals("Closed Access", p_out.getInstance().get(0).getAccessright().getClassname()); + + Set<String> pidTerms = vocabularies.getTerms("dnet:pid_types"); + assertTrue( + p_out + .getPid() + .stream() + .map(p -> p.getQualifier()) + .allMatch(q -> pidTerms.contains(q.getClassid()))); + + // TODO add more assertions to verify the cleaned values + System.out.println(MAPPER.writeValueAsString(p_out)); + + /* + * assertTrue( p_out .getPid() .stream() .allMatch(sp -> StringUtils.isNotBlank(sp.getValue()))); + */ + } + + private Stream<Qualifier> getAuthorPidTypes(Publication pub) { + return pub + .getAuthor() + .stream() + .map(a -> a.getPid()) + .flatMap(p -> p.stream()) + .map(s -> s.getQualifier()); + } + + private List<String> vocs() throws IOException { + return IOUtils + .readLines(CleaningFunctionTest.class.getResourceAsStream("/eu/dnetlib/dhp/oa/graph/clean/terms.txt")); + } + + private List<String> synonyms() throws IOException { + return IOUtils + .readLines(CleaningFunctionTest.class.getResourceAsStream("/eu/dnetlib/dhp/oa/graph/clean/synonyms.txt")); + } +} diff --git a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/raw/MappersTest.java 
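CleaningRuleMap and OafCleaner are exercised by the test above, but their sources are not part of this diff. As a hypothetical sketch of the underlying idea (the class name, method name, and body below are assumptions, not the actual implementation), a cleaning rule for Qualifier values only needs to rewrite classid/classname through VocabularyGroup.lookup whenever the qualifier's scheme matches a known vocabulary:

import java.util.function.Consumer;

import eu.dnetlib.dhp.oa.graph.raw.common.VocabularyGroup;
import eu.dnetlib.dhp.schema.oaf.Qualifier;

public class QualifierCleaningSketch {

	// hypothetical cleaning rule: resolve the qualifier against its own scheme
	public static Consumer<Qualifier> qualifierRule(final VocabularyGroup vocs) {
		return q -> {
			if (vocs.vocabularyExists(q.getSchemeid())) {
				// lookup() resolves synonyms first, then plain term codes
				final Qualifier resolved = vocs.lookup(q.getSchemeid(), q.getClassid());
				q.setClassid(resolved.getClassid());
				q.setClassname(resolved.getClassname());
			}
		};
	}
}

Applied to the fixture used by the test, a rule of this shape is what turns the instance type "Comment/debate" into the 0018/Annotation term via the dnet:publication_resource synonyms listed further below.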
b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/raw/MappersTest.java index dad427ce4..9bd20303f 100644 --- a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/raw/MappersTest.java +++ b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/raw/MappersTest.java @@ -2,6 +2,7 @@ package eu.dnetlib.dhp.oa.graph.raw; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.anyString; @@ -55,7 +56,7 @@ public class MappersTest { final String xml = IOUtils.toString(getClass().getResourceAsStream("oaf_record.xml")); - final List list = new OafToOafMapper(vocs).processMdRecord(xml); + final List list = new OafToOafMapper(vocs, false).processMdRecord(xml); assertEquals(3, list.size()); assertTrue(list.get(0) instanceof Publication); @@ -69,6 +70,7 @@ public class MappersTest { assertValidId(p.getId()); assertValidId(p.getCollectedfrom().get(0).getKey()); assertTrue(StringUtils.isNotBlank(p.getTitle().get(0).getValue())); + assertFalse(p.getDataInfo().getInvisible()); assertTrue(p.getAuthor().size() > 0); final Optional author = p @@ -134,11 +136,27 @@ public class MappersTest { // System.out.println(new ObjectMapper().writeValueAsString(r2)); } + @Test + void testPublicationInvisible() throws IOException { + + final String xml = IOUtils.toString(getClass().getResourceAsStream("oaf_record.xml")); + + final List list = new OafToOafMapper(vocs, true).processMdRecord(xml); + + assertTrue(list.size() > 0); + assertTrue(list.get(0) instanceof Publication); + + final Publication p = (Publication) list.get(0); + + assertTrue(p.getDataInfo().getInvisible()); + + } + @Test void testDataset() throws IOException { final String xml = IOUtils.toString(getClass().getResourceAsStream("odf_dataset.xml")); - final List list = new OdfToOafMapper(vocs).processMdRecord(xml); + final List list = new OdfToOafMapper(vocs, false).processMdRecord(xml); assertEquals(3, list.size()); assertTrue(list.get(0) instanceof Dataset); @@ -220,7 +238,7 @@ public class MappersTest { void testSoftware() throws IOException { final String xml = IOUtils.toString(getClass().getResourceAsStream("odf_software.xml")); - final List list = new OdfToOafMapper(vocs).processMdRecord(xml); + final List list = new OdfToOafMapper(vocs, false).processMdRecord(xml); assertEquals(1, list.size()); assertTrue(list.get(0) instanceof Software); diff --git a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/reflections/ReflectionTest.java b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/reflections/ReflectionTest.java new file mode 100644 index 000000000..110fabf45 --- /dev/null +++ b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/reflections/ReflectionTest.java @@ -0,0 +1,200 @@ + +package eu.dnetlib.dhp.oa.graph.reflections; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.lang.reflect.Field; +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.Test; + +public class ReflectionTest { + + private final Cleaner cleaner = new Cleaner(); + + @Test + void testObject() throws Exception { + final Publication pub = new Publication(); + pub.setTitle("openaire guidelines"); + pub.getAuthors().add(new Author("Michele Artini", new Prop("aa-001", "orcid"))); + 
pub.getAuthors().add(new Author("Claudio Atzori", new Prop("aa-002", "orcid"))); + pub.getAuthors().add(new Author("Alessia Bardi", new Prop("aa-003", "orcid"))); + pub.getSubjects().add(new Prop("infrastructures", "keyword")); + pub.getSubjects().add(new Prop("digital libraries", "keyword")); + + cleaner.clean(pub); + + System.out.println(pub); + + assertEquals("OPENAIRE GUIDELINES", pub.getTitle()); + + assertEquals("MICHELE ARTINI", pub.getAuthors().get(0).getName()); + assertEquals("CLAUDIO ATZORI", pub.getAuthors().get(1).getName()); + assertEquals("ALESSIA BARDI", pub.getAuthors().get(2).getName()); + + assertEquals("dnet:aa-001", pub.getAuthors().get(0).getId().getId()); + assertEquals("dnet:aa-002", pub.getAuthors().get(1).getId().getId()); + assertEquals("dnet:aa-003", pub.getAuthors().get(2).getId().getId()); + assertEquals("dnet:orcid", pub.getAuthors().get(0).getId().getName()); + assertEquals("dnet:orcid", pub.getAuthors().get(1).getId().getName()); + assertEquals("dnet:orcid", pub.getAuthors().get(2).getId().getName()); + + assertEquals("dnet:infrastructures", pub.getSubjects().get(0).getId()); + assertEquals("dnet:keyword", pub.getSubjects().get(0).getName()); + assertEquals("dnet:digital libraries", pub.getSubjects().get(1).getId()); + assertEquals("dnet:keyword", pub.getSubjects().get(1).getName()); + } + +} + +class Cleaner { + + public void clean(final Object o) throws IllegalArgumentException, IllegalAccessException { + if (isPrimitive(o)) { + return; + } else if (isIterable(o.getClass())) { + for (final Object elem : (Iterable) o) { + clean(elem); + } + } else if (hasMapping(o)) { + mapObject(o); + } else { + for (final Field f : o.getClass().getDeclaredFields()) { + f.setAccessible(true); + final Object val = f.get(o); + if (isPrimitive(val)) { + f.set(o, cleanValue(f.get(o))); + } else if (hasMapping(val)) { + mapObject(val); + } else { + clean(f.get(o)); + } + } + } + } + + private boolean hasMapping(final Object o) { + return o.getClass() == Prop.class; + } + + private void mapObject(final Object o) { + if (o.getClass() == Prop.class) { + ((Prop) o).setId("dnet:" + ((Prop) o).getId()); + ((Prop) o).setName("dnet:" + ((Prop) o).getName()); + } + + } + + private Object cleanValue(final Object o) { + if (o.getClass() == String.class) { + return ((String) o).toUpperCase(); + } else { + return o; + } + + } + + private boolean isIterable(final Class cl) { + return Iterable.class.isAssignableFrom(cl); + } + + private boolean isPrimitive(final Object o) { + return o.getClass() == String.class; + } +} + +class Publication { + + private String title; + private final List authors = new ArrayList<>(); + private final List subjects = new ArrayList<>(); + + public String getTitle() { + return title; + } + + public void setTitle(final String title) { + this.title = title; + } + + public List getAuthors() { + return authors; + } + + public List getSubjects() { + return subjects; + } + + @Override + public String toString() { + return String.format("Publication [title=%s, authors=%s, subjects=%s]", title, authors, subjects); + } + +} + +class Prop { + + private String id; + private String name; + + public Prop(final String id, final String name) { + this.id = id; + this.name = name; + } + + public String getId() { + return id; + } + + public void setId(final String id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(final String name) { + this.name = name; + } + + @Override + public String toString() { + return String.format("Prop 
[id=%s, name=%s]", id, name); + } + +} + +class Author { + + private String name; + private Prop id; + + public Author(final String name, final Prop id) { + this.name = name; + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(final String name) { + this.name = name; + } + + public Prop getId() { + return id; + } + + public void setId(final Prop id) { + this.id = id; + } + + @Override + public String toString() { + return String.format("Author [name=%s, id=%s]", name, id); + } + +} diff --git a/dhp-workflows/dhp-graph-mapper/src/test/resources/eu/dnetlib/dhp/oa/graph/clean/result.json b/dhp-workflows/dhp-graph-mapper/src/test/resources/eu/dnetlib/dhp/oa/graph/clean/result.json new file mode 100644 index 000000000..b63a12f61 --- /dev/null +++ b/dhp-workflows/dhp-graph-mapper/src/test/resources/eu/dnetlib/dhp/oa/graph/clean/result.json @@ -0,0 +1,757 @@ +{ + "author": [ + { + "affiliation": [ + ], + "fullname": "Brien, Tom", + "name": "Tom", + "pid": [ + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "ORCID12", + "classname": "ORCID12", + "schemeid": "dnet:pid_types", + "schemename": "dnet:pid_types" + }, + "value": "0000-0001-9613-6639" + } + ], + "rank": 1, + "surname": "Brien" + }, + { + "affiliation": [ + ], + "fullname": "Ade, Peter", + "name": "Peter", + "pid": [ + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "xyz", + "classname": "XYZ", + "schemeid": "dnet:pid_types", + "schemename": "dnet:pid_types" + }, + "value": "qwerty" + } + ], + "rank": 2, + "surname": "Ade" + }, + { + "affiliation": [ + ], + "fullname": "Barry, Peter S.", + "name": "Peter S.", + "pid": [ + ], + "rank": 3, + "surname": "Barry" + }, + { + "affiliation": [ + ], + "fullname": "Dunscombe, Chris J.", + "name": "Chris J.", + "pid": [ + ], + "rank": 4, + "surname": "Dunscombe" + }, + { + "affiliation": [ + ], + "fullname": "Leadley, David R.", + "name": "David R.", + "pid": [ + ], + "rank": 5, + "surname": "Leadley" + }, + { + "affiliation": [ + ], + "fullname": "Morozov, Dmitry V.", + "name": "Dmitry V.", + "pid": [ + ], + "rank": 6, + "surname": "Morozov" + }, + { + "affiliation": [ + ], + "fullname": "Myronov, Maksym", + "name": "Maksym", + "pid": [ + ], + "rank": 7, + "surname": "Myronov" + }, + { + "affiliation": [ + ], + "fullname": "Parker, Evan", + "name": "Evan", + "pid": [ + ], + "rank": 8, + "surname": "Parker" + }, + { + "affiliation": [ + ], + "fullname": "Prest, Martin J.", + "name": "Martin J.", + "pid": [ + ], + "rank": 9, + "surname": "Prest" + }, + { + "affiliation": [ + ], + "fullname": "Prunnila, Mika", + "name": "Mika", + "pid": [ + ], + "rank": 10, + "surname": "Prunnila" + }, + { + "affiliation": [ + ], + "fullname": "Sudiwala, Rashmi V.", + "name": "Rashmi V.", + "pid": [ + ], + "rank": 11, + "surname": "Sudiwala" + }, + { + "affiliation": [ + ], + "fullname": "Whall, Terry E.", + 
"name": "Terry E.", + "pid": [ + ], + "rank": 12, + "surname": "Whall" + }, + { + "affiliation": [ + ], + "fullname": "Mauskopf", + "name": "", + "pid": [ + ], + "rank": 13, + "surname": "" + }, + { + "affiliation": [ + ], + "fullname": " P. D. ", + "name": "", + "pid": [ + ], + "rank": 14, + "surname": "" + } + ], + "bestaccessright": { + "classid": "CLOSED", + "classname": "Closed Access", + "schemeid": "dnet:access_modes", + "schemename": "dnet:access_modes" + }, + "collectedfrom": [ + { + "key": "10|CSC_________::a2b9ce8435390bcbfc05f3cae3948747", + "value": "VIRTA" + } + ], + "context": [ + ], + "contributor": [ + ], + "country": [ + ], + "coverage": [ + ], + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "dateofacceptance": { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "value": "2016-01-01" + }, + "dateofcollection": "", + "dateoftransformation": "2020-04-22T12:34:08.009Z", + "description": [ + ], + "externalReference": [ + ], + "extraInfo": [ + ], + "format": [ + ], + "fulltext": [ + ], + "id": "50|CSC_________::2250a70c903c6ac6e4c01438259e9375", + "instance": [ + { + "accessright": { + "classid": "CLOSED", + "classname": "CLOSED", + "schemeid": "dnet:access_modes", + "schemename": "dnet:access_modes" + }, + "collectedfrom": { + "key": "10|CSC_________::a2b9ce8435390bcbfc05f3cae3948747", + "value": "VIRTA" + }, + "dateofacceptance": { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "value": "2016-01-01" + }, + "distributionlocation": "", + "hostedby": { + "key": "10|CSC_________::a2b9ce8435390bcbfc05f3cae3948747", + "value": "VIRTA" + }, + "instancetype": { + "classid": "Comment/debate", + "classname": "Comment/debate", + "schemeid": "dnet:publication_resource", + "schemename": "dnet:publication_resource" + }, + "url": [ + "http://juuli.fi/Record/0275158616", + "http://dx.doi.org/10.1007/s109090161569x" + ] + } + ], + "journal": { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "edition": "", + "ep": " 7", + "iss": "9 March", + "issnLinking": "", + "issnOnline": "", + "issnPrinted": "0022-2291", + "name": "Journal of Low Temperature Physics - Early Acces", + "sp": "1 ", + "vol": "" + }, + "language": { + "classid": "UNKNOWN", + "classname": "UNKNOWN", + "schemeid": "dnet:languages", + "schemename": "dnet:languages" + }, + "lastupdatetimestamp": 1591283286319, + 
"oaiprovenance": { + "originDescription": { + "altered": true, + "baseURL": "https%3A%2F%2Fvirta-jtp.csc.fi%2Fapi%2Fcerif", + "datestamp": "2019-07-30", + "harvestDate": "2020-04-22T11:04:38.685Z", + "identifier": "oai:virta-jtp.csc.fi:Publications/0275158616", + "metadataNamespace": "" + } + }, + "originalId": [ + "CSC_________::2250a70c903c6ac6e4c01438259e9375" + ], + "pid": [ + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "doi", + "classname": "doi", + "schemeid": "dnet:pid_types", + "schemename": "dnet:pid_types" + }, + "value": "10.1007/s109090161569x" + }, + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "doi", + "classname": "doi", + "schemeid": "dnet:pid_types", + "schemename": "dnet:pid_types" + }, + "value": "10.1007/s109090161569x" + }, + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "doi", + "classname": "doi", + "schemeid": "dnet:pid_types", + "schemename": "dnet:pid_types" + }, + "value": "" + } + ], + "relevantdate": [ + ], + "resourcetype": { + "classid": "0001", + "classname": "0001", + "schemeid": "dnet:dataCite_resource", + "schemename": "dnet:dataCite_resource" + }, + "resulttype": { + "classid": "publication", + "classname": "publication", + "schemeid": "dnet:result_typologies", + "schemename": "dnet:result_typologies" + }, + "source": [ + ], + "subject": [ + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "", + "classname": "", + "schemeid": "", + "schemename": "" + }, + "value": "ta213" + }, + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "", + "classname": "", + "schemeid": "", + "schemename": "" + }, + "value": "infrared detectors" + }, + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + 
"schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "", + "classname": "", + "schemeid": "", + "schemename": "" + }, + "value": "lens antennas" + }, + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "", + "classname": "", + "schemeid": "", + "schemename": "" + }, + "value": "silicon" + }, + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "", + "classname": "", + "schemeid": "", + "schemename": "" + }, + "value": "slot antennas" + }, + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "", + "classname": "", + "schemeid": "", + "schemename": "" + }, + "value": "strained silicon" + }, + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "", + "classname": "", + "schemeid": "", + "schemename": "" + }, + "value": "cold electron bolometers" + }, + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "", + "classname": "", + "schemeid": "", + "schemename": "" + }, + "value": "doped silicon" + }, + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "", + "classname": "", + "schemeid": "", + "schemename": "" + }, + "value": "measure noise" + }, + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "", + 
"classname": "", + "schemeid": "", + "schemename": "" + }, + "value": "noise equivalent power" + }, + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "", + "classname": "", + "schemeid": "", + "schemename": "" + }, + "value": "optical characterisation" + }, + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "", + "classname": "", + "schemeid": "", + "schemename": "" + }, + "value": "optical response" + }, + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "", + "classname": "", + "schemeid": "", + "schemename": "" + }, + "value": "photon noise" + }, + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "", + "classname": "", + "schemeid": "", + "schemename": "" + }, + "value": "silicon absorbers" + } + ], + "title": [ + { + "dataInfo": { + "deletedbyinference": false, + "inferenceprovenance": "", + "inferred": false, + "invisible": false, + "provenanceaction": { + "classid": "sysimport:crosswalk:datasetarchive", + "classname": "sysimport:crosswalk:datasetarchive", + "schemeid": "dnet:provenanceActions", + "schemename": "dnet:provenanceActions" + }, + "trust": "0.9" + }, + "qualifier": { + "classid": "main title", + "classname": "main title", + "schemeid": "dnet:dataCite_title", + "schemename": "dnet:dataCite_title" + }, + "value": "Optical response of strained- and unstrained-silicon cold-electron bolometers" + } + ] +} \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-mapper/src/test/resources/eu/dnetlib/dhp/oa/graph/clean/synonyms.txt b/dhp-workflows/dhp-graph-mapper/src/test/resources/eu/dnetlib/dhp/oa/graph/clean/synonyms.txt new file mode 100644 index 000000000..05484c8e5 --- /dev/null +++ b/dhp-workflows/dhp-graph-mapper/src/test/resources/eu/dnetlib/dhp/oa/graph/clean/synonyms.txt @@ -0,0 +1,1233 @@ +dnet:access_modes @=@ CLOSED @=@ http://purl.org/coar/access_right/c_14cb +dnet:access_modes @=@ CLOSED @=@ info:eu-repo/semantics/closedAccess +dnet:access_modes @=@ EMBARGO @=@ http://purl.org/coar/access_right/c_f1cf +dnet:access_modes @=@ EMBARGO @=@ info:eu-repo/semantics/embargoedAccess +dnet:access_modes @=@ OPEN @=@ Creative Commons License [CC BY-NC-ND] http://creativecommons.org/licenses/by-nc-nd/3.0/de/ +dnet:access_modes @=@ OPEN @=@ 
Creative commons +dnet:access_modes @=@ OPEN @=@ http://creativecommons.org/licenses/by-nc-nd/3.0/ +dnet:access_modes @=@ OPEN @=@ http://creativecommons.org/licenses/by-nc/3.0/ +dnet:access_modes @=@ OPEN @=@ http://creativecommons.org/licenses/by-sa/3.0/ +dnet:access_modes @=@ OPEN @=@ http://creativecommons.org/licenses/by-sa/4.0/ +dnet:access_modes @=@ OPEN @=@ http://creativecommons.org/licenses/by/3.0/ +dnet:access_modes @=@ OPEN @=@ http://creativecommons.org/licenses/by/3.0/us/ +dnet:access_modes @=@ OPEN @=@ http://creativecommons.org/licenses/by/4.0/ +dnet:access_modes @=@ OPEN @=@ http://creativecommons.org/publicdomain/zero/1.0/ +dnet:access_modes @=@ OPEN @=@ http://creativecommons.org/publicdomain/zero/1.0/ & http://www.canadensys.net/norms +dnet:access_modes @=@ OPEN @=@ http://purl.org/coar/access_right/c_abf2 +dnet:access_modes @=@ OPEN @=@ https://creativecommons.org/licenses/by-nc/4.0/ +dnet:access_modes @=@ OPEN @=@ info:eu-repo/semantics/openAccess +dnet:access_modes @=@ OPEN @=@ open_access +dnet:access_modes @=@ RESTRICTED @=@ http://purl.org/coar/access_right/c_16ec +dnet:access_modes @=@ RESTRICTED @=@ info:eu-repo/semantics/restrictedAccess +dnet:compatibilityLevel @=@ openaire-pub_4.0 @=@ openaire4.0 +dnet:subject_classification_typologies @=@ jel @=@ jelElement +dnet:publication_resource @=@ 0018 @=@ Comment/debate +dnet:publication_resource @=@ 0018 @=@ http://purl.org/coar/resource_type/c_1162 +dnet:publication_resource @=@ 0018 @=@ info:eu-repo/semantics/annotation +dnet:publication_resource @=@ 0001 @=@ A1 Alkuperäisartikkeli tieteellisessä aikakauslehdessä +dnet:publication_resource @=@ 0001 @=@ Article +dnet:publication_resource @=@ 0001 @=@ Article (author) +dnet:publication_resource @=@ 0001 @=@ Article - letter to the editor +dnet:publication_resource @=@ 0001 @=@ Article / Letter to editor +dnet:publication_resource @=@ 0001 @=@ Article / Letter to the editor +dnet:publication_resource @=@ 0001 @=@ Article / Newspaper +dnet:publication_resource @=@ 0001 @=@ Article in journal +dnet:publication_resource @=@ 0001 @=@ Article in monograph or in proceedings +dnet:publication_resource @=@ 0001 @=@ Article in proceedings +dnet:publication_resource @=@ 0001 @=@ Article-letter to the editor +dnet:publication_resource @=@ 0001 @=@ Article/Letter to editor +dnet:publication_resource @=@ 0001 @=@ Articolo +dnet:publication_resource @=@ 0001 @=@ Artículo +dnet:publication_resource @=@ 0001 @=@ Aufsatz +dnet:publication_resource @=@ 0001 @=@ Clinical Study +dnet:publication_resource @=@ 0001 @=@ Institutional Series +dnet:publication_resource @=@ 0001 @=@ International Journal +dnet:publication_resource @=@ 0001 @=@ International Journal Abstract +dnet:publication_resource @=@ 0001 @=@ International Journal ISI/JCR +dnet:publication_resource @=@ 0001 @=@ Journal (full / special issue) +dnet:publication_resource @=@ 0001 @=@ Journal Article/Review +dnet:publication_resource @=@ 0001 @=@ Journal article +dnet:publication_resource @=@ 0001 @=@ Journal article (on-line or printed) +dnet:publication_resource @=@ 0001 @=@ Journal articles +dnet:publication_resource @=@ 0001 @=@ Journal paper +dnet:publication_resource @=@ 0001 @=@ National Journal +dnet:publication_resource @=@ 0001 @=@ Original article (non peer-reviewed) +dnet:publication_resource @=@ 0001 @=@ Original article (peer-reviewed) +dnet:publication_resource @=@ 0001 @=@ Peer-reviewed Article +dnet:publication_resource @=@ 0001 @=@ Published Journal Article +dnet:publication_resource @=@ 0001 @=@ Research 
Article +dnet:publication_resource @=@ 0001 @=@ Review article (non peer-reviewed) +dnet:publication_resource @=@ 0001 @=@ Review article (peer-reviewed) +dnet:publication_resource @=@ 0001 @=@ Volumes Edited / Special Issues +dnet:publication_resource @=@ 0001 @=@ article in non peer-reviewed journal +dnet:publication_resource @=@ 0001 @=@ article in peer-reviewed journal +dnet:publication_resource @=@ 0001 @=@ article-commentary +dnet:publication_resource @=@ 0001 @=@ article_site_web +dnet:publication_resource @=@ 0001 @=@ doc-type:Journal Article +dnet:publication_resource @=@ 0001 @=@ doc-type:article +dnet:publication_resource @=@ 0001 @=@ http://purl.org/coar/resource_type/c_2df8fbb1 +dnet:publication_resource @=@ 0001 @=@ http://purl.org/coar/resource_type/c_545b +dnet:publication_resource @=@ 0001 @=@ http://purl.org/coar/resource_type/c_6501 +dnet:publication_resource @=@ 0001 @=@ http://purl.org/coar/resource_type/c_7877 +dnet:publication_resource @=@ 0001 @=@ in-brief +dnet:publication_resource @=@ 0001 @=@ info:eu-repo/semantics/article +dnet:publication_resource @=@ 0001 @=@ journal-article +dnet:publication_resource @=@ 0001 @=@ journalArticle +dnet:publication_resource @=@ 0001 @=@ journal_article +dnet:publication_resource @=@ 0001 @=@ letter +dnet:publication_resource @=@ 0001 @=@ non peer-reviewed article +dnet:publication_resource @=@ 0001 @=@ partial-retraction +dnet:publication_resource @=@ 0001 @=@ proceeding with peer review +dnet:publication_resource @=@ 0001 @=@ publication-article +dnet:publication_resource @=@ 0001 @=@ rapid-communication +dnet:publication_resource @=@ 0001 @=@ reply +dnet:publication_resource @=@ 0001 @=@ research-article +dnet:publication_resource @=@ 0001 @=@ retraction +dnet:publication_resource @=@ 0001 @=@ review-article +dnet:publication_resource @=@ 0001 @=@ text (article) +dnet:publication_resource @=@ 0001 @=@ Статья +dnet:publication_resource @=@ 0001 @=@ ArticleArtikel +dnet:publication_resource @=@ 0033 @=@ AUDIOVISUAL_DOCUMENT +dnet:publication_resource @=@ 0033 @=@ Audiovisual/Audiovisual +dnet:publication_resource @=@ 0033 @=@ http://purl.org/coar/resource_type/c_c513 +dnet:publication_resource @=@ 0008 @=@ Bachelor's +dnet:publication_resource @=@ 0008 @=@ Bachelor's Degree +dnet:publication_resource @=@ 0008 @=@ Bachelors Thesis +dnet:publication_resource @=@ 0008 @=@ Proyecto fin de carrera +dnet:publication_resource @=@ 0008 @=@ Undergraduate Thesis +dnet:publication_resource @=@ 0008 @=@ http://purl.org/coar/resource_type/c_7a1f +dnet:publication_resource @=@ 0008 @=@ info:eu-repo/semantics/bachelorThesis +dnet:publication_resource @=@ 0008 @=@ выпускная бакалаврская работа +dnet:publication_resource @=@ 0002 @=@ Book (monograph) +dnet:publication_resource @=@ 0002 @=@ Book (non peer-reviewed) +dnet:publication_resource @=@ 0002 @=@ Book (peer-reviewed) +dnet:publication_resource @=@ 0002 @=@ Book - monograph - editorial book +dnet:publication_resource @=@ 0002 @=@ Book Section +dnet:publication_resource @=@ 0002 @=@ Book as author +dnet:publication_resource @=@ 0002 @=@ Buch +dnet:publication_resource @=@ 0002 @=@ International Book/Monograph +dnet:publication_resource @=@ 0002 @=@ Libro +dnet:publication_resource @=@ 0002 @=@ Monografia +dnet:publication_resource @=@ 0002 @=@ Monograph +dnet:publication_resource @=@ 0002 @=@ National Book/Monograph +dnet:publication_resource @=@ 0002 @=@ atlas +dnet:publication_resource @=@ 0002 @=@ book +dnet:publication_resource @=@ 0002 @=@ book-series +dnet:publication_resource @=@ 
0002 @=@ book-set +dnet:publication_resource @=@ 0002 @=@ book-track +dnet:publication_resource @=@ 0002 @=@ book_series +dnet:publication_resource @=@ 0002 @=@ book_title +dnet:publication_resource @=@ 0002 @=@ doc-type:book +dnet:publication_resource @=@ 0002 @=@ edited-book +dnet:publication_resource @=@ 0002 @=@ http://purl.org/coar/resource_type/c_2f33 +dnet:publication_resource @=@ 0002 @=@ info:eu-repo/semantics/book +dnet:publication_resource @=@ 0002 @=@ ouvrage +dnet:publication_resource @=@ 0002 @=@ publication-book +dnet:publication_resource @=@ 0002 @=@ reference-book +dnet:publication_resource @=@ 0002 @=@ scientific book +dnet:publication_resource @=@ 0002 @=@ Монография +dnet:publication_resource @=@ 0002 @=@ Учебник +dnet:publication_resource @=@ 0037 @=@ clinicalTrial +dnet:publication_resource @=@ 0037 @=@ http://purl.org/coar/resource_type/c_cb28 +dnet:publication_resource @=@ 0022 @=@ collection +dnet:publication_resource @=@ 0004 @=@ A4 Artikkeli konferenssijulkaisussa +dnet:publication_resource @=@ 0004 @=@ Comunicación de congreso +dnet:publication_resource @=@ 0004 @=@ Conference Paper +dnet:publication_resource @=@ 0004 @=@ Conference Paper/Proceeding/Abstract +dnet:publication_resource @=@ 0004 @=@ Conference Proceedings +dnet:publication_resource @=@ 0004 @=@ Conference article +dnet:publication_resource @=@ 0004 @=@ Conference contribution +dnet:publication_resource @=@ 0004 @=@ Conference lecture +dnet:publication_resource @=@ 0004 @=@ Conference or Workshop Item +dnet:publication_resource @=@ 0004 @=@ Conference paper, poster, etc. +dnet:publication_resource @=@ 0004 @=@ Conference papers +dnet:publication_resource @=@ 0004 @=@ Conference report +dnet:publication_resource @=@ 0004 @=@ International Conference +dnet:publication_resource @=@ 0004 @=@ International Conference Abstract/Poster +dnet:publication_resource @=@ 0004 @=@ International Conference ISI/JCR +dnet:publication_resource @=@ 0004 @=@ International Conference communication/abstract/poster +dnet:publication_resource @=@ 0004 @=@ National Conference +dnet:publication_resource @=@ 0004 @=@ National Conference Abstract/Poster +dnet:publication_resource @=@ 0004 @=@ National Conference communication/abstract/poster +dnet:publication_resource @=@ 0004 @=@ PREFACE_PROCEEDINGS +dnet:publication_resource @=@ 0004 @=@ PROCEEDING_PAPER +dnet:publication_resource @=@ 0004 @=@ Papers in Conference Proceedings +dnet:publication_resource @=@ 0004 @=@ Presentación +dnet:publication_resource @=@ 0004 @=@ Proceedings (peer-reviewed) +dnet:publication_resource @=@ 0004 @=@ Proceedings of a Conference +dnet:publication_resource @=@ 0004 @=@ Proceedings paper +dnet:publication_resource @=@ 0004 @=@ Póster +dnet:publication_resource @=@ 0004 @=@ actes_congres +dnet:publication_resource @=@ 0004 @=@ communication_avec_actes +dnet:publication_resource @=@ 0004 @=@ communication_invitee +dnet:publication_resource @=@ 0004 @=@ communication_par_affiche +dnet:publication_resource @=@ 0004 @=@ communication_sans_actes +dnet:publication_resource @=@ 0004 @=@ conference +dnet:publication_resource @=@ 0004 @=@ conference item +dnet:publication_resource @=@ 0004 @=@ conference proceeding +dnet:publication_resource @=@ 0004 @=@ conferenceObject +dnet:publication_resource @=@ 0004 @=@ conference_paper +dnet:publication_resource @=@ 0004 @=@ doc-type:conferenceObject +dnet:publication_resource @=@ 0004 @=@ http://purl.org/coar/resource_type/c_18co +dnet:publication_resource @=@ 0004 @=@ http://purl.org/coar/resource_type/c_18cp 
+dnet:publication_resource @=@ 0004 @=@ http://purl.org/coar/resource_type/c_5794 +dnet:publication_resource @=@ 0004 @=@ http://purl.org/coar/resource_type/c_6670 +dnet:publication_resource @=@ 0004 @=@ http://purl.org/coar/resource_type/c_c94f +dnet:publication_resource @=@ 0004 @=@ http://purl.org/coar/resource_type/c_f744 +dnet:publication_resource @=@ 0004 @=@ info:eu-repo/semantics/conferenceItem +dnet:publication_resource @=@ 0004 @=@ info:eu-repo/semantics/conferenceObject +dnet:publication_resource @=@ 0004 @=@ invited conference talk +dnet:publication_resource @=@ 0004 @=@ poster +dnet:publication_resource @=@ 0004 @=@ presentation +dnet:publication_resource @=@ 0004 @=@ proceeding, seminar, workshop without peer review +dnet:publication_resource @=@ 0004 @=@ proceedings +dnet:publication_resource @=@ 0004 @=@ proceedings-article +dnet:publication_resource @=@ 0004 @=@ publication-conferencepaper +dnet:publication_resource @=@ 0004 @=@ научный доклад +dnet:publication_resource @=@ 0005 @=@ Newspaper or magazine article +dnet:publication_resource @=@ 0005 @=@ http://purl.org/coar/resource_type/c_998f +dnet:publication_resource @=@ 0005 @=@ info:eu-repo/semantics/contributionToPeriodical +dnet:publication_resource @=@ 0045 @=@ Data Management Plan +dnet:publication_resource @=@ 0045 @=@ Data Management Plan (NSF Generic) +dnet:publication_resource @=@ 0045 @=@ http://purl.org/coar/resource_type/c_ab20 +dnet:publication_resource @=@ 0045 @=@ http://purl.org/spar/fabio/DataManagementPolicy +dnet:publication_resource @=@ 0045 @=@ http://purl.org/spar/fabio/DataManagementPolicyDocument +dnet:publication_resource @=@ 0045 @=@ http://purl.org/spar/fabio/DataMangementPlan +dnet:publication_resource @=@ 0045 @=@ plan de gestión de datos +dnet:publication_resource @=@ 0045 @=@ publication-datamanagementplan +dnet:publication_resource @=@ 0031 @=@ Data Descriptor +dnet:publication_resource @=@ 0031 @=@ DataPaper +dnet:publication_resource @=@ 0031 @=@ data-article +dnet:publication_resource @=@ 0031 @=@ http://purl.org/coar/resource_type/c_beb9 +dnet:publication_resource @=@ 0021 @=@ Dataset/Dataset +dnet:publication_resource @=@ 0021 @=@ Research Data +dnet:publication_resource @=@ 0021 @=@ dataset +dnet:publication_resource @=@ 0021 @=@ http://purl.org/coar/resource_type/c_ddb1 +dnet:publication_resource @=@ 0021 @=@ info:eu-repo/semantics/DDIInstance +dnet:publication_resource @=@ 0021 @=@ info:eu-repo/semantics/datafile +dnet:publication_resource @=@ 0021 @=@ info:eu-repo/semantics/dataset +dnet:publication_resource @=@ 0021 @=@ info:eu-repo/semantics/enhancedObjectFile +dnet:publication_resource @=@ 0006 @=@ Diss +dnet:publication_resource @=@ 0006 @=@ Dissertation +dnet:publication_resource @=@ 0006 @=@ Doctoral +dnet:publication_resource @=@ 0006 @=@ DoctoralThesis +dnet:publication_resource @=@ 0006 @=@ PhD thesis +dnet:publication_resource @=@ 0006 @=@ Tesis +dnet:publication_resource @=@ 0006 @=@ Text.Thesis.Doctoral +dnet:publication_resource @=@ 0006 @=@ Theses +dnet:publication_resource @=@ 0006 @=@ Thesis +dnet:publication_resource @=@ 0006 @=@ Thesis or Dissertation +dnet:publication_resource @=@ 0006 @=@ Thesis.Doctoral +dnet:publication_resource @=@ 0006 @=@ doc-type:doctoralThesis +dnet:publication_resource @=@ 0006 @=@ http://purl.org/coar/resource_type/c_db06 +dnet:publication_resource @=@ 0006 @=@ info:eu-repo/semantics/doctoralThesis +dnet:publication_resource @=@ 0006 @=@ publication-thesis +dnet:publication_resource @=@ 0006 @=@ these +dnet:publication_resource @=@ 
0006 @=@ these exercice +dnet:publication_resource @=@ 0023 @=@ Event/Event +dnet:publication_resource @=@ 0023 @=@ event +dnet:publication_resource @=@ 0009 @=@ Departmental Technical Report +dnet:publication_resource @=@ 0009 @=@ Informe Técnico +dnet:publication_resource @=@ 0009 @=@ RESEARCH_REPORT +dnet:publication_resource @=@ 0009 @=@ Tech-Report +dnet:publication_resource @=@ 0009 @=@ Technical Report +dnet:publication_resource @=@ 0009 @=@ http://purl.org/coar/resource_type/c_18gh +dnet:publication_resource @=@ 0009 @=@ publication-technicalnote +dnet:publication_resource @=@ 0009 @=@ research report +dnet:publication_resource @=@ 0024 @=@ Video +dnet:publication_resource @=@ 0024 @=@ film +dnet:publication_resource @=@ 0024 @=@ http://purl.org/coar/resource_type/c_12ce +dnet:publication_resource @=@ 0024 @=@ http://purl.org/coar/resource_type/c_8a7e +dnet:publication_resource @=@ 0025 @=@ Diagram +dnet:publication_resource @=@ 0025 @=@ Drawing +dnet:publication_resource @=@ 0025 @=@ Figure +dnet:publication_resource @=@ 0025 @=@ Image/Image +dnet:publication_resource @=@ 0025 @=@ Imagen +dnet:publication_resource @=@ 0025 @=@ Photo +dnet:publication_resource @=@ 0025 @=@ Plot +dnet:publication_resource @=@ 0025 @=@ fotó +dnet:publication_resource @=@ 0025 @=@ grafika +dnet:publication_resource @=@ 0025 @=@ http://purl.org/coar/resource_type/c_ecc8 +dnet:publication_resource @=@ 0025 @=@ image +dnet:publication_resource @=@ 0025 @=@ image-diagram +dnet:publication_resource @=@ 0025 @=@ image-drawing +dnet:publication_resource @=@ 0025 @=@ image-figure +dnet:publication_resource @=@ 0025 @=@ image-other +dnet:publication_resource @=@ 0025 @=@ image-photo +dnet:publication_resource @=@ 0025 @=@ image-plot +dnet:publication_resource @=@ 0026 @=@ http://purl.org/coar/resource_type/c_e9a0 +dnet:publication_resource @=@ 0026 @=@ interactiveResource +dnet:publication_resource @=@ 0011 @=@ Internal note +dnet:publication_resource @=@ 0011 @=@ http://purl.org/coar/resource_type/c_18ww +dnet:publication_resource @=@ 0043 @=@ http://purl.org/coar/resource_type/c_0640 +dnet:publication_resource @=@ 0010 @=@ Inaugural lecture +dnet:publication_resource @=@ 0010 @=@ Material didáctico +dnet:publication_resource @=@ 0010 @=@ Public-Lecture +dnet:publication_resource @=@ 0010 @=@ http://purl.org/coar/resource_type/c_8544 +dnet:publication_resource @=@ 0010 @=@ info:eu-repo/semantics/lecture +dnet:publication_resource @=@ 0010 @=@ lesson +dnet:publication_resource @=@ 0010 @=@ Учебный материал +dnet:publication_resource @=@ 0007 @=@ Diploma Project +dnet:publication_resource @=@ 0007 @=@ MSc Thesis +dnet:publication_resource @=@ 0007 @=@ Master Degree +dnet:publication_resource @=@ 0007 @=@ Master's +dnet:publication_resource @=@ 0007 @=@ Masterarbeit u.a. 
+dnet:publication_resource @=@ 0007 @=@ Masters (Taught) +dnet:publication_resource @=@ 0007 @=@ Masters thesis +dnet:publication_resource @=@ 0007 @=@ Masters-Thesis.Magister +dnet:publication_resource @=@ 0007 @=@ Tesina +dnet:publication_resource @=@ 0007 @=@ Thesis.Master +dnet:publication_resource @=@ 0007 @=@ Trabajo fin de Máster +dnet:publication_resource @=@ 0007 @=@ doc-type:masterThesis +dnet:publication_resource @=@ 0007 @=@ hdr +dnet:publication_resource @=@ 0007 @=@ http://purl.org/coar/resource_type/c_bdcc +dnet:publication_resource @=@ 0007 @=@ info:eu-repo/semantics/masterThesis +dnet:publication_resource @=@ 0007 @=@ masterThesis +dnet:publication_resource @=@ 0007 @=@ memoire +dnet:publication_resource @=@ 0027 @=@ Model/Model +dnet:publication_resource @=@ 0027 @=@ model +dnet:publication_resource @=@ 0020 @=@ Exhibition +dnet:publication_resource @=@ 0020 @=@ Learning Object +dnet:publication_resource @=@ 0020 @=@ Mapa +dnet:publication_resource @=@ 0020 @=@ Modelo de utilidad +dnet:publication_resource @=@ 0020 @=@ PEDAGOGICAL_DOCUMENT +dnet:publication_resource @=@ 0020 @=@ Partitura +dnet:publication_resource @=@ 0020 @=@ Sitio web +dnet:publication_resource @=@ 0020 @=@ Trabajo de divulgación +dnet:publication_resource @=@ 0020 @=@ Web publication/site +dnet:publication_resource @=@ 0020 @=@ application +dnet:publication_resource @=@ 0020 @=@ artefact +dnet:publication_resource @=@ 0020 @=@ carte +dnet:publication_resource @=@ 0020 @=@ composition +dnet:publication_resource @=@ 0020 @=@ document_audiovisuel +dnet:publication_resource @=@ 0020 @=@ http://purl.org/coar/resource_type/c_12cc +dnet:publication_resource @=@ 0020 @=@ http://purl.org/coar/resource_type/c_12cd +dnet:publication_resource @=@ 0020 @=@ http://purl.org/coar/resource_type/c_1843 +dnet:publication_resource @=@ 0020 @=@ http://purl.org/coar/resource_type/c_18cd +dnet:publication_resource @=@ 0020 @=@ http://purl.org/coar/resource_type/c_18cw +dnet:publication_resource @=@ 0020 @=@ http://purl.org/coar/resource_type/c_26e4 +dnet:publication_resource @=@ 0020 @=@ http://purl.org/coar/resource_type/c_7ad9 +dnet:publication_resource @=@ 0020 @=@ http://purl.org/coar/resource_type/c_e059 +dnet:publication_resource @=@ 0020 @=@ info:eu-repo/semantics/other +dnet:publication_resource @=@ 0020 @=@ learningObject +dnet:publication_resource @=@ 0020 @=@ map +dnet:publication_resource @=@ 0020 @=@ misc +dnet:publication_resource @=@ 0020 @=@ other +dnet:publication_resource @=@ 0020 @=@ revue +dnet:publication_resource @=@ 0038 @=@ Abstract +dnet:publication_resource @=@ 0038 @=@ Blog +dnet:publication_resource @=@ 0038 @=@ Book Prospectus +dnet:publication_resource @=@ 0038 @=@ Dictionary Entry +dnet:publication_resource @=@ 0038 @=@ Disclosure +dnet:publication_resource @=@ 0038 @=@ Editorial +dnet:publication_resource @=@ 0038 @=@ Editorial ISI/JCR +dnet:publication_resource @=@ 0038 @=@ Editors +dnet:publication_resource @=@ 0038 @=@ Editors (non peer-reviewed) +dnet:publication_resource @=@ 0038 @=@ Editors (peer-reviewed) +dnet:publication_resource @=@ 0038 @=@ Encyclopedia Entry +dnet:publication_resource @=@ 0038 @=@ Entrada de blog +dnet:publication_resource @=@ 0038 @=@ Funding Submission +dnet:publication_resource @=@ 0038 @=@ HabilitationThesis +dnet:publication_resource @=@ 0038 @=@ License +dnet:publication_resource @=@ 0038 @=@ Manual +dnet:publication_resource @=@ 0038 @=@ Manuscript +dnet:publication_resource @=@ 0038 @=@ Manuscrito +dnet:publication_resource @=@ 0038 @=@ Other publication 
(non peer-review) +dnet:publication_resource @=@ 0038 @=@ Other publication (peer-review) +dnet:publication_resource @=@ 0038 @=@ Revista +dnet:publication_resource @=@ 0038 @=@ Supervised Student Publication +dnet:publication_resource @=@ 0038 @=@ Tesis/trabajos de grado – Thesis +dnet:publication_resource @=@ 0038 @=@ Text +dnet:publication_resource @=@ 0038 @=@ Text/Text +dnet:publication_resource @=@ 0038 @=@ Trademark +dnet:publication_resource @=@ 0038 @=@ Translation +dnet:publication_resource @=@ 0038 @=@ afterword +dnet:publication_resource @=@ 0038 @=@ avantpropos +dnet:publication_resource @=@ 0038 @=@ bibliography +dnet:publication_resource @=@ 0038 @=@ chronique +dnet:publication_resource @=@ 0038 @=@ compte rendu +dnet:publication_resource @=@ 0038 @=@ correction +dnet:publication_resource @=@ 0038 @=@ foreword +dnet:publication_resource @=@ 0038 @=@ habilitation à diriger des recherches +dnet:publication_resource @=@ 0038 @=@ historicalDocument +dnet:publication_resource @=@ 0038 @=@ http://purl.org/coar/resource_type/c_0040 +dnet:publication_resource @=@ 0038 @=@ http://purl.org/coar/resource_type/c_0857 +dnet:publication_resource @=@ 0038 @=@ http://purl.org/coar/resource_type/c_18cf +dnet:publication_resource @=@ 0038 @=@ http://purl.org/coar/resource_type/c_18wz +dnet:publication_resource @=@ 0038 @=@ http://purl.org/coar/resource_type/c_3e5a +dnet:publication_resource @=@ 0038 @=@ http://purl.org/coar/resource_type/c_46ec +dnet:publication_resource @=@ 0038 @=@ http://purl.org/coar/resource_type/c_6947 +dnet:publication_resource @=@ 0038 @=@ http://purl.org/coar/resource_type/c_7acd +dnet:publication_resource @=@ 0038 @=@ http://purl.org/coar/resource_type/c_86bc +dnet:publication_resource @=@ 0038 @=@ http://purl.org/coar/resource_type/c_b239 +dnet:publication_resource @=@ 0038 @=@ note de lecture +dnet:publication_resource @=@ 0038 @=@ notedelecture +dnet:publication_resource @=@ 0038 @=@ other publication +dnet:publication_resource @=@ 0038 @=@ postface +dnet:publication_resource @=@ 0038 @=@ publication-other +dnet:publication_resource @=@ 0038 @=@ revuedepresse +dnet:publication_resource @=@ 0038 @=@ sa_component +dnet:publication_resource @=@ 0038 @=@ standard +dnet:publication_resource @=@ 0038 @=@ standard-series +dnet:publication_resource @=@ 0013 @=@ A3 Kirjan tai muun kokoomateoksen osa +dnet:publication_resource @=@ 0013 @=@ Book Part (author) +dnet:publication_resource @=@ 0013 @=@ Book Section / Chapter +dnet:publication_resource @=@ 0013 @=@ Book chapter or Essay in book +dnet:publication_resource @=@ 0013 @=@ Book editorial +dnet:publication_resource @=@ 0013 @=@ Book section +dnet:publication_resource @=@ 0013 @=@ Book_Chapter +dnet:publication_resource @=@ 0013 @=@ Buchbeitrag +dnet:publication_resource @=@ 0013 @=@ Capítulo de libro +dnet:publication_resource @=@ 0013 @=@ Contribution to International Book/Monograph +dnet:publication_resource @=@ 0013 @=@ Contribution to International Book/Monograph ISI/JCR +dnet:publication_resource @=@ 0013 @=@ Contribution to National Book/Monograph +dnet:publication_resource @=@ 0013 @=@ Contribution to book (non peer-reviewed) +dnet:publication_resource @=@ 0013 @=@ Contribution to book (peer-reviewed) +dnet:publication_resource @=@ 0013 @=@ Part of book - chapter +dnet:publication_resource @=@ 0013 @=@ book chapter +dnet:publication_resource @=@ 0013 @=@ book-part +dnet:publication_resource @=@ 0013 @=@ bookPart +dnet:publication_resource @=@ 0013 @=@ book_content +dnet:publication_resource @=@ 0013 @=@ 
chapitre_ouvrage +dnet:publication_resource @=@ 0013 @=@ chapter +dnet:publication_resource @=@ 0013 @=@ doc-type:bookPart +dnet:publication_resource @=@ 0013 @=@ http://purl.org/coar/resource_type/c_3248 +dnet:publication_resource @=@ 0013 @=@ info:eu-repo/semantics/bookPart +dnet:publication_resource @=@ 0013 @=@ publication-section +dnet:publication_resource @=@ 0013 @=@ reference-entry +dnet:publication_resource @=@ 0013 @=@ reference_entry +dnet:publication_resource @=@ 0013 @=@ scientific book chapter +dnet:publication_resource @=@ 0013 @=@ Глава монографии +dnet:publication_resource @=@ 0019 @=@ H1 Myönnetty patentti +dnet:publication_resource @=@ 0019 @=@ Patent +dnet:publication_resource @=@ 0019 @=@ Patente +dnet:publication_resource @=@ 0019 @=@ Solicitud de patente +dnet:publication_resource @=@ 0019 @=@ Traducción de patente +dnet:publication_resource @=@ 0019 @=@ brevet +dnet:publication_resource @=@ 0019 @=@ http://purl.org/coar/resource_type/c_15cd +dnet:publication_resource @=@ 0019 @=@ info:eu-repo/semantics/patent +dnet:publication_resource @=@ 0019 @=@ publication-patent +dnet:publication_resource @=@ 0028 @=@ Service +dnet:publication_resource @=@ 0028 @=@ physicalObject +dnet:publication_resource @=@ 0016 @=@ Pre Print +dnet:publication_resource @=@ 0016 @=@ Pre-print +dnet:publication_resource @=@ 0016 @=@ http://purl.org/coar/resource_type/c_816b +dnet:publication_resource @=@ 0016 @=@ info:eu-repo/semantics/preprint +dnet:publication_resource @=@ 0016 @=@ publication-preprint +dnet:publication_resource @=@ 0016 @=@ Препринт +dnet:publication_resource @=@ 0034 @=@ Project deliverable +dnet:publication_resource @=@ 0034 @=@ http://purl.org/coar/resource_type/c_18op +dnet:publication_resource @=@ 0034 @=@ publication-deliverable +dnet:publication_resource @=@ 0035 @=@ Project milestone +dnet:publication_resource @=@ 0035 @=@ publication-milestone +dnet:publication_resource @=@ 0036 @=@ Proposal +dnet:publication_resource @=@ 0036 @=@ http://purl.org/coar/resource_type/c_baaf +dnet:publication_resource @=@ 0036 @=@ research-proposal +dnet:publication_resource @=@ 0017 @=@ ACTIVITY_REPORT +dnet:publication_resource @=@ 0017 @=@ Commissioned report +dnet:publication_resource @=@ 0017 @=@ D4 Julkaistu kehittämis- tai tutkimusraportti tai -selvitys +dnet:publication_resource @=@ 0017 @=@ Deliverable +dnet:publication_resource @=@ 0017 @=@ Documento tecnico +dnet:publication_resource @=@ 0017 @=@ Project Report +dnet:publication_resource @=@ 0017 @=@ Software documentation +dnet:publication_resource @=@ 0017 @=@ brief-report +dnet:publication_resource @=@ 0017 @=@ case-report +dnet:publication_resource @=@ 0017 @=@ chapitre_rapport +dnet:publication_resource @=@ 0017 @=@ doc-type:report +dnet:publication_resource @=@ 0017 @=@ document_institutionnel +dnet:publication_resource @=@ 0017 @=@ document_technique +dnet:publication_resource @=@ 0017 @=@ http://purl.org/coar/resource_type/c_186u +dnet:publication_resource @=@ 0017 @=@ http://purl.org/coar/resource_type/c_18hj +dnet:publication_resource @=@ 0017 @=@ http://purl.org/coar/resource_type/c_18wq +dnet:publication_resource @=@ 0017 @=@ http://purl.org/coar/resource_type/c_18ws +dnet:publication_resource @=@ 0017 @=@ http://purl.org/coar/resource_type/c_71bd +dnet:publication_resource @=@ 0017 @=@ http://purl.org/coar/resource_type/c_93fc +dnet:publication_resource @=@ 0017 @=@ http://purl.org/coar/resource_type/c_ba1f +dnet:publication_resource @=@ 0017 @=@ info:eu-repo/semantics/report +dnet:publication_resource @=@ 0017 
@=@ publication-report +dnet:publication_resource @=@ 0017 @=@ publication-softwaredocumentation +dnet:publication_resource @=@ 0017 @=@ rapport_expertise +dnet:publication_resource @=@ 0017 @=@ rapport_mission +dnet:publication_resource @=@ 0017 @=@ report +dnet:publication_resource @=@ 0017 @=@ report-paper +dnet:publication_resource @=@ 0017 @=@ report-paper_title +dnet:publication_resource @=@ 0017 @=@ report-series +dnet:publication_resource @=@ 0017 @=@ support_cours +dnet:publication_resource @=@ 0014 @=@ Arbeitspapier +dnet:publication_resource @=@ 0014 @=@ Departmental Bulletin Paper +dnet:publication_resource @=@ 0014 @=@ Documento de trabajo +dnet:publication_resource @=@ 0014 @=@ Paper +dnet:publication_resource @=@ 0014 @=@ Project description +dnet:publication_resource @=@ 0014 @=@ Research-Paper +dnet:publication_resource @=@ 0014 @=@ ResearchPaper +dnet:publication_resource @=@ 0014 @=@ Working / discussion paper +dnet:publication_resource @=@ 0014 @=@ Working Paper +dnet:publication_resource @=@ 0014 @=@ Working Paper / Technical Report +dnet:publication_resource @=@ 0014 @=@ doc-type:workingPaper +dnet:publication_resource @=@ 0014 @=@ http://purl.org/coar/resource_type/c_8042 +dnet:publication_resource @=@ 0014 @=@ info:eu-repo/semantics/paper +dnet:publication_resource @=@ 0014 @=@ info:eu-repo/semantics/workingPaper +dnet:publication_resource @=@ 0014 @=@ publication-workingpaper +dnet:publication_resource @=@ 0014 @=@ workingPaper +dnet:publication_resource @=@ 0015 @=@ A2 Katsausartikkeli tieteellisessä aikakauslehdessä +dnet:publication_resource @=@ 0015 @=@ Book Review +dnet:publication_resource @=@ 0015 @=@ Book/Film/Article review +dnet:publication_resource @=@ 0015 @=@ Literature review +dnet:publication_resource @=@ 0015 @=@ Peer review +dnet:publication_resource @=@ 0015 @=@ Reseña bibliográfica +dnet:publication_resource @=@ 0015 @=@ Review Article +dnet:publication_resource @=@ 0015 @=@ RezensionReview +dnet:publication_resource @=@ 0015 @=@ book-review +dnet:publication_resource @=@ 0015 @=@ http://purl.org/coar/resource_type/c_ba08 +dnet:publication_resource @=@ 0015 @=@ http://purl.org/coar/resource_type/c_dcae04bc +dnet:publication_resource @=@ 0015 @=@ http://purl.org/coar/resource_type/c_efa0 +dnet:publication_resource @=@ 0015 @=@ info:eu-repo/semantics/review +dnet:publication_resource @=@ 0015 @=@ peer-review +dnet:publication_resource @=@ 0029 @=@ Software +dnet:publication_resource @=@ 0029 @=@ Software/Software +dnet:publication_resource @=@ 0029 @=@ Workflow +dnet:publication_resource @=@ 0029 @=@ Workflow/Workflow +dnet:publication_resource @=@ 0029 @=@ http://purl.org/coar/resource_type/c_393c +dnet:publication_resource @=@ 0029 @=@ http://purl.org/coar/resource_type/c_5ce6 +dnet:publication_resource @=@ 0029 @=@ http://purl.org/coar/resource_type/c_c950 +dnet:publication_resource @=@ 0032 @=@ http://purl.org/coar/resource_type/c_7bab +dnet:publication_resource @=@ 0030 @=@ http://purl.org/coar/resource_type/c_18cc +dnet:publication_resource @=@ 0030 @=@ sound +dnet:publication_resource @=@ 0044 @=@ Graduate diploma +dnet:publication_resource @=@ 0044 @=@ Undergraduate diploma +dnet:publication_resource @=@ 0000 @=@ UNKNOWN +dnet:publication_resource @=@ 0042 @=@ EGI Virtual Appliance +dnet:languages @=@ abk @=@ ab +dnet:languages @=@ aar @=@ aa +dnet:languages @=@ afr @=@ af +dnet:languages @=@ alb/sqi @=@ sq +dnet:languages @=@ amh @=@ am +dnet:languages @=@ ara @=@ ar +dnet:languages @=@ arm/hye @=@ hy +dnet:languages @=@ asm @=@ as 
+dnet:languages @=@ ina @=@ ia +dnet:languages @=@ aym @=@ ay +dnet:languages @=@ aze @=@ az +dnet:languages @=@ bak @=@ ba +dnet:languages @=@ baq/eus @=@ eu +dnet:languages @=@ bel @=@ be +dnet:languages @=@ ben @=@ bn +dnet:languages @=@ bih @=@ bh +dnet:languages @=@ bis @=@ bi +dnet:languages @=@ bre @=@ br +dnet:languages @=@ bul @=@ bg +dnet:languages @=@ bur/mya @=@ my +dnet:languages @=@ cat @=@ ca +dnet:languages @=@ chi/zho @=@ zh +dnet:languages @=@ cos @=@ co +dnet:languages @=@ hrv @=@ hr +dnet:languages @=@ hrv @=@ scr/hrv +dnet:languages @=@ ces/cze @=@ cs +dnet:languages @=@ dan @=@ da +dnet:languages @=@ dut/nld @=@ dut/nla +dnet:languages @=@ dut/nld @=@ dutdut +dnet:languages @=@ dut/nld @=@ nl +dnet:languages @=@ dut/nld @=@ nl_be +dnet:languages @=@ dut/nld @=@ nl_nl +dnet:languages @=@ dut/nld @=@ nld +dnet:languages @=@ dzo @=@ dz +dnet:languages @=@ eng @=@ en +dnet:languages @=@ eng @=@ en_au +dnet:languages @=@ eng @=@ en_en +dnet:languages @=@ eng @=@ en_gb +dnet:languages @=@ eng @=@ en_nz +dnet:languages @=@ eng @=@ en_us +dnet:languages @=@ eng @=@ english +dnet:languages @=@ eng @=@ en-us +dnet:languages @=@ eng @=@ en-US +dnet:languages @=@ eng @=@ English +dnet:languages @=@ eng @=@ EN +dnet:languages @=@ eng @=@ en angielski +dnet:languages @=@ eng @=@ en-GB +dnet:languages @=@ eng @=@ Englisch +dnet:languages @=@ epo @=@ eo +dnet:languages @=@ est @=@ et +dnet:languages @=@ fao @=@ fo +dnet:languages @=@ fij @=@ fj +dnet:languages @=@ fin @=@ fi +dnet:languages @=@ fin @=@ Finnish +dnet:languages @=@ fra/fre @=@ fr +dnet:languages @=@ fra/fre @=@ FR +dnet:languages @=@ fra/fre @=@ fr_be +dnet:languages @=@ fra/fre @=@ fr_fr +dnet:languages @=@ fra/fre @=@ fre/fra +dnet:languages @=@ fra/fre @=@ fra +dnet:languages @=@ fry @=@ fy +dnet:languages @=@ glg @=@ gl +dnet:languages @=@ geo/kat @=@ ka +dnet:languages @=@ deu/ger @=@ de +dnet:languages @=@ deu/ger @=@ ger/deu +dnet:languages @=@ deu/ger @=@ german +dnet:languages @=@ deu/ger @=@ ger +dnet:languages @=@ deu/ger @=@ deu +dnet:languages @=@ deu/ger @=@ DE-de +dnet:languages @=@ ell/gre @=@ el +dnet:languages @=@ ell/gre @=@ gr +dnet:languages @=@ ell/gre @=@ el-GR +dnet:languages @=@ kal @=@ kl +dnet:languages @=@ grn @=@ gn +dnet:languages @=@ guj @=@ gu +dnet:languages @=@ hau @=@ ha +dnet:languages @=@ heb @=@ he +dnet:languages @=@ hin @=@ hi +dnet:languages @=@ hun @=@ hu +dnet:languages @=@ ice/isl @=@ is +dnet:languages @=@ ine @=@ - +dnet:languages @=@ ind @=@ id +dnet:languages @=@ iku @=@ iu +dnet:languages @=@ ipk @=@ ik +dnet:languages @=@ gai/iri @=@ ga +dnet:languages @=@ gai/iri @=@ gle +dnet:languages @=@ ita @=@ it +dnet:languages @=@ jpn @=@ ja +dnet:languages @=@ jav @=@ jv +dnet:languages @=@ jav @=@ jv/jw +dnet:languages @=@ jav @=@ jw +dnet:languages @=@ kan @=@ kn +dnet:languages @=@ kas @=@ ks +dnet:languages @=@ kaz @=@ kk +dnet:languages @=@ khm @=@ km +dnet:languages @=@ kin @=@ rw +dnet:languages @=@ kir @=@ ky +dnet:languages @=@ kor @=@ ko +dnet:languages @=@ kur @=@ ku +dnet:languages @=@ lao @=@ lo +dnet:languages @=@ lat @=@ la +dnet:languages @=@ lav @=@ lv +dnet:languages @=@ lin @=@ ln +dnet:languages @=@ lit @=@ lt +dnet:languages @=@ mac/mak @=@ mk +dnet:languages @=@ mlg @=@ mg +dnet:languages @=@ may/msa @=@ ms +dnet:languages @=@ mlt @=@ ml +dnet:languages @=@ mao/mri @=@ mi +dnet:languages @=@ mar @=@ mr +dnet:languages @=@ mol @=@ mo +dnet:languages @=@ mon @=@ mn +dnet:languages @=@ nau @=@ na +dnet:languages @=@ nep @=@ ne
+dnet:languages @=@ nor @=@ no +dnet:languages @=@ oci @=@ oc +dnet:languages @=@ ori @=@ or +dnet:languages @=@ orm @=@ om +dnet:languages @=@ pan @=@ pa +dnet:languages @=@ fas/per @=@ fa +dnet:languages @=@ pol @=@ pl +dnet:languages @=@ por @=@ pt +dnet:languages @=@ por @=@ pt_pt +dnet:languages @=@ pus @=@ ps +dnet:languages @=@ que @=@ qu +dnet:languages @=@ roh @=@ rm +dnet:languages @=@ ron/rum @=@ ro +dnet:languages @=@ run @=@ rn +dnet:languages @=@ rus @=@ ru +dnet:languages @=@ smo @=@ sm +dnet:languages @=@ sag @=@ sg +dnet:languages @=@ san @=@ sa +dnet:languages @=@ srp @=@ scc/srp +dnet:languages @=@ srp @=@ sr +dnet:languages @=@ scr @=@ sh +dnet:languages @=@ sna @=@ sn +dnet:languages @=@ snd @=@ sd +dnet:languages @=@ sin @=@ si +dnet:languages @=@ sit @=@ - +dnet:languages @=@ slk/slo @=@ sk +dnet:languages @=@ slv @=@ sl +dnet:languages @=@ som @=@ so +dnet:languages @=@ sot @=@ st +dnet:languages @=@ esl/spa @=@ es +dnet:languages @=@ sun @=@ su +dnet:languages @=@ swa @=@ sw +dnet:languages @=@ ssw @=@ ss +dnet:languages @=@ swe @=@ sv +dnet:languages @=@ swe @=@ sve/swe +dnet:languages @=@ tgl @=@ tl +dnet:languages @=@ tgk @=@ tg +dnet:languages @=@ tam @=@ ta +dnet:languages @=@ tat @=@ tt +dnet:languages @=@ tel @=@ te +dnet:languages @=@ tha @=@ th +dnet:languages @=@ tha @=@ thai +dnet:languages @=@ bod/tib @=@ bo +dnet:languages @=@ tir @=@ ti +dnet:languages @=@ tog @=@ to +dnet:languages @=@ tso @=@ ts +dnet:languages @=@ tsn @=@ tn +dnet:languages @=@ tur @=@ tr +dnet:languages @=@ tuk @=@ tk +dnet:languages @=@ twi @=@ tw +dnet:languages @=@ uig @=@ ug +dnet:languages @=@ ukr @=@ uk +dnet:languages @=@ und @=@ UNKNOWN +dnet:languages @=@ und @=@ none +dnet:languages @=@ urd @=@ ur +dnet:languages @=@ uzb @=@ uz +dnet:languages @=@ vie @=@ vi +dnet:languages @=@ vol @=@ vo +dnet:languages @=@ wln @=@ wa +dnet:languages @=@ cym/wel @=@ cy +dnet:languages @=@ wol @=@ wo +dnet:languages @=@ xho @=@ xh +dnet:languages @=@ yid @=@ yi +dnet:languages @=@ yor @=@ yo +dnet:languages @=@ zha @=@ za +dnet:languages @=@ zul @=@ zu +dnet:result_typologies @=@ dataset @=@ 0021 +dnet:result_typologies @=@ dataset @=@ 0024 +dnet:result_typologies @=@ dataset @=@ 0025 +dnet:result_typologies @=@ dataset @=@ 0030 +dnet:result_typologies @=@ dataset @=@ 0033 +dnet:result_typologies @=@ dataset @=@ 0037 +dnet:result_typologies @=@ dataset @=@ 0039 +dnet:result_typologies @=@ dataset @=@ 0046 +dnet:result_typologies @=@ other @=@ 0000 +dnet:result_typologies @=@ other @=@ 0010 +dnet:result_typologies @=@ other @=@ 0018 +dnet:result_typologies @=@ other @=@ 0020 +dnet:result_typologies @=@ other @=@ 0022 +dnet:result_typologies @=@ other @=@ 0023 +dnet:result_typologies @=@ other @=@ 0026 +dnet:result_typologies @=@ other @=@ 0027 +dnet:result_typologies @=@ other @=@ 0028 +dnet:result_typologies @=@ other @=@ 0042 +dnet:result_typologies @=@ publication @=@ 0001 +dnet:result_typologies @=@ publication @=@ 0002 +dnet:result_typologies @=@ publication @=@ 0004 +dnet:result_typologies @=@ publication @=@ 0005 +dnet:result_typologies @=@ publication @=@ 0006 +dnet:result_typologies @=@ publication @=@ 0007 +dnet:result_typologies @=@ publication @=@ 0008 +dnet:result_typologies @=@ publication @=@ 0009 +dnet:result_typologies @=@ publication @=@ 0011 +dnet:result_typologies @=@ publication @=@ 0012 +dnet:result_typologies @=@ publication @=@ 0013 +dnet:result_typologies @=@ publication @=@ 0014 +dnet:result_typologies @=@ publication @=@ 0015 +dnet:result_typologies @=@ 
publication @=@ 0016 +dnet:result_typologies @=@ publication @=@ 0017 +dnet:result_typologies @=@ publication @=@ 0019 +dnet:result_typologies @=@ publication @=@ 0031 +dnet:result_typologies @=@ publication @=@ 0032 +dnet:result_typologies @=@ publication @=@ 0034 +dnet:result_typologies @=@ publication @=@ 0035 +dnet:result_typologies @=@ publication @=@ 0036 +dnet:result_typologies @=@ publication @=@ 0038 +dnet:result_typologies @=@ publication @=@ 0044 +dnet:result_typologies @=@ publication @=@ 0045 +dnet:result_typologies @=@ software @=@ 0029 +dnet:result_typologies @=@ software @=@ 0040 +dnet:countries @=@ AF @=@ AFG +dnet:countries @=@ AF @=@ Afghanistan +dnet:countries @=@ AD @=@ Andorra +dnet:countries @=@ AO @=@ Angola +dnet:countries @=@ AR @=@ ARG +dnet:countries @=@ AR @=@ Argentina +dnet:countries @=@ AU @=@ AUS +dnet:countries @=@ AU @=@ Australia +dnet:countries @=@ AT @=@ AUT +dnet:countries @=@ AT @=@ Austria +dnet:countries @=@ AZ @=@ AZE +dnet:countries @=@ BD @=@ Bangladesh +dnet:countries @=@ BY @=@ Belarus +dnet:countries @=@ BE @=@ BEL +dnet:countries @=@ BE @=@ Belgium +dnet:countries @=@ BJ @=@ BEN +dnet:countries @=@ BO @=@ Bolivia, Plurinational State of +dnet:countries @=@ BA @=@ BIH +dnet:countries @=@ BA @=@ Bosnia-Hercegovina +dnet:countries @=@ BR @=@ BRA +dnet:countries @=@ BR @=@ Brazil +dnet:countries @=@ BG @=@ Bulgaria +dnet:countries @=@ BF @=@ BFA +dnet:countries @=@ KH @=@ Cambodia +dnet:countries @=@ KH @=@ Cambogia +dnet:countries @=@ KH @=@ Campuchea +dnet:countries @=@ CM @=@ CMR +dnet:countries @=@ CA @=@ CAN +dnet:countries @=@ CA @=@ Canada +dnet:countries @=@ CV @=@ Cape Verde +dnet:countries @=@ CL @=@ CHL +dnet:countries @=@ CL @=@ Chile +dnet:countries @=@ CN @=@ CHN +dnet:countries @=@ CN @=@ China +dnet:countries @=@ CO @=@ COL +dnet:countries @=@ CO @=@ Colombia +dnet:countries @=@ CD @=@ Congo +dnet:countries @=@ CD @=@ Congo Democratic Republic (formerly Zaire) +dnet:countries @=@ CD @=@ Congo, Republic +dnet:countries @=@ CD @=@ Congo, the Democratic Republic of the +dnet:countries @=@ CD @=@ Zaire +dnet:countries @=@ CR @=@ CRI +dnet:countries @=@ CI @=@ CIV +dnet:countries @=@ CI @=@ Ivory Coast +dnet:countries @=@ HR @=@ Croatia +dnet:countries @=@ HR @=@ HRV +dnet:countries @=@ CY @=@ CYP +dnet:countries @=@ CY @=@ Cyprus +dnet:countries @=@ CZ @=@ CZE +dnet:countries @=@ CZ @=@ Czech Republic +dnet:countries @=@ CZ @=@ Czechia +dnet:countries @=@ CZ @=@ Czechoslovakia +dnet:countries @=@ DK @=@ DNK +dnet:countries @=@ DK @=@ Denmark +dnet:countries @=@ EC @=@ Ecuador +dnet:countries @=@ EG @=@ EGY +dnet:countries @=@ EG @=@ Egypt +dnet:countries @=@ SV @=@ SLV +dnet:countries @=@ EE @=@ EST +dnet:countries @=@ EE @=@ Estonia +dnet:countries @=@ ET @=@ ETH +dnet:countries @=@ EU @=@ EEC +dnet:countries @=@ FJ @=@ FJI +dnet:countries @=@ FI @=@ FIN +dnet:countries @=@ FI @=@ Finland +dnet:countries @=@ MK @=@ Macedonia +dnet:countries @=@ MK @=@ Macedonia, the Former Yugoslav Republic Of +dnet:countries @=@ MK @=@ North Macedonia +dnet:countries @=@ FR @=@ FRA +dnet:countries @=@ FR @=@ France +dnet:countries @=@ PF @=@ French Polynesia +dnet:countries @=@ PF @=@ PYF +dnet:countries @=@ TF @=@ French Southern Territories +dnet:countries @=@ GE @=@ Georgia +dnet:countries @=@ DE @=@ DEU +dnet:countries @=@ DE @=@ Germany +dnet:countries @=@ DE @=@ Germany, Berlin +dnet:countries @=@ GH @=@ GHA +dnet:countries @=@ GR @=@ EL +dnet:countries @=@ GR @=@ GRC +dnet:countries @=@ GL @=@ GRL +dnet:countries @=@ GN @=@ Guinea 
+dnet:countries @=@ GW @=@ Guinea-Bissau +dnet:countries @=@ VA @=@ Vatican State +dnet:countries @=@ HK @=@ HKG +dnet:countries @=@ HK @=@ Hong Kong +dnet:countries @=@ HK @=@ Hongkong +dnet:countries @=@ HU @=@ HUN +dnet:countries @=@ HU @=@ Hungary +dnet:countries @=@ IS @=@ ISL +dnet:countries @=@ IN @=@ IND +dnet:countries @=@ IN @=@ India +dnet:countries @=@ ID @=@ IDN +dnet:countries @=@ ID @=@ Indonesia +dnet:countries @=@ IR @=@ Iran +dnet:countries @=@ IR @=@ Iran, Islamic Republic of +dnet:countries @=@ IE @=@ IRL +dnet:countries @=@ IE @=@ Ireland +dnet:countries @=@ IL @=@ ISR +dnet:countries @=@ IL @=@ Israel +dnet:countries @=@ IT @=@ ITA +dnet:countries @=@ IT @=@ Italy +dnet:countries @=@ JM @=@ Jamaica +dnet:countries @=@ JP @=@ JPN +dnet:countries @=@ JP @=@ Japan +dnet:countries @=@ KZ @=@ KAZ +dnet:countries @=@ KZ @=@ Kazakistan +dnet:countries @=@ KZ @=@ Kazakstan +dnet:countries @=@ KE @=@ KEN +dnet:countries @=@ KE @=@ Kenya +dnet:countries @=@ KR @=@ KOR +dnet:countries @=@ KR @=@ Korea, Republic of +dnet:countries @=@ KR @=@ Korean Republic (South Korea) +dnet:countries @=@ KP @=@ PRK +dnet:countries @=@ LV @=@ LVA +dnet:countries @=@ LY @=@ Libya +dnet:countries @=@ LT @=@ LTU +dnet:countries @=@ LU @=@ LUX +dnet:countries @=@ LU @=@ Luxembourg +dnet:countries @=@ MO @=@ Macao +dnet:countries @=@ MG @=@ Madagascar +dnet:countries @=@ MY @=@ Malaysia +dnet:countries @=@ ML @=@ Mali +dnet:countries @=@ MT @=@ Malta +dnet:countries @=@ MU @=@ Mauritius +dnet:countries @=@ MX @=@ MEX +dnet:countries @=@ MX @=@ Mexico +dnet:countries @=@ FM @=@ Micronesia +dnet:countries @=@ MD @=@ Moldova +dnet:countries @=@ MD @=@ Moldova, Republic of +dnet:countries @=@ MN @=@ Mongolia +dnet:countries @=@ MA @=@ Morocco +dnet:countries @=@ MZ @=@ Mozambique +dnet:countries @=@ NA @=@ NAM +dnet:countries @=@ NL @=@ NLD +dnet:countries @=@ NL @=@ Netherlands +dnet:countries @=@ AN @=@ Netherlands Antilles +dnet:countries @=@ NC @=@ NCL +dnet:countries @=@ NZ @=@ NZL +dnet:countries @=@ NZ @=@ New Zealand +dnet:countries @=@ NO @=@ NOR +dnet:countries @=@ NO @=@ Norway +dnet:countries @=@ OC @=@ Australasia +dnet:countries @=@ OM @=@ Oman +dnet:countries @=@ PK @=@ PAK +dnet:countries @=@ PK @=@ Pakistan +dnet:countries @=@ PS @=@ Palestin, State of +dnet:countries @=@ PS @=@ Palestine, State of +dnet:countries @=@ PS @=@ Palestinian Territory, Occupied +dnet:countries @=@ PA @=@ PAN +dnet:countries @=@ PA @=@ Panama +dnet:countries @=@ PG @=@ PapuaNew Guinea +dnet:countries @=@ PE @=@ PER +dnet:countries @=@ PH @=@ PHL +dnet:countries @=@ PH @=@ Philippines +dnet:countries @=@ PL @=@ POL +dnet:countries @=@ PL @=@ Poland +dnet:countries @=@ PT @=@ PRT +dnet:countries @=@ PT @=@ Portugal +dnet:countries @=@ PR @=@ Puerto Rico +dnet:countries @=@ RO @=@ ROU +dnet:countries @=@ RO @=@ Romania +dnet:countries @=@ RU @=@ RUS +dnet:countries @=@ RU @=@ Russia +dnet:countries @=@ RU @=@ Russian Federation +dnet:countries @=@ RE @=@ Réunion +dnet:countries @=@ KN @=@ Saint Kitts And Nevis +dnet:countries @=@ SA @=@ Saudi Arabia +dnet:countries @=@ SN @=@ SEN +dnet:countries @=@ RS @=@ SRB +dnet:countries @=@ CS @=@ Serbia and Montenegro +dnet:countries @=@ SG @=@ SGP +dnet:countries @=@ SG @=@ Singapore +dnet:countries @=@ SK @=@ SVK +dnet:countries @=@ SI @=@ SVN +dnet:countries @=@ SI @=@ Slovenia +dnet:countries @=@ ZA @=@ South Africa +dnet:countries @=@ ZA @=@ ZAF +dnet:countries @=@ ES @=@ ESP +dnet:countries @=@ ES @=@ Spain +dnet:countries @=@ LK @=@ LKA +dnet:countries @=@ LK @=@ 
Sri Lanka +dnet:countries @=@ SD @=@ SDN +dnet:countries @=@ SR @=@ Suriname +dnet:countries @=@ SE @=@ SWE +dnet:countries @=@ SE @=@ Sweden +dnet:countries @=@ CH @=@ CHE +dnet:countries @=@ CH @=@ Switzerland +dnet:countries @=@ SY @=@ Syria +dnet:countries @=@ ST @=@ Sao Tome and Principe +dnet:countries @=@ TW @=@ TWN +dnet:countries @=@ TW @=@ Taiwan +dnet:countries @=@ TW @=@ Taiwan, Province of China +dnet:countries @=@ TZ @=@ Tanzania +dnet:countries @=@ TZ @=@ Tanzania, United Republic of +dnet:countries @=@ TH @=@ THA +dnet:countries @=@ TH @=@ Thailand +dnet:countries @=@ TL @=@ East Timor +dnet:countries @=@ TN @=@ TUN +dnet:countries @=@ TN @=@ Tunisia +dnet:countries @=@ TR @=@ TUR +dnet:countries @=@ TR @=@ Turkey +dnet:countries @=@ UNKNOWN @=@ AAA +dnet:countries @=@ UNKNOWN @=@ [Unknown] +dnet:countries @=@ UNKNOWN @=@ _? +dnet:countries @=@ UA @=@ UKR +dnet:countries @=@ UA @=@ Ukraine +dnet:countries @=@ AE @=@ United Arab Emirates +dnet:countries @=@ GB @=@ England +dnet:countries @=@ GB @=@ GBR +dnet:countries @=@ GB @=@ Great Britain +dnet:countries @=@ GB @=@ Great Britain and Northern Ireland +dnet:countries @=@ GB @=@ Scotland +dnet:countries @=@ GB @=@ UK +dnet:countries @=@ GB @=@ United Kingdom +dnet:countries @=@ US @=@ USA +dnet:countries @=@ US @=@ United States +dnet:countries @=@ US @=@ United States of America +dnet:countries @=@ UY @=@ Uruguay +dnet:countries @=@ UZ @=@ Uzbekistan +dnet:countries @=@ VE @=@ Venezuela, Bolivarian Republic of +dnet:countries @=@ VN @=@ Vietnam +dnet:countries @=@ VG @=@ British Virgin Islands +dnet:countries @=@ YU @=@ Jugoslavia +dnet:countries @=@ YU @=@ Yugoslavia +dnet:countries @=@ ZW @=@ ABW +dnet:protocols @=@ oai @=@ OAI-PMH +dnet:protocols @=@ oai @=@ OAI_PMH +dnet:pid_types @=@ orcid @=@ ORCID12 +dnet:review_levels @=@ 0000 @=@ UNKNOWN +dnet:review_levels @=@ 0002 @=@ 80 大阪経大学会「Working Paper」 +dnet:review_levels @=@ 0002 @=@ AO +dnet:review_levels @=@ 0002 @=@ ARTICLE SANS COMITE DE LECTURE (ASCL) +dnet:review_levels @=@ 0002 @=@ Arbeitspapier +dnet:review_levels @=@ 0002 @=@ Arbeitspapier [workingPaper] +dnet:review_levels @=@ 0002 @=@ Article (author) +dnet:review_levels @=@ 0002 @=@ Article type: preprint +dnet:review_levels @=@ 0002 @=@ Article(author version) +dnet:review_levels @=@ 0002 @=@ Article, not peer-reviewed +dnet:review_levels @=@ 0002 @=@ Articulo no evaluado +dnet:review_levels @=@ 0002 @=@ Artigo Solicitado e Não Avaliado por Pares +dnet:review_levels @=@ 0002 @=@ Artigo não avaliado pelos pares +dnet:review_levels @=@ 0002 @=@ Artigo não avaliado por pares +dnet:review_levels @=@ 0002 @=@ Artigo não avaliado por pres +dnet:review_levels @=@ 0002 @=@ Artikkeli|Artikkeli ammattilehdessä. 
Ei vertaisarvioitu +dnet:review_levels @=@ 0002 @=@ Artículo no evaluado +dnet:review_levels @=@ 0002 @=@ Book (non peer-reviewed) +dnet:review_levels @=@ 0002 @=@ Book Part (author) +dnet:review_levels @=@ 0002 @=@ Book item; Non-peer-reviewed +dnet:review_levels @=@ 0002 @=@ Conference preprint +dnet:review_levels @=@ 0002 @=@ Contribution to book (non peer-reviewed) +dnet:review_levels @=@ 0002 @=@ Discussion Paper +dnet:review_levels @=@ 0002 @=@ Document de travail (Working Paper) +dnet:review_levels @=@ 0002 @=@ Documento de trabajo +dnet:review_levels @=@ 0002 @=@ Documento de trabajo de investigaci??n +dnet:review_levels @=@ 0002 @=@ Draft +dnet:review_levels @=@ 0002 @=@ E-pub ahead of print +dnet:review_levels @=@ 0002 @=@ Editorial de revista, no evaluado por pares +dnet:review_levels @=@ 0002 @=@ Editorial de revista, não avaliado por pares +dnet:review_levels @=@ 0002 @=@ Editorial não avaliado pelos pares +dnet:review_levels @=@ 0002 @=@ Editors (non peer-reviewed) +dnet:review_levels @=@ 0002 @=@ Epub ahead of print +dnet:review_levels @=@ 0002 @=@ Hakemlik Sürecinden Geçmiş Makale +dnet:review_levels @=@ 0002 @=@ Hakemlik sürecindeki makale +dnet:review_levels @=@ 0002 @=@ Hakemlik sürecinden geçmemiş kitap değerlendirmesi +dnet:review_levels @=@ 0002 @=@ Journal Article (author version) +dnet:review_levels @=@ 0002 @=@ Journal Article Preprint +dnet:review_levels @=@ 0002 @=@ Journal Editorial, not peer-reviewed +dnet:review_levels @=@ 0002 @=@ Journal article; Non-peer-reviewed +dnet:review_levels @=@ 0002 @=@ Journal:WorkingPaper +dnet:review_levels @=@ 0002 @=@ Manuscript (preprint) +dnet:review_levels @=@ 0002 @=@ Monográfico (Informes, Documentos de trabajo, etc.) +dnet:review_levels @=@ 0002 @=@ NOTE INTERNE OU DE TRAVAIL +dnet:review_levels @=@ 0002 @=@ Nicht begutachteter Beitrag +dnet:review_levels @=@ 0002 @=@ No evaluado por pares +dnet:review_levels @=@ 0002 @=@ Non-Refereed +dnet:review_levels @=@ 0002 @=@ Non-refeered article +dnet:review_levels @=@ 0002 @=@ Non-refereed Article +dnet:review_levels @=@ 0002 @=@ Non-refereed Book Review +dnet:review_levels @=@ 0002 @=@ Non-refereed Review +dnet:review_levels @=@ 0002 @=@ Non-refereed Text +dnet:review_levels @=@ 0002 @=@ NonPeerReviewed +dnet:review_levels @=@ 0002 @=@ Not Peer reviewed +dnet:review_levels @=@ 0002 @=@ Not Reviewed +dnet:review_levels @=@ 0002 @=@ Not peer-reviewed +dnet:review_levels @=@ 0002 @=@ Não Avaliado por Pares +dnet:review_levels @=@ 0002 @=@ Não avaliada pelos pares +dnet:review_levels @=@ 0002 @=@ Não avaliado pelos pares +dnet:review_levels @=@ 0002 @=@ Original article (non peer-reviewed) +dnet:review_levels @=@ 0002 @=@ Other publication (non peer-review) +dnet:review_levels @=@ 0002 @=@ Pre Print +dnet:review_levels @=@ 0002 @=@ Pre-print +dnet:review_levels @=@ 0002 @=@ Preprint Article +dnet:review_levels @=@ 0002 @=@ Preprints +dnet:review_levels @=@ 0002 @=@ Preprints, Working Papers, ... +dnet:review_levels @=@ 0002 @=@ Rapporto tecnico / Working Paper / Rapporto di progetto +dnet:review_levels @=@ 0002 @=@ Resumo Não Avaliado por Pares +dnet:review_levels @=@ 0002 @=@ Review article (non peer-reviewed) +dnet:review_levels @=@ 0002 @=@ SMUR +dnet:review_levels @=@ 0002 @=@ Submissão dos artigos +dnet:review_levels @=@ 0002 @=@ Submitted version +dnet:review_levels @=@ 0002 @=@ Vertaisarvioimaton kirjan tai muun kokoomateoksen osa +dnet:review_levels @=@ 0002 @=@ Vorabdruck +dnet:review_levels @=@ 0002 @=@ Wetensch. publ. 
non-refereed +dnet:review_levels @=@ 0002 @=@ Working / discussion paper +dnet:review_levels @=@ 0002 @=@ Working Document +dnet:review_levels @=@ 0002 @=@ Working Notes +dnet:review_levels @=@ 0002 @=@ Working Paper +dnet:review_levels @=@ 0002 @=@ Working Paper / Technical Report +dnet:review_levels @=@ 0002 @=@ Working Papers +dnet:review_levels @=@ 0002 @=@ WorkingPaper +dnet:review_levels @=@ 0002 @=@ article in non peer-reviewed journal +dnet:review_levels @=@ 0002 @=@ articolo preliminare +dnet:review_levels @=@ 0002 @=@ articulo preliminar +dnet:review_levels @=@ 0002 @=@ articulo sin revision por pares +dnet:review_levels @=@ 0002 @=@ artigo preliminar +dnet:review_levels @=@ 0002 @=@ artigo sem revisão +dnet:review_levels @=@ 0002 @=@ artículo preliminar +dnet:review_levels @=@ 0002 @=@ artículo sin revisión por pares +dnet:review_levels @=@ 0002 @=@ bookchapter (author version) +dnet:review_levels @=@ 0002 @=@ borrador +dnet:review_levels @=@ 0002 @=@ column (author version) +dnet:review_levels @=@ 0002 @=@ communication_invitee +dnet:review_levels @=@ 0002 @=@ doc-type:preprint +dnet:review_levels @=@ 0002 @=@ doc-type:workingPaper +dnet:review_levels @=@ 0002 @=@ draf +dnet:review_levels @=@ 0002 @=@ eu-repo/semantics/submittedVersion +dnet:review_levels @=@ 0002 @=@ http://purl.org/coar/resource_type/c_8042 +dnet:review_levels @=@ 0002 @=@ http://purl.org/coar/resource_type/c_816b +dnet:review_levels @=@ 0002 @=@ http://purl.org/coar/version/c_71e4c1898caa6e32 +dnet:review_levels @=@ 0002 @=@ http://purl.org/coar/version/c_b1a7d7d4d402bcce +dnet:review_levels @=@ 0002 @=@ http://purl.org/eprint/type/SubmittedBookItem +dnet:review_levels @=@ 0002 @=@ http://purl.org/eprint/type/SubmittedJournalArticle +dnet:review_levels @=@ 0002 @=@ http://purl.org/info:eu-repo/semantics/authorVersion +dnet:review_levels @=@ 0002 @=@ http://purl.org/info:eu-repo/semantics/submittedVersion +dnet:review_levels @=@ 0002 @=@ http://purl.org/spar/fabio/Preprint +dnet:review_levels @=@ 0002 @=@ http://purl.org/spar/fabio/WorkingPaper +dnet:review_levels @=@ 0002 @=@ https://dictionary.casrai.org/Preprint +dnet:review_levels @=@ 0002 @=@ info:ar-repo/semantics/documento de trabajo +dnet:review_levels @=@ 0002 @=@ info:ar-repo/semantics/documentoDeTrabajo +dnet:review_levels @=@ 0002 @=@ info:eu repo/semantics/draft +dnet:review_levels @=@ 0002 @=@ info:eu-repo/semantics/authorVersion +dnet:review_levels @=@ 0002 @=@ info:eu-repo/semantics/draft +dnet:review_levels @=@ 0002 @=@ info:eu-repo/semantics/preprint +dnet:review_levels @=@ 0002 @=@ info:eu-repo/semantics/submitedVersion +dnet:review_levels @=@ 0002 @=@ info:eu-repo/semantics/submittedVersion +dnet:review_levels @=@ 0002 @=@ info:eu-repo/semantics/unReviewed +dnet:review_levels @=@ 0002 @=@ info:eu-repo/semantics/updatedVersion +dnet:review_levels @=@ 0002 @=@ info:eu-repo/semantics/workingPaper +dnet:review_levels @=@ 0002 @=@ info:eu-repo/submittedVersion +dnet:review_levels @=@ 0002 @=@ info:ulb-repo/semantics/articleNonPeerReview +dnet:review_levels @=@ 0002 @=@ info:ulb-repo/semantics/openurl/vlink-workingpaper +dnet:review_levels @=@ 0002 @=@ info:ulb-repo/semantics/workingPaper +dnet:review_levels @=@ 0002 @=@ non peer-reviewed article +dnet:review_levels @=@ 0002 @=@ non-refereed review article +dnet:review_levels @=@ 0002 @=@ não avaliado +dnet:review_levels @=@ 0002 @=@ preprint +dnet:review_levels @=@ 0002 @=@ prepublicación +dnet:review_levels @=@ 0002 @=@ proceeding, seminar, workshop without peer review +dnet:review_levels @=@ 
0002 @=@ proceedings (author version) +dnet:review_levels @=@ 0002 @=@ pré-print +dnet:review_levels @=@ 0002 @=@ pré-publication +dnet:review_levels @=@ 0002 @=@ préprint +dnet:review_levels @=@ 0002 @=@ prépublication +dnet:review_levels @=@ 0002 @=@ publicació preliminar +dnet:review_levels @=@ 0002 @=@ publication-preprint +dnet:review_levels @=@ 0002 @=@ publication-workingpaper +dnet:review_levels @=@ 0002 @=@ submitedVersion +dnet:review_levels @=@ 0002 @=@ submittedVersion +dnet:review_levels @=@ 0002 @=@ voordruk +dnet:review_levels @=@ 0002 @=@ workingPaper +dnet:review_levels @=@ 0002 @=@ ön baskı +dnet:review_levels @=@ 0002 @=@ Препринт +dnet:review_levels @=@ 0002 @=@ предпечатная версия публикации +dnet:review_levels @=@ 0002 @=@ препринт статьи +dnet:review_levels @=@ 0002 @=@ ディスカッション/ワーキング・ペーパー DP/WP +dnet:review_levels @=@ 0002 @=@ プレプリント +dnet:review_levels @=@ 0002 @=@ プレプリント Preprint +dnet:review_levels @=@ 0002 @=@ プレプリント(Preprint) +dnet:review_levels @=@ 0002 @=@ 印刷物/電子媒体-その他(査読無し) +dnet:review_levels @=@ 0002 @=@ 印刷物/電子媒体-テクニカルレポート類(査読無し) +dnet:review_levels @=@ 0002 @=@ 印刷物/電子媒体-会議発表論文(査読無し) +dnet:review_levels @=@ 0002 @=@ 印刷物/電子媒体-図書(査読無し) +dnet:review_levels @=@ 0002 @=@ 印刷物/電子媒体-学術雑誌論文(査読無し) +dnet:review_levels @=@ 0002 @=@ 印刷物/電子媒体-紀要論文(査読無し) +dnet:review_levels @=@ 0002 @=@ 印刷物/電子媒体-雑誌記事(査読無し) +dnet:review_levels @=@ 0002 @=@ 预印本 +dnet:review_levels @=@ 0001 @=@ ##rt.metadata.pkp.peerReviewed## +dnet:review_levels @=@ 0001 @=@ A1 Alkuperäisartikkeli tieteellisessä aikakauslehdessä +dnet:review_levels @=@ 0001 @=@ Art?culo revisado por pares +dnet:review_levels @=@ 0001 @=@ Article revisat per persones expertes +dnet:review_levels @=@ 0001 @=@ Article type: peer review +dnet:review_levels @=@ 0001 @=@ Article évalué par les pairs +dnet:review_levels @=@ 0001 @=@ Article évalué par des pairs +dnet:review_levels @=@ 0001 @=@ Articolo valutato secondo i criteri della peer review +dnet:review_levels @=@ 0001 @=@ Articulo evaluado por dos pares +dnet:review_levels @=@ 0001 @=@ Articulo revisado por pares +dnet:review_levels @=@ 0001 @=@ Artigo Avaliado pelos Pares +dnet:review_levels @=@ 0001 @=@ Artigo Revisto por Pares +dnet:review_levels @=@ 0001 @=@ Artigo avaliado por blind peer review +dnet:review_levels @=@ 0001 @=@ Artigo avaliado por pares +dnet:review_levels @=@ 0001 @=@ Artigo de convidado.
Avaliado pelos pares +dnet:review_levels @=@ 0001 @=@ Artigos; Avaliado pelos pares +dnet:review_levels @=@ 0001 @=@ Artículo de investigación, Investigaciones originales, Artículo evaluado por pares, Investigaciones empíricas +dnet:review_levels @=@ 0001 @=@ Artículo evaluado por pares +dnet:review_levels @=@ 0001 @=@ Artículo evaluado por pares, Ensayos de investigación +dnet:review_levels @=@ 0001 @=@ Artículo evaluado por pares, Investigaciones empíricas, Artículos de investigación +dnet:review_levels @=@ 0001 @=@ Artículo revisado +dnet:review_levels @=@ 0001 @=@ Artículo revisado por pares +dnet:review_levels @=@ 0001 @=@ Artículos de estudiantes, Artículo evaluado por pares, Artículos de investigación +dnet:review_levels @=@ 0001 @=@ Artículos de investigación evaluados por doble ciego +dnet:review_levels @=@ 0001 @=@ Artículos evaluadores por doble ciego +dnet:review_levels @=@ 0001 @=@ Artículos evaluados por pares +dnet:review_levels @=@ 0001 @=@ Artículos evaluados por pares académicos +dnet:review_levels @=@ 0001 @=@ Artículos revisados por pares +dnet:review_levels @=@ 0001 @=@ Avaliadas pelos pares +dnet:review_levels @=@ 0001 @=@ Avaliado anonimamente por pares +dnet:review_levels @=@ 0001 @=@ Avaliado em duplo cego por pares +dnet:review_levels @=@ 0001 @=@ Avaliado pela Editoria +dnet:review_levels @=@ 0001 @=@ Avaliado pela Editoria. Avaliado pelos pares. +dnet:review_levels @=@ 0001 @=@ Avaliado pelo Editoria +dnet:review_levels @=@ 0001 @=@ Avaliado pelo pares +dnet:review_levels @=@ 0001 @=@ Avaliado pelos Editores +dnet:review_levels @=@ 0001 @=@ Avaliado pelos pares +dnet:review_levels @=@ 0001 @=@ Avaliado pelos pares, Artigo de convidado +dnet:review_levels @=@ 0001 @=@ Avaliado pelos pares, Artigos Originais +dnet:review_levels @=@ 0001 @=@ Avaliado pelos pares, Artigos Originais, Artigos de Revisão +dnet:review_levels @=@ 0001 @=@ Avaliado pelos pares. 
Avaliado pelo Editoria +dnet:review_levels @=@ 0001 @=@ Avaliado po Pares +dnet:review_levels @=@ 0001 @=@ Avaliado por Editor +dnet:review_levels @=@ 0001 @=@ Avaliado por pares +dnet:review_levels @=@ 0001 @=@ Avaliados pelos pares +dnet:review_levels @=@ 0001 @=@ Avaliados por Pares +dnet:review_levels @=@ 0001 @=@ Blind Peer-reviewed Article +dnet:review_levels @=@ 0001 @=@ Book (peer-reviewed) +dnet:review_levels @=@ 0001 @=@ Comentario de libros, Comentario de revistas, Comentario de conferencias, Artículo evaluado por pares, Artículo de investigación +dnet:review_levels @=@ 0001 @=@ Conference paper; Peer-reviewed +dnet:review_levels @=@ 0001 @=@ Contribution to book (peer-reviewed) +dnet:review_levels @=@ 0001 @=@ Documento Avaliado por Pares +dnet:review_levels @=@ 0001 @=@ Double blind evaluation articles +dnet:review_levels @=@ 0001 @=@ Double blind peer review +dnet:review_levels @=@ 0001 @=@ Editors (peer-reviewed) +dnet:review_levels @=@ 0001 @=@ Evaluación por pares +dnet:review_levels @=@ 0001 @=@ Evaluado por pares +dnet:review_levels @=@ 0001 @=@ Evaluados por los pares +dnet:review_levels @=@ 0001 @=@ Hakem sürecinden geçmiş makale +dnet:review_levels @=@ 0001 @=@ Hakemli makale +dnet:review_levels @=@ 0001 @=@ Hakemlik Sürecinden Geçmiş +dnet:review_levels @=@ 0001 @=@ Invited Peer-Reviewed Article +dnet:review_levels @=@ 0001 @=@ Journal article; Peer-reviewed +dnet:review_levels @=@ 0001 @=@ Original article (peer-reviewed) +dnet:review_levels @=@ 0001 @=@ Other publication (peer-review) +dnet:review_levels @=@ 0001 @=@ Paper peer-reviewed +dnet:review_levels @=@ 0001 @=@ Papers evaluated by academic peers +dnet:review_levels @=@ 0001 @=@ Peer reviewed +dnet:review_levels @=@ 0001 @=@ Peer reviewed article +dnet:review_levels @=@ 0001 @=@ Peer reviewed invited commentry +dnet:review_levels @=@ 0001 @=@ Peer-Reviewed Protocol +dnet:review_levels @=@ 0001 @=@ Peer-reviewd Article +dnet:review_levels @=@ 0001 @=@ Peer-reviewed +dnet:review_levels @=@ 0001 @=@ Peer-reviewed Article +dnet:review_levels @=@ 0001 @=@ Peer-reviewed Paper +dnet:review_levels @=@ 0001 @=@ Peer-reviewed Review +dnet:review_levels @=@ 0001 @=@ Peer-reviewed Review Article +dnet:review_levels @=@ 0001 @=@ Peer-reviewed Text +dnet:review_levels @=@ 0001 @=@ Peer-reviewed communication +dnet:review_levels @=@ 0001 @=@ Peer-reviewed conference proceedings +dnet:review_levels @=@ 0001 @=@ Peer-reviewed research article +dnet:review_levels @=@ 0001 @=@ Peer-reviewed short communication +dnet:review_levels @=@ 0001 @=@ PeerReviewed +dnet:review_levels @=@ 0001 @=@ Proceedings (peer-reviewed) +dnet:review_levels @=@ 0001 @=@ Refereed +dnet:review_levels @=@ 0001 @=@ Refereed Article +dnet:review_levels @=@ 0001 @=@ Research articles evaluated by double blind +dnet:review_levels @=@ 0001 @=@ Resenha avaliada pelos pares +dnet:review_levels @=@ 0001 @=@ Review article (peer-reviewed) +dnet:review_levels @=@ 0001 @=@ Reviewed by peers +dnet:review_levels @=@ 0001 @=@ Revisión por Expertos +dnet:review_levels @=@ 0001 @=@ Revisto por Pares +dnet:review_levels @=@ 0001 @=@ SBBq abstracts / peer-reviewed +dnet:review_levels @=@ 0001 @=@ SBBq resúmenes - revisada por pares +dnet:review_levels @=@ 0001 @=@ Scholarly publ. 
Refereed +dnet:review_levels @=@ 0001 @=@ Scientific Publ (refereed) +dnet:review_levels @=@ 0001 @=@ Vertaisarvioimaton kirjoitus tieteellisessä aikakauslehdessä +dnet:review_levels @=@ 0001 @=@ Vertaisarvioitu alkuperäisartikkeli tieteellisessä aikakauslehdessä +dnet:review_levels @=@ 0001 @=@ Vertaisarvioitu artikkeli konferenssijulkaisussa +dnet:review_levels @=@ 0001 @=@ Vertaisarvioitu artikkeli tieteellisessä aikakauslehdessä +dnet:review_levels @=@ 0001 @=@ Vertaisarvioitu kirjan tai muun kokoomateoksen osa +dnet:review_levels @=@ 0001 @=@ Wetensch. publ. Refereed +dnet:review_levels @=@ 0001 @=@ article in peer-reviewed journal +dnet:review_levels @=@ 0001 @=@ articles validés +dnet:review_levels @=@ 0001 @=@ avaliado por pares, temas livres +dnet:review_levels @=@ 0001 @=@ info:eu-repo/semantics/peerReviewed +dnet:review_levels @=@ 0001 @=@ info:ulb-repo/semantics/articlePeerReview +dnet:review_levels @=@ 0001 @=@ proceeding with peer review +dnet:review_levels @=@ 0001 @=@ refereed_publications +dnet:review_levels @=@ 0001 @=@ ul_published_reviewed +dnet:review_levels @=@ 0001 @=@ Άρθρο που έχει αξιολογηθεί από ομότιμους ειδικούς +dnet:review_levels @=@ 0001 @=@ Άρθρο το οποίο έχει περάσει από ομότιμη αξιολόγηση +dnet:review_levels @=@ 0001 @=@ レフェリー付き論文 +dnet:review_levels @=@ 0001 @=@ 印刷物/電子媒体-テクニカルレポート類(査読有り) +dnet:review_levels @=@ 0001 @=@ 印刷物/電子媒体-会議発表論文(査読有り) +dnet:review_levels @=@ 0001 @=@ 印刷物/電子媒体-図書(査読有り) +dnet:review_levels @=@ 0001 @=@ 印刷物/電子媒体-学術雑誌論文(査読有り) +dnet:review_levels @=@ 0001 @=@ 印刷物/電子媒体-紀要論文(査読有り) +dnet:review_levels @=@ 0001 @=@ 印刷物/電子媒体-雑誌記事(査読有り) +dnet:review_levels @=@ 0001 @=@ 原著論文(査読有り) +dnet:review_levels @=@ 0001 @=@ 査読論文
\ No newline at end of file
diff --git a/dhp-workflows/dhp-graph-mapper/src/test/resources/eu/dnetlib/dhp/oa/graph/clean/terms.txt b/dhp-workflows/dhp-graph-mapper/src/test/resources/eu/dnetlib/dhp/oa/graph/clean/terms.txt
new file mode 100644
index 000000000..59bed7c3a
--- /dev/null
+++ b/dhp-workflows/dhp-graph-mapper/src/test/resources/eu/dnetlib/dhp/oa/graph/clean/terms.txt
@@ -0,0 +1,1079 @@
+ModularUiLabels @=@ ModularUiLabels @=@ PendingRepositoryResources @=@ Pending datasource +ModularUiLabels @=@ ModularUiLabels @=@ RepositoryServiceResources @=@ Valid datasource +dnet:content_description_typologies @=@ D-Net Content Description Typologies @=@ file::EuropePMC @=@ file::EuropePMC +dnet:content_description_typologies @=@ D-Net Content Description Typologies @=@ file::PDF @=@ file::PDF +dnet:content_description_typologies @=@ D-Net Content Description Typologies @=@ file::WoS @=@ file::WoS +dnet:content_description_typologies @=@ D-Net Content Description Typologies @=@ metadata @=@ metadata +dnet:content_description_typologies @=@ D-Net Content Description Typologies @=@ file::hybrid @=@ file::hybrid +dnet:provenanceActions @=@ dnet:provenanceActions @=@ sysimport:crosswalk:cris @=@ Harvested +dnet:provenanceActions @=@ dnet:provenanceActions @=@ sysimport:actionset:orcidworks-no-doi @=@ Harvested +dnet:provenanceActions @=@ dnet:provenanceActions @=@ sysimport:crosswalk:infospace @=@ Harvested +dnet:provenanceActions @=@ dnet:provenanceActions @=@ sysimport:crosswalk @=@ Harvested +dnet:provenanceActions @=@ dnet:provenanceActions @=@ sysimport:crosswalk:aggregator @=@ Harvested +dnet:provenanceActions @=@ dnet:provenanceActions @=@ sysimport:crosswalk:datasetarchive @=@ Harvested +dnet:provenanceActions @=@ dnet:provenanceActions @=@ sysimport:actionset @=@ Harvested +dnet:provenanceActions @=@ dnet:provenanceActions
@=@ sysimport:crosswalk:entityregistry @=@ Harvested +dnet:provenanceActions @=@ dnet:provenanceActions @=@ sysimport:crosswalk:repository @=@ Harvested +dnet:provenanceActions @=@ dnet:provenanceActions @=@ sysimport:mining:aggregator @=@ Inferred by OpenAIRE +dnet:provenanceActions @=@ dnet:provenanceActions @=@ community:subject @=@ Inferred by OpenAIRE +dnet:provenanceActions @=@ dnet:provenanceActions @=@ community:zenodocommunity @=@ Inferred by OpenAIRE +dnet:provenanceActions @=@ dnet:provenanceActions @=@ iis @=@ Inferred by OpenAIRE +dnet:provenanceActions @=@ dnet:provenanceActions @=@ sysimport:mining:entityregistry @=@ Inferred by OpenAIRE +dnet:provenanceActions @=@ dnet:provenanceActions @=@ community:organization @=@ Inferred by OpenAIRE +dnet:provenanceActions @=@ dnet:provenanceActions @=@ sysimport:mining:infospace @=@ Inferred by OpenAIRE +dnet:provenanceActions @=@ dnet:provenanceActions @=@ sysimport:dedup @=@ Inferred by OpenAIRE +dnet:provenanceActions @=@ dnet:provenanceActions @=@ community:datasource @=@ Inferred by OpenAIRE +dnet:provenanceActions @=@ dnet:provenanceActions @=@ propagation:project:semrel @=@ Inferred by OpenAIRE +dnet:provenanceActions @=@ dnet:provenanceActions @=@ sysimport:mining:cris @=@ Inferred by OpenAIRE +dnet:provenanceActions @=@ dnet:provenanceActions @=@ sysimport:mining:repository @=@ Inferred by OpenAIRE +dnet:provenanceActions @=@ dnet:provenanceActions @=@ sysimport:mining:datasetarchive @=@ Inferred by OpenAIRE +dnet:provenanceActions @=@ dnet:provenanceActions @=@ community:semrel @=@ Inferred by OpenAIRE +dnet:provenanceActions @=@ dnet:provenanceActions @=@ user:claim @=@ Linked by user +dnet:provenanceActions @=@ dnet:provenanceActions @=@ user:claim:pid @=@ Linked by user +dnet:provenanceActions @=@ dnet:provenanceActions @=@ user:insert @=@ Linked by user +dnet:provenanceActions @=@ dnet:provenanceActions @=@ user:claim:search @=@ Linked by user +dnet:provenanceActions @=@ dnet:provenanceActions @=@ UNKNOWN @=@ UNKNOWN +dnet:provenanceActions @=@ dnet:provenanceActions @=@ country:instrepos @=@ Inferred by OpenAIRE +dnet:access_modes @=@ dnet:access_modes @=@ 12MONTHS @=@ 12 Months Embargo +dnet:access_modes @=@ dnet:access_modes @=@ 6MONTHS @=@ 6 Months Embargo +dnet:access_modes @=@ dnet:access_modes @=@ CLOSED @=@ Closed Access +dnet:access_modes @=@ dnet:access_modes @=@ EMBARGO @=@ Embargo +dnet:access_modes @=@ dnet:access_modes @=@ OPEN @=@ Open Access +dnet:access_modes @=@ dnet:access_modes @=@ OPEN SOURCE @=@ Open Source +dnet:access_modes @=@ dnet:access_modes @=@ OTHER @=@ Other +dnet:access_modes @=@ dnet:access_modes @=@ RESTRICTED @=@ Restricted +dnet:access_modes @=@ dnet:access_modes @=@ UNKNOWN @=@ not available +fct:funding_typologies @=@ fct:funding_typologies @=@ fct:program @=@ fct:program +dnet:compatibilityLevel @=@ dnet:compatibilityLevel @=@ openaire2.0 @=@ OpenAIRE 2.0 (EC funding) +dnet:compatibilityLevel @=@ dnet:compatibilityLevel @=@ openaire3.0 @=@ OpenAIRE 3.0 (OA, funding) +dnet:compatibilityLevel @=@ dnet:compatibilityLevel @=@ driver @=@ OpenAIRE Basic (DRIVER OA) +dnet:compatibilityLevel @=@ dnet:compatibilityLevel @=@ openaire-cris_1.1 @=@ OpenAIRE CRIS v1.1 +dnet:compatibilityLevel @=@ dnet:compatibilityLevel @=@ openaire2.0_data @=@ OpenAIRE Data (funded, referenced datasets) +dnet:compatibilityLevel @=@ dnet:compatibilityLevel @=@ openaire-pub_4.0 @=@ OpenAIRE PubRepos v4.0 +dnet:compatibilityLevel @=@ dnet:compatibilityLevel @=@ hostedBy @=@ collected from a compatible aggregator 
+dnet:compatibilityLevel @=@ dnet:compatibilityLevel @=@ files @=@ files
+dnet:compatibilityLevel @=@ dnet:compatibilityLevel @=@ native @=@ native
+dnet:compatibilityLevel @=@ dnet:compatibilityLevel @=@ UNKNOWN @=@ not available
+dnet:compatibilityLevel @=@ dnet:compatibilityLevel @=@ notCompatible @=@ under validation
+dnet:dataCite_date @=@ dnet:dataCite_date @=@ UNKNOWN @=@ UNKNOWN
+dnet:dataCite_date @=@ dnet:dataCite_date @=@ available @=@ available
+dnet:dataCite_date @=@ dnet:dataCite_date @=@ copyrighted @=@ copyrighted
+dnet:dataCite_date @=@ dnet:dataCite_date @=@ created @=@ created
+dnet:dataCite_date @=@ dnet:dataCite_date @=@ endDate @=@ endDate
+dnet:dataCite_date @=@ dnet:dataCite_date @=@ issued @=@ issued
+dnet:dataCite_date @=@ dnet:dataCite_date @=@ startDate @=@ startDate
+dnet:dataCite_date @=@ dnet:dataCite_date @=@ submitted @=@ submitted
+dnet:dataCite_date @=@ dnet:dataCite_date @=@ updated @=@ updated
+dnet:dataCite_date @=@ dnet:dataCite_date @=@ valid @=@ valid
+dnet:dataCite_date @=@ dnet:dataCite_date @=@ published-print @=@ published-print
+dnet:dataCite_date @=@ dnet:dataCite_date @=@ published-online @=@ published-online
+dnet:dataCite_date @=@ dnet:dataCite_date @=@ accepted @=@ accepted
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ crissystem @=@ CRIS System
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ datarepository::unknown @=@ Data Repository
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ aggregator::datarepository @=@ Data Repository Aggregator
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ entityregistry::projects @=@ Funder database
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ infospace @=@ Information Space
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ pubsrepository::institutional @=@ Institutional Repository
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ aggregator::pubsrepository::institutional @=@ Institutional Repository Aggregator
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ pubsrepository::journal @=@ Journal
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ aggregator::pubsrepository::journals @=@ Journal Aggregator/Publisher
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ pubsrepository::mock @=@ Other
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ pubscatalogue::unknown @=@ Publication Catalogue
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ pubsrepository::unknown @=@ Publication Repository
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ aggregator::pubsrepository::unknown @=@ Publication Repository Aggregator
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ entityregistry @=@ Registry
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ entityregistry::repositories @=@ Registry of repositories
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ entityregistry::products @=@ Registry of research products
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ entityregistry::researchers @=@ Registry of researchers
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ entityregistry::organizations @=@ Registry of organizations
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ scholarcomminfra @=@ Scholarly Comm. Infrastructure
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ softwarerepository @=@ Software Repository
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ pubsrepository::thematic @=@ Thematic Repository
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ websource @=@ Web Source
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ aggregator::softwarerepository @=@ Software Repository Aggregator
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ orprepository @=@ Repository
+dnet:datasource_typologies @=@ dnet:datasource_typologies @=@ researchgraph @=@ Research Graph
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ ACM @=@ ACM Computing Classification System
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ agrovoc @=@ AGROVOC
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ bicssc @=@ BIC standard subject categories
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ DFG @=@ DFG Classification
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ ddc @=@ Dewey Decimal Classification
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ nsf:fieldOfApplication @=@ Field of Application (NSF)
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ gok @=@ Göttingen Online Classification
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ ec:h2020topics @=@ Horizon 2020 Topics
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ IPC @=@ International Patent Classification
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ jel @=@ JEL Classification
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ lcsh @=@ Library of Congress Subject Headings
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ msc @=@ Mathematics Subject Classification
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ mesheuropmc @=@ Medical Subject Headings
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ mesh @=@ Medical Subject Headings
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ bk @=@ Nederlandse basisclassificatie
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ dnet:od_subjects @=@ OpenDOAR subjects
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ ocis @=@ Optics Classification and Indexing Scheme
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ pacs @=@ Physics and Astronomy Classification Scheme
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ rvk @=@ Regensburger Verbundklassifikation
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ UNKNOWN @=@ UNKNOWN
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ udc @=@ Universal Decimal Classification
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ wos @=@ Web of Science Subject Areas
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ arxiv @=@ arXiv
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ keyword @=@ keyword
+dnet:subject_classification_typologies @=@ dnet:subject_classification_typologies @=@ MAG @=@ Microsoft Academic Graph classification
+fct:contractTypes @=@ fct:contractTypes @=@ UNKNOWN @=@ UNKNOWN
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0018 @=@ Annotation
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0001 @=@ Article
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0033 @=@ Audiovisual
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0008 @=@ Bachelor thesis
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0046 @=@ Bioentity
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0002 @=@ Book
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0037 @=@ Clinical Trial
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0022 @=@ Collection
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0004 @=@ Conference object
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0005 @=@ Contribution for newspaper or weekly magazine
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0045 @=@ Data Management Plan
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0031 @=@ Data Paper
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0021 @=@ Dataset
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0006 @=@ Doctoral thesis
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0023 @=@ Event
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0009 @=@ External research report
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0024 @=@ Film
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0025 @=@ Image
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0026 @=@ InteractiveResource
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0011 @=@ Internal report
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0043 @=@ Journal
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0010 @=@ Lecture
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0007 @=@ Master thesis
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0027 @=@ Model
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0012 @=@ Newsletter
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0020 @=@ Other ORP type
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0039 @=@ Other dataset type
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0038 @=@ Other literature type
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0040 @=@ Other software type
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0013 @=@ Part of book or chapter of book
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0019 @=@ Patent
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0028 @=@ PhysicalObject
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0016 @=@ Preprint
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0034 @=@ Project deliverable
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0035 @=@ Project milestone
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0036 @=@ Project proposal
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0017 @=@ Report
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0014 @=@ Research
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0015 @=@ Review
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0029 @=@ Software
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0032 @=@ Software Paper
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0030 @=@ Sound
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0044 @=@ Thesis
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0000 @=@ Unknown
+dnet:publication_resource @=@ dnet:publication_resource @=@ 0042 @=@ Virtual Appliance
+ec:funding_typologies @=@ ec:funding_typologies @=@ ec:frameworkprogram @=@ frameworkprogram
+ec:funding_typologies @=@ ec:funding_typologies @=@ ec:program @=@ program
+ec:funding_typologies @=@ ec:funding_typologies @=@ ec:specificprogram @=@ specificprogram
+ec:FP7contractTypes @=@ ec:FP7contractTypes @=@ 171 @=@ Article 171 of the Treaty
+ec:FP7contractTypes @=@ ec:FP7contractTypes @=@ BSG @=@ Research for the benefit of specific groups
+ec:FP7contractTypes @=@ ec:FP7contractTypes @=@ CIP-EIP-TN @=@ CIP-Eco-Innovation - CIP-Thematic Network
+ec:FP7contractTypes @=@ ec:FP7contractTypes @=@ CP @=@ Collaborative project
+ec:FP7contractTypes @=@ ec:FP7contractTypes @=@ CP-CSA @=@ Combination of CP & CSA
+ec:FP7contractTypes @=@ ec:FP7contractTypes @=@ CSA @=@ Coordination and support action
+ec:FP7contractTypes @=@ ec:FP7contractTypes @=@ ERC @=@ Support for frontier research (ERC)
+ec:FP7contractTypes @=@ ec:FP7contractTypes @=@ MC @=@ Support for training and career development of researchers (Marie Curie)
+ec:FP7contractTypes @=@ ec:FP7contractTypes @=@ NoE @=@ Network of Excellence
+wt:funding_relations @=@ wt:funding_relations @=@ wt:hasParentFunding @=@ wt:hasParentFunding
+dnet:languages @=@ dnet:languages @=@ abk @=@ Abkhazian
+dnet:languages @=@ dnet:languages @=@ ace @=@ Achinese
+dnet:languages @=@ dnet:languages @=@ ach @=@ Acoli
+dnet:languages @=@ dnet:languages @=@ ada @=@ Adangme
+dnet:languages @=@ dnet:languages @=@ aar @=@ Afar
+dnet:languages @=@ dnet:languages @=@ afh @=@ Afrihili
+dnet:languages @=@ dnet:languages @=@ afr @=@ Afrikaans
+dnet:languages @=@ dnet:languages @=@ afa @=@ Afro-Asiatic
+dnet:languages @=@ dnet:languages @=@ aka @=@ Akan
+dnet:languages @=@ dnet:languages @=@ akk @=@ Akkadian
+dnet:languages @=@ dnet:languages @=@ alb/sqi @=@ Albanian
+dnet:languages @=@ dnet:languages @=@ ale @=@ Aleut
+dnet:languages @=@ dnet:languages @=@ alg @=@ Algonquian languages
+dnet:languages @=@ dnet:languages @=@ tut @=@ Altaic
+dnet:languages @=@ dnet:languages @=@ amh @=@ Amharic
+dnet:languages @=@ dnet:languages @=@ egy @=@ Ancient Egyptian
+dnet:languages @=@ dnet:languages @=@ grc @=@ Ancient Greek
+dnet:languages @=@ dnet:languages @=@ apa @=@ Apache
+dnet:languages @=@ dnet:languages @=@ ara @=@ Arabic
+dnet:languages @=@ dnet:languages @=@ arg @=@ Aragonese
+dnet:languages @=@ dnet:languages @=@ arc @=@ Aramaic
+dnet:languages @=@ dnet:languages @=@ arp @=@ Arapaho
+dnet:languages @=@ dnet:languages @=@ arn @=@ Araucanian
+dnet:languages @=@ dnet:languages @=@ arw @=@ Arawak
+dnet:languages @=@ dnet:languages @=@ arm/hye @=@ Armenian
+dnet:languages @=@ dnet:languages @=@ art @=@ Artificial
+dnet:languages @=@ dnet:languages @=@ asm @=@ Assamese
+dnet:languages @=@ dnet:languages @=@ ath @=@ Athapascan
+dnet:languages @=@ dnet:languages @=@ map @=@ Austronesian
+dnet:languages @=@ dnet:languages @=@ ina @=@ Interlingua (International Auxiliary Language Association)
+dnet:languages @=@ dnet:languages @=@ ava @=@ Avaric
+dnet:languages @=@ dnet:languages @=@ ave @=@ Avestan
+dnet:languages @=@ dnet:languages @=@ awa @=@ Awadhi
+dnet:languages @=@ dnet:languages @=@ aym @=@ Aymara
+dnet:languages @=@ dnet:languages @=@ aze @=@ Azerbaijani
+dnet:languages @=@ dnet:languages @=@ nah @=@ Aztec
+dnet:languages @=@ dnet:languages @=@ ban @=@ Balinese
+dnet:languages @=@ dnet:languages @=@ bat @=@ Baltic
+dnet:languages @=@ dnet:languages @=@ bal @=@ Baluchi
+dnet:languages @=@ dnet:languages @=@ bam @=@ Bambara
+dnet:languages @=@ dnet:languages @=@ bai @=@ Bamileke
+dnet:languages @=@ dnet:languages @=@ bad @=@ Banda
+dnet:languages @=@ dnet:languages @=@ bnt @=@ Bantu
+dnet:languages @=@ dnet:languages @=@ bas @=@ Basa
+dnet:languages @=@ dnet:languages @=@ bak @=@ Bashkir
+dnet:languages @=@ dnet:languages @=@ baq/eus @=@ Basque
+dnet:languages @=@ dnet:languages @=@ bej @=@ Beja
+dnet:languages @=@ dnet:languages @=@ bel @=@ Belarusian
+dnet:languages @=@ dnet:languages @=@ bem @=@ Bemba
+dnet:languages @=@ dnet:languages @=@ ben @=@ Bengali
+dnet:languages @=@ dnet:languages @=@ ber @=@ Berber
+dnet:languages @=@ dnet:languages @=@ bho @=@ Bhojpuri
+dnet:languages @=@ dnet:languages @=@ bih @=@ Bihari
+dnet:languages @=@ dnet:languages @=@ bik @=@ Bikol
+dnet:languages @=@ dnet:languages @=@ bin @=@ Bini
+dnet:languages @=@ dnet:languages @=@ bis @=@ Bislama
+dnet:languages @=@ dnet:languages @=@ nob @=@ Bokmål, Norwegian; Norwegian Bokmål
+dnet:languages @=@ dnet:languages @=@ bos @=@ Bosnian
+dnet:languages @=@ dnet:languages @=@ bra @=@ Braj
+dnet:languages @=@ dnet:languages @=@ bre @=@ Breton
+dnet:languages @=@ dnet:languages @=@ bug @=@ Buginese
+dnet:languages @=@ dnet:languages @=@ bul @=@ Bulgarian
+dnet:languages @=@ dnet:languages @=@ bua @=@ Buriat
+dnet:languages @=@ dnet:languages @=@ bur/mya @=@ Burmese
+dnet:languages @=@ dnet:languages @=@ cad @=@ Caddo
+dnet:languages @=@ dnet:languages @=@ car @=@ Carib
+dnet:languages @=@ dnet:languages @=@ cat @=@ Catalan; Valencian
+dnet:languages @=@ dnet:languages @=@ cau @=@ Caucasian
+dnet:languages @=@ dnet:languages @=@ ceb @=@ Cebuano
+dnet:languages @=@ dnet:languages @=@ cel @=@ Celtic
+dnet:languages @=@ dnet:languages @=@ cai @=@ Central American Indian
+dnet:languages @=@ dnet:languages @=@ chg @=@ Chagatai
+dnet:languages @=@ dnet:languages @=@ cha @=@ Chamorro
+dnet:languages @=@ dnet:languages @=@ che @=@ Chechen
+dnet:languages @=@ dnet:languages @=@ chr @=@ Cherokee
+dnet:languages @=@ dnet:languages @=@ nya @=@ Chewa; Chichewa; Nyanja
+dnet:languages @=@ dnet:languages @=@ chy @=@ Cheyenne
+dnet:languages @=@ dnet:languages @=@ chb @=@ Chibcha
+dnet:languages @=@ dnet:languages @=@ chi/zho @=@ Chinese
+dnet:languages @=@ dnet:languages @=@ chn @=@ Chinook jargon
+dnet:languages @=@ dnet:languages @=@ cho @=@ Choctaw
+dnet:languages @=@ dnet:languages @=@ chu @=@ Church Slavic; Slavonic; Church Slavonic; Old Bulgarian; Old Church Slavonic
+dnet:languages @=@ dnet:languages @=@ chv @=@ Chuvash
+dnet:languages @=@ dnet:languages @=@ cop @=@ Coptic
+dnet:languages @=@ dnet:languages @=@ cor @=@ Cornish
+dnet:languages @=@ dnet:languages @=@ cos @=@ Corsican
+dnet:languages @=@ dnet:languages @=@ cre @=@ Cree
+dnet:languages @=@ dnet:languages @=@ mus @=@ Creek
+dnet:languages @=@ dnet:languages @=@ crp @=@ Creoles and Pidgins
+dnet:languages @=@ dnet:languages @=@ hrv @=@ Croatian
+dnet:languages @=@ dnet:languages @=@ cus @=@ Cushitic
+dnet:languages @=@ dnet:languages @=@ ces/cze @=@ Czech
+dnet:languages @=@ dnet:languages @=@ dak @=@ Dakota
+dnet:languages @=@ dnet:languages @=@ dan @=@ Danish
+dnet:languages @=@ dnet:languages @=@ del @=@ Delaware
+dnet:languages @=@ dnet:languages @=@ din @=@ Dinka
+dnet:languages @=@ dnet:languages @=@ div @=@ Divehi
+dnet:languages @=@ dnet:languages @=@ doi @=@ Dogri
+dnet:languages @=@ dnet:languages @=@ dra @=@ Dravidian
+dnet:languages @=@ dnet:languages @=@ dua @=@ Duala
+dnet:languages @=@ dnet:languages @=@ dut/nld @=@ Dutch; Flemish
+dnet:languages @=@ dnet:languages @=@ dyu @=@ Dyula
+dnet:languages @=@ dnet:languages @=@ dzo @=@ Dzongkha
+dnet:languages @=@ dnet:languages @=@ efi @=@ Efik
+dnet:languages @=@ dnet:languages @=@ eka @=@ Ekajuk
+dnet:languages @=@ dnet:languages @=@ elx @=@ Elamite
+dnet:languages @=@ dnet:languages @=@ eng @=@ English
+dnet:languages @=@ dnet:languages @=@ cpe @=@ English-based Creoles and Pidgins
+dnet:languages @=@ dnet:languages @=@ esk @=@ Eskimo
+dnet:languages @=@ dnet:languages @=@ epo @=@ Esperanto
+dnet:languages @=@ dnet:languages @=@ est @=@ Estonian
+dnet:languages @=@ dnet:languages @=@ ewe @=@ Ewe
+dnet:languages @=@ dnet:languages @=@ ewo @=@ Ewondo
+dnet:languages @=@ dnet:languages @=@ fan @=@ Fang
+dnet:languages @=@ dnet:languages @=@ fat @=@ Fanti
+dnet:languages @=@ dnet:languages @=@ fao @=@ Faroese
+dnet:languages @=@ dnet:languages @=@ fij @=@ Fijian
+dnet:languages @=@ dnet:languages @=@ fin @=@ Finnish
+dnet:languages @=@ dnet:languages @=@ fiu @=@ Finno-Ugrian
+dnet:languages @=@ dnet:languages @=@ fon @=@ Fon
+dnet:languages @=@ dnet:languages @=@ fra/fre @=@ French
+dnet:languages @=@ dnet:languages @=@ cpf @=@ French-based Creoles and Pidgins
+dnet:languages @=@ dnet:languages @=@ fry @=@ Frisian
+dnet:languages @=@ dnet:languages @=@ ful @=@ Fulah
+dnet:languages @=@ dnet:languages @=@ gaa @=@ Ga
+dnet:languages @=@ dnet:languages @=@ gae/gdh @=@ Gaelic
+dnet:languages @=@ dnet:languages @=@ gla @=@ Gaelic; Scottish Gaelic
+dnet:languages @=@ dnet:languages @=@ glg @=@ Galician
+dnet:languages @=@ dnet:languages @=@ lug @=@ Ganda
+dnet:languages @=@ dnet:languages @=@ gay @=@ Gayo
+dnet:languages @=@ dnet:languages @=@ gez @=@ Geez
+dnet:languages @=@ dnet:languages @=@ geo/kat @=@ Georgian
+dnet:languages @=@ dnet:languages @=@ deu/ger @=@ German
+dnet:languages @=@ dnet:languages @=@ gem @=@ Germanic
+dnet:languages @=@ dnet:languages @=@ kik @=@ Gikuyu; Kikuyu
+dnet:languages @=@ dnet:languages @=@ gil @=@ Gilbertese
+dnet:languages @=@ dnet:languages @=@ gon @=@ Gondi
+dnet:languages @=@ dnet:languages @=@ got @=@ Gothic
+dnet:languages @=@ dnet:languages @=@ grb @=@ Grebo
+dnet:languages @=@ dnet:languages @=@ ell/gre @=@ Greek
+dnet:languages @=@ dnet:languages @=@ gre/ell @=@ Greek, Modern (1453-)
+dnet:languages @=@ dnet:languages @=@ kal @=@ Greenlandic; Kalaallisut
+dnet:languages @=@ dnet:languages @=@ grn @=@ Guarani
+dnet:languages @=@ dnet:languages @=@ guj @=@ Gujarati
+dnet:languages @=@ dnet:languages @=@ hai @=@ Haida
+dnet:languages @=@ dnet:languages @=@ hat @=@ Haitian; Haitian Creole
+dnet:languages @=@ dnet:languages @=@ hau @=@ Hausa
+dnet:languages @=@ dnet:languages @=@ haw @=@ Hawaiian
+dnet:languages @=@ dnet:languages @=@ heb @=@ Hebrew
+dnet:languages @=@ dnet:languages @=@ her @=@ Herero
+dnet:languages @=@ dnet:languages @=@ hil @=@ Hiligaynon
+dnet:languages @=@ dnet:languages @=@ him @=@ Himachali
+dnet:languages @=@ dnet:languages @=@ hin @=@ Hindi
+dnet:languages @=@ dnet:languages @=@ hmo @=@ Hiri Motu
+dnet:languages @=@ dnet:languages @=@ hun @=@ Hungarian
+dnet:languages @=@ dnet:languages @=@ hup @=@ Hupa
+dnet:languages @=@ dnet:languages @=@ iba @=@ Iban
+dnet:languages @=@ dnet:languages @=@ ice/isl @=@ Icelandic
+dnet:languages @=@ dnet:languages @=@ ido @=@ Ido
+dnet:languages @=@ dnet:languages @=@ ibo @=@ Igbo
+dnet:languages @=@ dnet:languages @=@ ijo @=@ Ijo
+dnet:languages @=@ dnet:languages @=@ ilo @=@ Iloko
+dnet:languages @=@ dnet:languages @=@ inc @=@ Indic
+dnet:languages @=@ dnet:languages @=@ ine @=@ Indo-European
+dnet:languages @=@ dnet:languages @=@ ind @=@ Indonesian
+dnet:languages @=@ dnet:languages @=@ ile @=@ Interlingue
+dnet:languages @=@ dnet:languages @=@ iku @=@ Inuktitut
+dnet:languages @=@ dnet:languages @=@ ipk @=@ Inupiaq
+dnet:languages @=@ dnet:languages @=@ ira @=@ Iranian
+dnet:languages @=@ dnet:languages @=@ gai/iri @=@ Irish
+dnet:languages @=@ dnet:languages @=@ iro @=@ Iroquoian
+dnet:languages @=@ dnet:languages @=@ ita @=@ Italian
+dnet:languages @=@ dnet:languages @=@ jpn @=@ Japanese
+dnet:languages @=@ dnet:languages @=@ jav @=@ Javanese
+dnet:languages @=@ dnet:languages @=@ jrb @=@ Judeo-Arabic
+dnet:languages @=@ dnet:languages @=@ jpr @=@ Judeo-Persian
+dnet:languages @=@ dnet:languages @=@ kab @=@ Kabyle
+dnet:languages @=@ dnet:languages @=@ kac @=@ Kachin
+dnet:languages @=@ dnet:languages @=@ kam @=@ Kamba
+dnet:languages @=@ dnet:languages @=@ kan @=@ Kannada
+dnet:languages @=@ dnet:languages @=@ kau @=@ Kanuri
+dnet:languages @=@ dnet:languages @=@ kaa @=@ Kara-Kalpak
+dnet:languages @=@ dnet:languages @=@ kar @=@ Karen
+dnet:languages @=@ dnet:languages @=@ kas @=@ Kashmiri
+dnet:languages @=@ dnet:languages @=@ kaw @=@ Kawi
+dnet:languages @=@ dnet:languages @=@ kaz @=@ Kazakh
+dnet:languages @=@ dnet:languages @=@ kha @=@ Khasi
+dnet:languages @=@ dnet:languages @=@ khm @=@ Khmer
+dnet:languages @=@ dnet:languages @=@ khi @=@ Khoisan
+dnet:languages @=@ dnet:languages @=@ kho @=@ Khotanese
+dnet:languages @=@ dnet:languages @=@ kin @=@ Kinyarwanda
+dnet:languages @=@ dnet:languages @=@ kir @=@ Kirghiz
+dnet:languages @=@ dnet:languages @=@ kom @=@ Komi
+dnet:languages @=@ dnet:languages @=@ kon @=@ Kongo
+dnet:languages @=@ dnet:languages @=@ kok @=@ Konkani
+dnet:languages @=@ dnet:languages @=@ kor @=@ Korean
+dnet:languages @=@ dnet:languages @=@ kpe @=@ Kpelle
+dnet:languages @=@ dnet:languages @=@ kro @=@ Kru
+dnet:languages @=@ dnet:languages @=@ kua @=@ Kuanyama; Kwanyama
+dnet:languages @=@ dnet:languages @=@ kum @=@ Kumyk
+dnet:languages @=@ dnet:languages @=@ kur @=@ Kurdish
+dnet:languages @=@ dnet:languages @=@ kru @=@ Kurukh
+dnet:languages @=@ dnet:languages @=@ kus @=@ Kusaie
+dnet:languages @=@ dnet:languages @=@ kut @=@ Kutenai
+dnet:languages @=@ dnet:languages @=@ lad @=@ Ladino
+dnet:languages @=@ dnet:languages @=@ lah @=@ Lahnda
+dnet:languages @=@ dnet:languages @=@ lam @=@ Lamba
+dnet:languages @=@ dnet:languages @=@ lao @=@ Lao
+dnet:languages @=@ dnet:languages @=@ lat @=@ Latin
+dnet:languages @=@ dnet:languages @=@ lav @=@ Latvian
+dnet:languages @=@ dnet:languages @=@ ltz @=@ Letzeburgesch; Luxembourgish
+dnet:languages @=@ dnet:languages @=@ lez @=@ Lezghian
+dnet:languages @=@ dnet:languages @=@ lim @=@ Limburgan; Limburger; Limburgish
+dnet:languages @=@ dnet:languages @=@ lin @=@ Lingala
+dnet:languages @=@ dnet:languages @=@ lit @=@ Lithuanian
+dnet:languages @=@ dnet:languages @=@ loz @=@ Lozi
+dnet:languages @=@ dnet:languages @=@ lub @=@ Luba-Katanga
+dnet:languages @=@ dnet:languages @=@ lui @=@ Luiseno
+dnet:languages @=@ dnet:languages @=@ lun @=@ Lunda
+dnet:languages @=@ dnet:languages @=@ luo @=@ Luo
+dnet:languages @=@ dnet:languages @=@ mac/mkd @=@ Macedonian
+dnet:languages @=@ dnet:languages @=@ mad @=@ Madurese
+dnet:languages @=@ dnet:languages @=@ mag @=@ Magahi
+dnet:languages @=@ dnet:languages @=@ mai @=@ Maithili
+dnet:languages @=@ dnet:languages @=@ mak @=@ Makasar
+dnet:languages @=@ dnet:languages @=@ mlg @=@ Malagasy
+dnet:languages @=@ dnet:languages @=@ may/msa @=@ Malay
+dnet:languages @=@ dnet:languages @=@ mal @=@ Malayalam
+dnet:languages @=@ dnet:languages @=@ mlt @=@ Maltese
+dnet:languages @=@ dnet:languages @=@ man @=@ Mandingo
+dnet:languages @=@ dnet:languages @=@ mni @=@ Manipuri
+dnet:languages @=@ dnet:languages @=@ mno @=@ Manobo
+dnet:languages @=@ dnet:languages @=@ glv @=@ Manx
+dnet:languages @=@ dnet:languages @=@ mao/mri @=@ Maori
+dnet:languages @=@ dnet:languages @=@ mar @=@ Marathi
+dnet:languages @=@ dnet:languages @=@ chm @=@ Mari
+dnet:languages @=@ dnet:languages @=@ mah @=@ Marshallese
+dnet:languages @=@ dnet:languages @=@ mwr @=@ Marwari
+dnet:languages @=@ dnet:languages @=@ mas @=@ Masai
+dnet:languages @=@ dnet:languages @=@ myn @=@ Mayan
+dnet:languages @=@ dnet:languages @=@ men @=@ Mende
+dnet:languages @=@ dnet:languages @=@ mic @=@ Micmac
+dnet:languages @=@ dnet:languages @=@ dum @=@ Middle Dutch
+dnet:languages @=@ dnet:languages @=@ enm @=@ Middle English
+dnet:languages @=@ dnet:languages @=@ frm @=@ Middle French
+dnet:languages @=@ dnet:languages @=@ gmh @=@ Middle High German
+dnet:languages @=@ dnet:languages @=@ mga @=@ Middle Irish
+dnet:languages @=@ dnet:languages @=@ min @=@ Minangkabau
+dnet:languages @=@ dnet:languages @=@ mis @=@ Miscellaneous
+dnet:languages @=@ dnet:languages @=@ moh @=@ Mohawk
+dnet:languages @=@ dnet:languages @=@ mol @=@ Moldavian
+dnet:languages @=@ dnet:languages @=@ mkh @=@ Mon-Khmer
+dnet:languages @=@ dnet:languages @=@ lol @=@ Mongo
+dnet:languages @=@ dnet:languages @=@ mon @=@ Mongolian
+dnet:languages @=@ dnet:languages @=@ mos @=@ Mossi
+dnet:languages @=@ dnet:languages @=@ mul @=@ Multiple languages
+dnet:languages @=@ dnet:languages @=@ mun @=@ Munda
+dnet:languages @=@ dnet:languages @=@ nau @=@ Nauru
+dnet:languages @=@ dnet:languages @=@ nav @=@ Navajo; Navaho
+dnet:languages @=@ dnet:languages @=@ nde @=@ Ndebele, North
+dnet:languages @=@ dnet:languages @=@ nbl @=@ Ndebele, South
+dnet:languages @=@ dnet:languages @=@ ndo @=@ Ndonga
+dnet:languages @=@ dnet:languages @=@ nep @=@ Nepali
+dnet:languages @=@ dnet:languages @=@ new @=@ Newari
+dnet:languages @=@ dnet:languages @=@ nic @=@ Niger-Kordofanian
+dnet:languages @=@ dnet:languages @=@ ssa @=@ Nilo-Saharan
+dnet:languages @=@ dnet:languages @=@ niu @=@ Niuean
+dnet:languages @=@ dnet:languages @=@ non @=@ Norse
+dnet:languages @=@ dnet:languages @=@ nai @=@ North American Indian
+dnet:languages @=@ dnet:languages @=@ sme @=@ Northern Sami
+dnet:languages @=@ dnet:languages @=@ nor @=@ Norwegian
+dnet:languages @=@ dnet:languages @=@ nno @=@ Norwegian Nynorsk; Nynorsk, Norwegian
+dnet:languages @=@ dnet:languages @=@ nub @=@ Nubian
+dnet:languages @=@ dnet:languages @=@ nym @=@ Nyamwezi
+dnet:languages @=@ dnet:languages @=@ nyn @=@ Nyankole
+dnet:languages @=@ dnet:languages @=@ nyo @=@ Nyoro
+dnet:languages @=@ dnet:languages @=@ nzi @=@ Nzima
+dnet:languages @=@ dnet:languages @=@ oci @=@ Occitan (post 1500); Provençal
+dnet:languages @=@ dnet:languages @=@ oji @=@ Ojibwa
+dnet:languages @=@ dnet:languages @=@ ang @=@ Old English
+dnet:languages @=@ dnet:languages @=@ fro @=@ Old French
+dnet:languages @=@ dnet:languages @=@ goh @=@ Old High German
+dnet:languages @=@ dnet:languages @=@ ori @=@ Oriya
+dnet:languages @=@ dnet:languages @=@ orm @=@ Oromo
+dnet:languages @=@ dnet:languages @=@ osa @=@ Osage
+dnet:languages @=@ dnet:languages @=@ oss @=@ Ossetian; Ossetic
+dnet:languages @=@ dnet:languages @=@ oto @=@ Otomian
+dnet:languages @=@ dnet:languages @=@ ota @=@ Ottoman
+dnet:languages @=@ dnet:languages @=@ pal @=@ Pahlavi
+dnet:languages @=@ dnet:languages @=@ pau @=@ Palauan
+dnet:languages @=@ dnet:languages @=@ pli @=@ Pali
+dnet:languages @=@ dnet:languages @=@ pam @=@ Pampanga
+dnet:languages @=@ dnet:languages @=@ pag @=@ Pangasinan
+dnet:languages @=@ dnet:languages @=@ pan @=@ Panjabi; Punjabi
+dnet:languages @=@ dnet:languages @=@ pap @=@ Papiamento
+dnet:languages @=@ dnet:languages @=@ paa @=@ Papuan-Australian
+dnet:languages @=@ dnet:languages @=@ fas/per @=@ Persian
+dnet:languages @=@ dnet:languages @=@ peo @=@ Persian, Old (ca 600 - 400 B.C.)
+dnet:languages @=@ dnet:languages @=@ phn @=@ Phoenician
+dnet:languages @=@ dnet:languages @=@ pol @=@ Polish
+dnet:languages @=@ dnet:languages @=@ pon @=@ Ponape
+dnet:languages @=@ dnet:languages @=@ por @=@ Portuguese
+dnet:languages @=@ dnet:languages @=@ cpp @=@ Portuguese-based Creoles and Pidgins
+dnet:languages @=@ dnet:languages @=@ pra @=@ Prakrit
+dnet:languages @=@ dnet:languages @=@ pro @=@ Provencal
+dnet:languages @=@ dnet:languages @=@ pus @=@ Pushto
+dnet:languages @=@ dnet:languages @=@ que @=@ Quechua
+dnet:languages @=@ dnet:languages @=@ roh @=@ Raeto-Romance
+dnet:languages @=@ dnet:languages @=@ raj @=@ Rajasthani
+dnet:languages @=@ dnet:languages @=@ rar @=@ Rarotongan
+dnet:languages @=@ dnet:languages @=@ roa @=@ Romance
+dnet:languages @=@ dnet:languages @=@ ron/rum @=@ Romanian
+dnet:languages @=@ dnet:languages @=@ rom @=@ Romany
+dnet:languages @=@ dnet:languages @=@ run @=@ Rundi
+dnet:languages @=@ dnet:languages @=@ rus @=@ Russian
+dnet:languages @=@ dnet:languages @=@ sal @=@ Salishan
+dnet:languages @=@ dnet:languages @=@ sam @=@ Samaritan
+dnet:languages @=@ dnet:languages @=@ smi @=@ Sami
+dnet:languages @=@ dnet:languages @=@ smo @=@ Samoan
+dnet:languages @=@ dnet:languages @=@ sad @=@ Sandawe
+dnet:languages @=@ dnet:languages @=@ sag @=@ Sango
+dnet:languages @=@ dnet:languages @=@ san @=@ Sanskrit
+dnet:languages @=@ dnet:languages @=@ srd @=@ Sardinian
+dnet:languages @=@ dnet:languages @=@ sco @=@ Scots
+dnet:languages @=@ dnet:languages @=@ sel @=@ Selkup
+dnet:languages @=@ dnet:languages @=@ sem @=@ Semitic
+dnet:languages @=@ dnet:languages @=@ srp @=@ Serbian
+dnet:languages @=@ dnet:languages @=@ scr @=@ Serbo-Croatian
+dnet:languages @=@ dnet:languages @=@ srr @=@ Serer
+dnet:languages @=@ dnet:languages @=@ shn @=@ Shan
+dnet:languages @=@ dnet:languages @=@ sna @=@ Shona
+dnet:languages @=@ dnet:languages @=@ iii @=@ Sichuan Yi
+dnet:languages @=@ dnet:languages @=@ sid @=@ Sidamo
+dnet:languages @=@ dnet:languages @=@ bla @=@ Siksika
+dnet:languages @=@ dnet:languages @=@ snd @=@ Sindhi
+dnet:languages @=@ dnet:languages @=@ sin @=@ Sinhala; Sinhalese
+dnet:languages @=@ dnet:languages @=@ sit @=@ Sino-Tibetan
+dnet:languages @=@ dnet:languages @=@ sio @=@ Siouan
+dnet:languages @=@ dnet:languages @=@ sla @=@ Slavic
+dnet:languages @=@ dnet:languages @=@ slk/slo @=@ Slovak
+dnet:languages @=@ dnet:languages @=@ slv @=@ Slovenian
+dnet:languages @=@ dnet:languages @=@ sog @=@ Sogdian
+dnet:languages @=@ dnet:languages @=@ som @=@ Somali
+dnet:languages @=@ dnet:languages @=@ son @=@ Songhai
+dnet:languages @=@ dnet:languages @=@ wen @=@ Sorbian
+dnet:languages @=@ dnet:languages @=@ nso @=@ Sotho
+dnet:languages @=@ dnet:languages @=@ sot @=@ Sotho, Southern
+dnet:languages @=@ dnet:languages @=@ sai @=@ South American Indian
+dnet:languages @=@ dnet:languages @=@ esl/spa @=@ Spanish
+dnet:languages @=@ dnet:languages @=@ spa @=@ Spanish; Castilian
+dnet:languages @=@ dnet:languages @=@ suk @=@ Sukuma
+dnet:languages @=@ dnet:languages @=@ sux @=@ Sumerian
+dnet:languages @=@ dnet:languages @=@ sun @=@ Sundanese
+dnet:languages @=@ dnet:languages @=@ sus @=@ Susu
+dnet:languages @=@ dnet:languages @=@ swa @=@ Swahili
+dnet:languages @=@ dnet:languages @=@ ssw @=@ Swati
+dnet:languages @=@ dnet:languages @=@ swe @=@ Swedish
+dnet:languages @=@ dnet:languages @=@ syr @=@ Syriac
+dnet:languages @=@ dnet:languages @=@ tgl @=@ Tagalog
+dnet:languages @=@ dnet:languages @=@ tah @=@ Tahitian
+dnet:languages @=@ dnet:languages @=@ tgk @=@ Tajik
+dnet:languages @=@ dnet:languages @=@ tmh @=@ Tamashek
+dnet:languages @=@ dnet:languages @=@ tam @=@ Tamil
+dnet:languages @=@ dnet:languages @=@ tat @=@ Tatar
+dnet:languages @=@ dnet:languages @=@ tel @=@ Telugu
+dnet:languages @=@ dnet:languages @=@ ter @=@ Tereno
+dnet:languages @=@ dnet:languages @=@ tha @=@ Thai
+dnet:languages @=@ dnet:languages @=@ bod/tib @=@ Tibetan
+dnet:languages @=@ dnet:languages @=@ tig @=@ Tigre
+dnet:languages @=@ dnet:languages @=@ tir @=@ Tigrinya
+dnet:languages @=@ dnet:languages @=@ tem @=@ Timne
+dnet:languages @=@ dnet:languages @=@ tiv @=@ Tivi
+dnet:languages @=@ dnet:languages @=@ tli @=@ Tlingit
+dnet:languages @=@ dnet:languages @=@ ton @=@ Tonga (Tonga Islands)
+dnet:languages @=@ dnet:languages @=@ tog @=@ Tonga(Nyasa)
+dnet:languages @=@ dnet:languages @=@ tru @=@ Truk
+dnet:languages @=@ dnet:languages @=@ tsi @=@ Tsimshian
+dnet:languages @=@ dnet:languages @=@ tso @=@ Tsonga
+dnet:languages @=@ dnet:languages @=@ tsn @=@ Tswana
+dnet:languages @=@ dnet:languages @=@ tum @=@ Tumbuka
+dnet:languages @=@ dnet:languages @=@ tur @=@ Turkish
+dnet:languages @=@ dnet:languages @=@ tuk @=@ Turkmen
+dnet:languages @=@ dnet:languages @=@ tyv @=@ Tuvinian
+dnet:languages @=@ dnet:languages @=@ twi @=@ Twi
+dnet:languages @=@ dnet:languages @=@ uga @=@ Ugaritic
+dnet:languages @=@ dnet:languages @=@ uig @=@ Uighur; Uyghur
+dnet:languages @=@ dnet:languages @=@ ukr @=@ Ukrainian
+dnet:languages @=@ dnet:languages @=@ umb @=@ Umbundu
+dnet:languages @=@ dnet:languages @=@ und @=@ Undetermined
+dnet:languages @=@ dnet:languages @=@ urd @=@ Urdu
+dnet:languages @=@ dnet:languages @=@ uzb @=@ Uzbek
+dnet:languages @=@ dnet:languages @=@ vai @=@ Vai
+dnet:languages @=@ dnet:languages @=@ ven @=@ Venda
+dnet:languages @=@ dnet:languages @=@ vie @=@ Vietnamese
+dnet:languages @=@ dnet:languages @=@ vol @=@ Volapük
+dnet:languages @=@ dnet:languages @=@ vot @=@ Votic
+dnet:languages @=@ dnet:languages @=@ wak @=@ Wakashan
+dnet:languages @=@ dnet:languages @=@ wal @=@ Walamo
+dnet:languages @=@ dnet:languages @=@ wln @=@ Walloon
+dnet:languages @=@ dnet:languages @=@ war @=@ Waray
+dnet:languages @=@ dnet:languages @=@ was @=@ Washo
+dnet:languages @=@ dnet:languages @=@ cym/wel @=@ Welsh
+dnet:languages @=@ dnet:languages @=@ wol @=@ Wolof
+dnet:languages @=@ dnet:languages @=@ xho @=@ Xhosa
+dnet:languages @=@ dnet:languages @=@ sah @=@ Yakut
+dnet:languages @=@ dnet:languages @=@ yao @=@ Yao
+dnet:languages @=@ dnet:languages @=@ yap @=@ Yap
+dnet:languages @=@ dnet:languages @=@ yid @=@ Yiddish
+dnet:languages @=@ dnet:languages @=@ yor @=@ Yoruba
+dnet:languages @=@ dnet:languages @=@ zap @=@ Zapotec
+dnet:languages @=@ dnet:languages @=@ zen @=@ Zenaga
+dnet:languages @=@ dnet:languages @=@ zha @=@ Zhuang; Chuang
+dnet:languages @=@ dnet:languages @=@ zul @=@ Zulu
+dnet:languages @=@ dnet:languages @=@ zun @=@ Zuni
+dnet:languages @=@ dnet:languages @=@ sga @=@ old Irish
+nsf:contractTypes @=@ NSF Contract Types @=@ BOA/Task Order @=@ BOA/Task Order
+nsf:contractTypes @=@ NSF Contract Types @=@ Continuing grant @=@ Continuing grant
+nsf:contractTypes @=@ NSF Contract Types @=@ Contract @=@ Contract
+nsf:contractTypes @=@ NSF Contract Types @=@ Contract Interagency Agreement @=@ Contract Interagency Agreement
+nsf:contractTypes @=@ NSF Contract Types @=@ Cooperative Agreement @=@ Cooperative Agreement
+nsf:contractTypes @=@ NSF Contract Types @=@ Fellowship @=@ Fellowship
+nsf:contractTypes @=@ NSF Contract Types @=@ Fixed Price Award @=@ Fixed Price Award
+nsf:contractTypes @=@ NSF Contract Types @=@ GAA @=@ GAA
+nsf:contractTypes @=@ NSF Contract Types @=@ Interagency Agreement @=@ Interagency Agreement
+nsf:contractTypes @=@ NSF Contract Types @=@ Intergovernmental Personnel Award @=@ Intergovernmental Personnel Award
+nsf:contractTypes @=@ NSF Contract Types @=@ Personnel Agreement @=@ Personnel Agreement
+nsf:contractTypes @=@ NSF Contract Types @=@ Standard Grant @=@ Standard Grant
+ec:funding_relations @=@ ec:funding_relations @=@ ec:hasframeworkprogram @=@ hasframeworkprogram
+ec:funding_relations @=@ ec:funding_relations @=@ ec:hasprogram @=@ hasprogram
+ec:funding_relations @=@ ec:funding_relations @=@ ec:hasspecificprogram @=@ hasspecificprogram
+dnet:dataCite_resource @=@ dnet:dataCite_resource @=@ UNKNOWN @=@ UNKNOWN
+dnet:dataCite_resource @=@ dnet:dataCite_resource @=@ collection @=@ collection
+dnet:dataCite_resource @=@ dnet:dataCite_resource @=@ dataset @=@ dataset
+dnet:dataCite_resource @=@ dnet:dataCite_resource @=@ event @=@ event
+dnet:dataCite_resource @=@ dnet:dataCite_resource @=@ film @=@ film
+dnet:dataCite_resource @=@ dnet:dataCite_resource @=@ image @=@ image
+dnet:dataCite_resource @=@ dnet:dataCite_resource @=@ interactiveResource @=@ interactiveResource
+dnet:dataCite_resource @=@ dnet:dataCite_resource @=@ model @=@ model
+dnet:dataCite_resource @=@ dnet:dataCite_resource @=@ physicalObject @=@ physicalObject
+dnet:dataCite_resource @=@ dnet:dataCite_resource @=@ service @=@ service
+dnet:dataCite_resource @=@ dnet:dataCite_resource @=@ software @=@ software
+dnet:dataCite_resource @=@ dnet:dataCite_resource @=@ sound @=@ sound
+dnet:dataCite_resource @=@ dnet:dataCite_resource @=@ text @=@ text
+dnet:dataCite_resource @=@ dnet:dataCite_resource @=@ clinicalTrial @=@ Clinical trial
+dnet:dataCite_title @=@ dnet:dataCite_title @=@ alternative title @=@ alternative title
+dnet:dataCite_title @=@ dnet:dataCite_title @=@ main title @=@ main title
+dnet:dataCite_title @=@ dnet:dataCite_title @=@ subtitle @=@ subtitle
+dnet:dataCite_title @=@ dnet:dataCite_title @=@ translated title @=@ translated title
+datacite:relation_typologies @=@ datacite:relation_typologies @=@ IsCitedBy @=@ IsCitedBy
+datacite:relation_typologies @=@ datacite:relation_typologies @=@ IsNewVersionOf @=@ IsNewVersionOf
+datacite:relation_typologies @=@ datacite:relation_typologies @=@ IsPartOf @=@ IsPartOf
+datacite:relation_typologies @=@ datacite:relation_typologies @=@ IsPreviousVersionOf @=@ IsPreviousVersionOf
+datacite:relation_typologies @=@ datacite:relation_typologies @=@ IsReferencedBy @=@ IsReferencedBy
+datacite:relation_typologies @=@ datacite:relation_typologies @=@ References @=@ References
+datacite:relation_typologies @=@ datacite:relation_typologies @=@ UNKNOWN @=@ UNKNOWN
+dnet:result_typologies @=@ dnet:result_typologies @=@ dataset @=@ dataset
+dnet:result_typologies @=@ dnet:result_typologies @=@ other @=@ other
+dnet:result_typologies @=@ dnet:result_typologies @=@ publication @=@ publication
+dnet:result_typologies @=@ dnet:result_typologies @=@ software @=@ software
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ ERC-ADG @=@ Advanced Grant
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ BBI-CSA @=@ Bio-based Industries Coordination and Support action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ BBI-IA-DEMO @=@ Bio-based Industries Innovation action - Demonstration
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ BBI-IA-FLAG @=@ Bio-based Industries Innovation action - Flagship
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ BBI-RIA @=@ Bio-based Industries Research and Innovation action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ MSCA-IF-EF-CAR @=@ CAR – Career Restart panel
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ COFUND-EJP @=@ COFUND (European Joint Programme)
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ COFUND-PCP @=@ COFUND (PCP)
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ COFUND-PPI @=@ COFUND (PPI)
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ CS2-CSA @=@ CS2 Coordination and Support action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ CS2-IA @=@ CS2 Innovation Action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ CS2-RIA @=@ CS2 Research and Innovation action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ CSA-LS @=@ CSA Lump sum
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ ERC-COG @=@ Consolidator Grant
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ FCH2-CSA @=@ Coordination & support action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ CSA @=@ Coordination and support action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ MSCA-COFUND-DP @=@ Doctoral programmes
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ ECSEL-CSA @=@ ECSEL Coordination & Support action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ ECSEL-IA @=@ ECSEL Innovation Action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ ECSEL-RIA @=@ ECSEL Research and Innovation Actions
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ ERA-NET-Cofund @=@ ERA-NET Cofund
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ ERC-POC-LS @=@ ERC Proof of Concept Lump Sum Pilot
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ ERC-SyG @=@ ERC Synergy Grant
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ ERC-LVG @=@ ERC low value grant
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ H2020-EEN-SGA @=@ Enterprise Europe Network - Specific Grant Agreement
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ MSCA-ITN-EID @=@ European Industrial Doctorates
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ MSCA-ITN-EJD @=@ European Joint Doctorates
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ MSCA-ITN-ETN @=@ European Training Networks
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ FCH2-IA @=@ FCH2 Innovation action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ FCH2-RIA @=@ FCH2 Research and Innovation action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ MSCA-COFUND-FP @=@ Fellowship programmes
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ MSCA-IF-GF @=@ Global Fellowships
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ IMI2-CSA @=@ IMI2 Coordination & support action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ IMI2-RIA @=@ IMI2 Research and Innovation action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ Shift2Rail-IA-LS @=@ Innovation Action Lump-Sum
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ IA-LS @=@ Innovation Action Lump-Sum
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ IA @=@ Innovation action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ Shift2Rail-IA @=@ Innovation action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ PCP @=@ Pre-Commercial Procurement
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ ERC-POC @=@ Proof of Concept Grant
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ PPI @=@ Public Procurement of Innovative Solutions
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ MSCA-IF-EF-RI @=@ RI – Reintegration panel
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ MSCA-RISE @=@ RISE
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ Shift2Rail-RIA-LS @=@ Research and Innovation Action Lump-Sum
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ Shift2Rail-RIA @=@ Research and Innovation action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ RIA @=@ Research and Innovation action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ RIA-LS @=@ Research and Innovation action Lump Sum
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ SESAR-CSA @=@ SESAR: Coordination and Support Action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ SESAR-IA @=@ SESAR: Innovation action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ SESAR-RIA @=@ SESAR: Research and Innovation action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ SGA-RIA @=@ SGA Research and Innovation action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ SME-2b @=@ SME Instrument (grant only and blended finance)
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ SME-1 @=@ SME instrument phase 1
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ SME-2 @=@ SME instrument phase 2
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ Shift2Rail-CSA @=@ Shift2Rail - Coordination and Support action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ MSCA-IF-EF-SE @=@ Society and Enterprise panel
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ SGA-CSA @=@ Specific Grant agreement and Coordination and Support Action
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ MSCA-IF-EF-ST @=@ Standard EF
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ ERC-STG @=@ Starting Grant
+ec:h2020toas @=@ Horizon 2020 - Type of Actions @=@ MSCA-SNLS @=@ Grant to identified beneficiary - Coordination and support actions (MSCA-Special Needs lump sum)
+wt:contractTypes @=@ wt:contractTypes @=@ UNKNOWN @=@ UNKNOWN
+dnet:countries @=@ dnet:countries @=@ AF @=@ Afghanistan
+dnet:countries @=@ dnet:countries @=@ AL @=@ Albania
+dnet:countries @=@ dnet:countries @=@ DZ @=@ Algeria
+dnet:countries @=@ dnet:countries @=@ AS @=@ American Samoa
+dnet:countries @=@ dnet:countries @=@ AD @=@ Andorra
+dnet:countries @=@ dnet:countries @=@ AO @=@ Angola
+dnet:countries @=@ dnet:countries @=@ AI @=@ Anguilla
+dnet:countries @=@ dnet:countries @=@ AQ @=@ Antarctica
+dnet:countries @=@ dnet:countries @=@ AG @=@ Antigua and Barbuda
+dnet:countries @=@ dnet:countries @=@ AR @=@ Argentina
+dnet:countries @=@ dnet:countries @=@ AM @=@ Armenia
+dnet:countries @=@ dnet:countries @=@ AW @=@ Aruba
+dnet:countries @=@ dnet:countries @=@ AU @=@ Australia
+dnet:countries @=@ dnet:countries @=@ AT @=@ Austria
+dnet:countries @=@ dnet:countries @=@ AZ @=@ Azerbaijan
+dnet:countries @=@ dnet:countries @=@ BS @=@ Bahamas
+dnet:countries @=@ dnet:countries @=@ BH @=@ Bahrain
+dnet:countries @=@ dnet:countries @=@ BD @=@ Bangladesh
+dnet:countries @=@ dnet:countries @=@ BB @=@ Barbados
+dnet:countries @=@ dnet:countries @=@ BY @=@ Belarus
+dnet:countries @=@ dnet:countries @=@ BE @=@ Belgium
+dnet:countries @=@ dnet:countries @=@ BZ @=@ Belize
+dnet:countries @=@ dnet:countries @=@ BJ @=@ Benin
+dnet:countries @=@ dnet:countries @=@ BM @=@ Bermuda
+dnet:countries @=@ dnet:countries @=@ BT @=@ Bhutan
+dnet:countries @=@ dnet:countries @=@ BO @=@ Bolivia
+dnet:countries @=@ dnet:countries @=@ BQ @=@ Bonaire, Sint Eustatius and Saba
+dnet:countries @=@ dnet:countries @=@ BA @=@ Bosnia and Herzegovina
+dnet:countries @=@ dnet:countries @=@ BW @=@ Botswana
+dnet:countries @=@ dnet:countries @=@ BV @=@ Bouvet Island
+dnet:countries @=@ dnet:countries @=@ BR @=@ Brazil
+dnet:countries @=@ dnet:countries @=@ IO @=@ British Indian Ocean Territory
+dnet:countries @=@ dnet:countries @=@ BN @=@ Brunei Darussalam
+dnet:countries @=@ dnet:countries @=@ BG @=@ Bulgaria
+dnet:countries @=@ dnet:countries @=@ BF @=@ Burkina Faso
+dnet:countries @=@ dnet:countries @=@ BI @=@ Burundi
+dnet:countries @=@ dnet:countries @=@ KH @=@ Cambodia
+dnet:countries @=@ dnet:countries @=@ CM @=@ Cameroon
+dnet:countries @=@ dnet:countries @=@ CA @=@ Canada
+dnet:countries @=@ dnet:countries @=@ CV @=@ Cape Verde
+dnet:countries @=@ dnet:countries @=@ KY @=@ Cayman Islands
+dnet:countries @=@ dnet:countries @=@ CF @=@ Central African Republic
+dnet:countries @=@ dnet:countries @=@ TD @=@ Chad
+dnet:countries @=@ dnet:countries @=@ CL @=@ Chile
+dnet:countries @=@ dnet:countries @=@ CN @=@ China (People's Republic of)
+dnet:countries @=@ dnet:countries @=@ CX @=@ Christmas Island
+dnet:countries @=@ dnet:countries @=@ CC @=@ Cocos (Keeling) Islands
+dnet:countries @=@ dnet:countries @=@ CO @=@ Colombia
+dnet:countries @=@ dnet:countries @=@ KM @=@ Comoros
+dnet:countries @=@ dnet:countries @=@ CG @=@ Congo
+dnet:countries @=@ dnet:countries @=@ CD @=@ Congo (Democratic Republic of)
+dnet:countries @=@ dnet:countries @=@ CK @=@ Cook Islands
+dnet:countries @=@ dnet:countries @=@ CR @=@ Costa Rica
+dnet:countries @=@ dnet:countries @=@ CI @=@ Cote d'Ivoire
+dnet:countries @=@ dnet:countries @=@ HR @=@ Croatia
+dnet:countries @=@ dnet:countries @=@ CU @=@ Cuba
+dnet:countries @=@ dnet:countries @=@ CW @=@ Curaçao
+dnet:countries @=@ dnet:countries @=@ CY @=@ Cyprus
+dnet:countries @=@ dnet:countries @=@ CZ @=@ Czech Republic
+dnet:countries @=@ dnet:countries @=@ DK @=@ Denmark
+dnet:countries @=@ dnet:countries @=@ DJ @=@ Djibouti
+dnet:countries @=@ dnet:countries @=@ DM @=@ Dominica
+dnet:countries @=@ dnet:countries @=@ DO @=@ Dominican Republic
+dnet:countries @=@ dnet:countries @=@ EC @=@ Ecuador
+dnet:countries @=@ dnet:countries @=@ EG @=@ Egypt
+dnet:countries @=@ dnet:countries @=@ SV @=@ El Salvador
+dnet:countries @=@ dnet:countries @=@ GQ @=@ Equatorial Guinea
+dnet:countries @=@ dnet:countries @=@ ER @=@ Eritrea
+dnet:countries @=@ dnet:countries @=@ EE @=@ Estonia
+dnet:countries @=@ dnet:countries @=@ ET @=@ Ethiopia
+dnet:countries @=@ dnet:countries @=@ EU @=@ European Union
+dnet:countries @=@ dnet:countries @=@ FK @=@ Falkland Islands (Malvinas)
+dnet:countries @=@ dnet:countries @=@ FO @=@ Faroe Islands
+dnet:countries @=@ dnet:countries @=@ FJ @=@ Fiji
+dnet:countries @=@ dnet:countries @=@ FI @=@ Finland
+dnet:countries @=@ dnet:countries @=@ MK @=@ Former Yugoslav Republic of Macedonia
+dnet:countries @=@ dnet:countries @=@ FR @=@ France
+dnet:countries @=@ dnet:countries @=@ GF @=@ French Guiana
+dnet:countries @=@ dnet:countries @=@ PF @=@ French Polynesia
+dnet:countries @=@ dnet:countries @=@ TF @=@ French Southern Territories
+dnet:countries @=@ dnet:countries @=@ GA @=@ Gabon
+dnet:countries @=@ dnet:countries @=@ GM @=@ Gambia
+dnet:countries @=@ dnet:countries @=@ GE @=@ Georgia
+dnet:countries @=@ dnet:countries @=@ DE @=@ Germany
+dnet:countries @=@ dnet:countries @=@ GH @=@ Ghana
+dnet:countries @=@ dnet:countries @=@ GI @=@ Gibraltar
+dnet:countries @=@ dnet:countries @=@ GR @=@ Greece
+dnet:countries @=@ dnet:countries @=@ GL @=@ Greenland
+dnet:countries @=@ dnet:countries @=@ GD @=@ Grenada
+dnet:countries @=@ dnet:countries @=@ GP @=@ Guadeloupe
+dnet:countries @=@ dnet:countries @=@ GU @=@ Guam
+dnet:countries @=@ dnet:countries @=@ GT @=@ Guatemala
+dnet:countries @=@ dnet:countries @=@ GG @=@ Guernsey
+dnet:countries @=@ dnet:countries @=@ GN @=@ Guinea
+dnet:countries @=@ dnet:countries @=@ GW @=@ Guinea-Bissau
+dnet:countries @=@ dnet:countries @=@ GY @=@ Guyana
+dnet:countries @=@ dnet:countries @=@ HT @=@ Haiti
+dnet:countries @=@ dnet:countries @=@ HM @=@ Heard Island and McDonald Islands
+dnet:countries @=@ dnet:countries @=@ VA @=@ Holy See (Vatican City State)
+dnet:countries @=@ dnet:countries @=@ HN @=@ Honduras
+dnet:countries @=@ dnet:countries @=@ HK @=@ Hong Kong
+dnet:countries @=@ dnet:countries @=@ HU @=@ Hungary
+dnet:countries @=@ dnet:countries @=@ IS @=@ Iceland
+dnet:countries @=@ dnet:countries @=@ IN @=@ India
+dnet:countries @=@ dnet:countries @=@ ID @=@ Indonesia
+dnet:countries @=@ dnet:countries @=@ IR @=@ Iran (Islamic Republic of)
+dnet:countries @=@ dnet:countries @=@ IQ @=@ Iraq
+dnet:countries @=@ dnet:countries @=@ IE @=@ Ireland
+dnet:countries @=@ dnet:countries @=@ IM @=@ Isle of Man
+dnet:countries @=@ dnet:countries @=@ IL @=@ Israel
+dnet:countries @=@ dnet:countries @=@ IT @=@ Italy
+dnet:countries @=@ dnet:countries @=@ JM @=@ Jamaica
+dnet:countries @=@ dnet:countries @=@ JP @=@ Japan
+dnet:countries @=@ dnet:countries @=@ JE @=@ Jersey
+dnet:countries @=@ dnet:countries @=@ JO @=@ Jordan
+dnet:countries @=@ dnet:countries @=@ KZ @=@ Kazakhstan
+dnet:countries @=@ dnet:countries @=@ KE @=@ Kenya
+dnet:countries @=@ dnet:countries @=@ KI @=@ Kiribati
+dnet:countries @=@ dnet:countries @=@ KR @=@ Korea (Republic of)
+dnet:countries @=@ dnet:countries @=@ KP @=@ Korea, Democratic People's Republic of
+dnet:countries @=@ dnet:countries @=@ XK @=@ Kosovo * UN resolution
+dnet:countries @=@ dnet:countries @=@ KW @=@ Kuwait
+dnet:countries @=@ dnet:countries @=@ KG @=@ Kyrgyzstan
+dnet:countries @=@ dnet:countries @=@ LA @=@ Lao (People's Democratic Republic)
+dnet:countries @=@ dnet:countries @=@ LV @=@ Latvia
+dnet:countries @=@ dnet:countries @=@ LB @=@ Lebanon
+dnet:countries @=@ dnet:countries @=@ LS @=@ Lesotho
+dnet:countries @=@ dnet:countries @=@ LR @=@ Liberia
+dnet:countries @=@ dnet:countries @=@ LY @=@ Libyan Arab Jamahiriya
+dnet:countries @=@ dnet:countries @=@ LI @=@ Liechtenstein
+dnet:countries @=@ dnet:countries @=@ LT @=@ Lithuania
+dnet:countries @=@ dnet:countries @=@ LU @=@ Luxembourg
+dnet:countries @=@ dnet:countries @=@ MO @=@ Macao
+dnet:countries @=@ dnet:countries @=@ MG @=@ Madagascar
+dnet:countries @=@ dnet:countries @=@ MW @=@ Malawi
+dnet:countries @=@ dnet:countries @=@ MY @=@ Malaysia
+dnet:countries @=@ dnet:countries @=@ MV @=@ Maldives
+dnet:countries @=@ dnet:countries @=@ ML @=@ Mali
+dnet:countries @=@ dnet:countries @=@ MT @=@ Malta
+dnet:countries @=@ dnet:countries @=@ MH @=@ Marshall Islands
+dnet:countries @=@ dnet:countries @=@ MQ @=@ Martinique
+dnet:countries @=@ dnet:countries @=@ MR @=@ Mauritania
+dnet:countries @=@ dnet:countries @=@ MU @=@ Mauritius
+dnet:countries @=@ dnet:countries @=@ YT @=@ Mayotte
+dnet:countries @=@ dnet:countries @=@ MX @=@ Mexico
+dnet:countries @=@ dnet:countries @=@ FM @=@ Micronesia, Federated States of
+dnet:countries @=@ dnet:countries @=@ MD @=@ Moldova (Republic of)
+dnet:countries @=@ dnet:countries @=@ MN @=@ Mongolia
+dnet:countries @=@ dnet:countries @=@ ME @=@ Montenegro
+dnet:countries @=@ dnet:countries @=@ MS @=@ Montserrat
+dnet:countries @=@ dnet:countries @=@ MA @=@ Morocco
+dnet:countries @=@ dnet:countries @=@ MZ @=@ Mozambique
+dnet:countries @=@ dnet:countries @=@ MM @=@ Myanmar
+dnet:countries @=@ dnet:countries @=@ NA @=@ Namibia
+dnet:countries @=@ dnet:countries @=@ NR @=@ Nauru
+dnet:countries @=@ dnet:countries @=@ NP @=@ Nepal
+dnet:countries @=@ dnet:countries @=@ NL @=@ Netherlands
+dnet:countries @=@ dnet:countries @=@ AN @=@ Netherlands Antilles
+dnet:countries @=@ dnet:countries @=@ NC @=@ New Caledonia
+dnet:countries @=@ dnet:countries @=@ NZ @=@ New Zealand
+dnet:countries @=@ dnet:countries @=@ NI @=@ Nicaragua
+dnet:countries @=@ dnet:countries @=@ NE @=@ Niger
+dnet:countries @=@ dnet:countries @=@ NG @=@ Nigeria
+dnet:countries @=@ dnet:countries @=@ NU @=@ Niue
+dnet:countries @=@ dnet:countries @=@ NF @=@ Norfolk Island
+dnet:countries @=@ dnet:countries @=@ MP @=@ Northern Mariana Islands
+dnet:countries @=@ dnet:countries @=@ NO @=@ Norway
+dnet:countries @=@ dnet:countries @=@ OC @=@ Oceania
+dnet:countries @=@ dnet:countries @=@ OM @=@ Oman
+dnet:countries @=@ dnet:countries @=@ PK @=@ Pakistan
+dnet:countries @=@ dnet:countries @=@ PW @=@ Palau
+dnet:countries @=@ dnet:countries @=@ PS @=@ Palestinian-administered areas
+dnet:countries @=@ dnet:countries @=@ PA @=@ Panama
+dnet:countries @=@ dnet:countries @=@ PG @=@ Papua New Guinea
+dnet:countries @=@ dnet:countries @=@ PY @=@ Paraguay
+dnet:countries @=@ dnet:countries @=@ PE @=@ Peru
+dnet:countries @=@ dnet:countries @=@ PH @=@ Philippines
+dnet:countries @=@ dnet:countries @=@ PN @=@ Pitcairn
+dnet:countries @=@ dnet:countries @=@ PL @=@ Poland
+dnet:countries @=@ dnet:countries @=@ PT @=@ Portugal
+dnet:countries @=@ dnet:countries @=@ PR @=@ Puerto Rico
+dnet:countries @=@ dnet:countries @=@ QA @=@ Qatar
+dnet:countries @=@ dnet:countries @=@ RO @=@ Romania
+dnet:countries @=@ dnet:countries @=@ RU @=@ Russian Federation
+dnet:countries @=@ dnet:countries @=@ RW @=@ Rwanda
+dnet:countries @=@ dnet:countries @=@ RE @=@ Réunion
+dnet:countries @=@ dnet:countries @=@ SH @=@ Saint Helena, Ascension and Tristan da Cunha
+dnet:countries @=@ dnet:countries @=@ KN @=@ Saint Kitts and Nevis
+dnet:countries @=@ dnet:countries @=@ LC @=@ Saint Lucia
+dnet:countries @=@ dnet:countries @=@ MF @=@ Saint Martin (French Part)
+dnet:countries @=@ dnet:countries @=@ PM @=@ Saint Pierre and Miquelon
+dnet:countries @=@ dnet:countries @=@ VC @=@ Saint Vincent and the Grenadines
+dnet:countries @=@ dnet:countries @=@ BL @=@ Saint-Barthélemy
+dnet:countries @=@ dnet:countries @=@ WS @=@ Samoa
+dnet:countries @=@ dnet:countries @=@ SM @=@ San Marino
+dnet:countries @=@ dnet:countries @=@ SA @=@ Saudi Arabia
+dnet:countries @=@ dnet:countries @=@ SN @=@ Senegal
+dnet:countries @=@ dnet:countries @=@ RS @=@ Serbia
+dnet:countries @=@ dnet:countries @=@ CS @=@ Serbia and Montenegro
+dnet:countries @=@ dnet:countries @=@ SC @=@ Seychelles
+dnet:countries @=@ dnet:countries @=@ SL @=@ Sierra Leone
+dnet:countries @=@ dnet:countries @=@ SG @=@ Singapore
+dnet:countries @=@ dnet:countries @=@ SX @=@ Sint Maarten (Dutch Part)
+dnet:countries @=@ dnet:countries @=@ SK @=@ Slovakia
+dnet:countries @=@ dnet:countries @=@ SI @=@ Slovenia
+dnet:countries @=@ dnet:countries @=@ SB @=@ Solomon Islands
+dnet:countries @=@ dnet:countries @=@ SO @=@ Somalia
+dnet:countries @=@ dnet:countries @=@ ZA @=@ South Africa
+dnet:countries @=@ dnet:countries @=@ GS @=@ South Georgia and the South Sandwich Islands
+dnet:countries @=@ dnet:countries @=@ SS @=@ South Sudan
+dnet:countries @=@ dnet:countries @=@ ES @=@ Spain
+dnet:countries @=@ dnet:countries @=@ LK @=@ Sri Lanka
+dnet:countries @=@ dnet:countries @=@ SD @=@ Sudan
+dnet:countries @=@ dnet:countries @=@ SR @=@ Suriname
+dnet:countries @=@ dnet:countries @=@ SJ @=@ Svalbard and Jan Mayen
+dnet:countries @=@ dnet:countries @=@ SZ @=@ Swaziland
+dnet:countries @=@ dnet:countries @=@ SE @=@ Sweden
+dnet:countries @=@ dnet:countries @=@ CH @=@ Switzerland
+dnet:countries @=@ dnet:countries @=@ SY @=@ Syrian Arab Republic
+dnet:countries @=@ dnet:countries @=@ ST @=@ São Tomé and Príncipe
+dnet:countries @=@ dnet:countries @=@ TW @=@ Taiwan
+dnet:countries @=@ dnet:countries @=@ TJ @=@ Tajikistan
+dnet:countries @=@ dnet:countries @=@ TZ @=@ Tanzania (United Republic of)
+dnet:countries @=@ dnet:countries @=@ TH @=@ Thailand
+dnet:countries @=@ dnet:countries @=@ TL @=@ Timor-Leste
+dnet:countries @=@ dnet:countries @=@ TG @=@ Togo
+dnet:countries @=@ dnet:countries @=@ TK @=@ Tokelau
+dnet:countries @=@ dnet:countries @=@ TO @=@ Tonga
+dnet:countries @=@ dnet:countries @=@ TT @=@ Trinidad and Tobago
+dnet:countries @=@ dnet:countries @=@ TN @=@ Tunisia
+dnet:countries @=@ dnet:countries @=@ TR @=@ Turkey
+dnet:countries @=@ dnet:countries @=@ TM @=@ Turkmenistan
+dnet:countries @=@ dnet:countries @=@ TC @=@ Turks and Caicos Islands
+dnet:countries @=@ dnet:countries @=@ TV @=@ Tuvalu
+dnet:countries @=@ dnet:countries @=@ UNKNOWN @=@ UNKNOWN
+dnet:countries @=@ dnet:countries @=@ UG @=@ Uganda
+dnet:countries @=@ dnet:countries @=@ UA @=@ Ukraine
+dnet:countries @=@ dnet:countries @=@ AE @=@ United Arab Emirates
+dnet:countries @=@ dnet:countries @=@ GB @=@ United Kingdom
+dnet:countries @=@ dnet:countries @=@ US @=@ United States
+dnet:countries @=@ dnet:countries @=@ UM @=@ United States Minor Outlying Islands
+dnet:countries @=@ dnet:countries @=@ UY @=@ Uruguay
+dnet:countries @=@ dnet:countries @=@ UZ @=@ Uzbekistan
+dnet:countries @=@ dnet:countries @=@ VU @=@ Vanuatu
+dnet:countries @=@ dnet:countries @=@ VE @=@ Venezuela
+dnet:countries @=@ dnet:countries @=@ VN @=@ Viet Nam
+dnet:countries @=@ dnet:countries @=@ VG @=@ Virgin Islands (British)
+dnet:countries @=@ dnet:countries @=@ VI @=@ Virgin Islands, U.S.
+dnet:countries @=@ dnet:countries @=@ WF @=@ Wallis and Futuna
+dnet:countries @=@ dnet:countries @=@ EH @=@ Western Sahara
+dnet:countries @=@ dnet:countries @=@ YE @=@ Yemen
+dnet:countries @=@ dnet:countries @=@ YU @=@ Yugoslavia
+dnet:countries @=@ dnet:countries @=@ ZM @=@ Zambia
+dnet:countries @=@ dnet:countries @=@ ZW @=@ Zimbabwe
+dnet:countries @=@ dnet:countries @=@ AX @=@ Åland Islands
+dnet:datasourceCompatibilityLevel @=@ dnet:datasourceCompatibilityLevel @=@ openaire2.0 @=@ OpenAIRE 2.0 (EC funding)
+dnet:datasourceCompatibilityLevel @=@ dnet:datasourceCompatibilityLevel @=@ driver-openaire2.0 @=@ OpenAIRE 2.0+ (DRIVER OA, EC funding)
+dnet:datasourceCompatibilityLevel @=@ dnet:datasourceCompatibilityLevel @=@ openaire3.0 @=@ OpenAIRE 3.0 (OA, funding)
+dnet:datasourceCompatibilityLevel @=@ dnet:datasourceCompatibilityLevel @=@ openaire4.0 @=@ OpenAIRE 4.0 (inst.&thematic. repo.)
+dnet:datasourceCompatibilityLevel @=@ dnet:datasourceCompatibilityLevel @=@ driver @=@ OpenAIRE Basic (DRIVER OA)
+dnet:datasourceCompatibilityLevel @=@ dnet:datasourceCompatibilityLevel @=@ openaire2.0_data @=@ OpenAIRE Data (funded, referenced datasets)
+dnet:datasourceCompatibilityLevel @=@ dnet:datasourceCompatibilityLevel @=@ hostedBy @=@ collected from a compatible aggregator
+dnet:datasourceCompatibilityLevel @=@ dnet:datasourceCompatibilityLevel @=@ UNKNOWN @=@ not available
+dnet:datasourceCompatibilityLevel @=@ dnet:datasourceCompatibilityLevel @=@ native @=@ proprietary
+dnet:datasourceCompatibilityLevel @=@ dnet:datasourceCompatibilityLevel @=@ notCompatible @=@ under validation
+dnet:datasourceCompatibilityLevel @=@ dnet:datasourceCompatibilityLevel @=@ openaire-cris_1.1 @=@ OpenAIRE CRIS v1.1
+fct:funding_relations @=@ fct:funding_relations @=@ fct:hasParentFunding @=@ fct:hasParentFunding
+dnet:protocols @=@ dnet:protocols @=@ HTTPWithFileName @=@ HTTPWithFileName
+dnet:protocols @=@ dnet:protocols @=@ NetCDF @=@ NetCDF
+dnet:protocols @=@ dnet:protocols @=@ OpenDAP @=@ OpenDAP
+dnet:protocols @=@ dnet:protocols @=@ schemaorg @=@ Schema.org
+dnet:protocols @=@ dnet:protocols @=@ UNKNOWN @=@ UNKNOWN
+dnet:protocols @=@ dnet:protocols @=@ api @=@ api
+dnet:protocols @=@ dnet:protocols @=@ dataciteESPlugins @=@ dataciteESPlugins
+dnet:protocols @=@ dnet:protocols @=@ datasetsbyjournal @=@ datasetsbyjournal
+dnet:protocols @=@ dnet:protocols @=@ datasetsbyproject @=@ datasetsbyproject
+dnet:protocols @=@ dnet:protocols @=@ excelFile @=@ excelFile
+dnet:protocols @=@ dnet:protocols @=@ file @=@ file
+dnet:protocols @=@ dnet:protocols @=@ fileGzip @=@ fileGzip
+dnet:protocols @=@ dnet:protocols @=@ files_by_rpc @=@ files_by_rpc
+dnet:protocols @=@ dnet:protocols @=@ files_from_mdstore @=@ files_from_mdstore
+dnet:protocols @=@ dnet:protocols @=@ files_from_metadata @=@ files_from_metadata
+dnet:protocols @=@ dnet:protocols @=@ filesystem @=@ filesystem
+dnet:protocols @=@ dnet:protocols @=@ ftp @=@ ftp
+dnet:protocols @=@ dnet:protocols @=@ gristProjects @=@ gristProjects
+dnet:protocols @=@ dnet:protocols @=@ gtr2Projects @=@ gtr2Projects
+dnet:protocols @=@ dnet:protocols @=@ http @=@ http
+dnet:protocols @=@ dnet:protocols @=@ httpCSV @=@ httpCSV
+dnet:protocols @=@ dnet:protocols @=@ httpList @=@ httpList
+dnet:protocols @=@ dnet:protocols @=@ jdbc @=@ jdbc
+dnet:protocols @=@ dnet:protocols @=@ oai @=@ oai
+dnet:protocols @=@ dnet:protocols @=@ oai_sets @=@ oai_sets
+dnet:protocols @=@ dnet:protocols @=@ other @=@ other
+dnet:protocols @=@ dnet:protocols @=@ re3data @=@ re3data
+dnet:protocols @=@ dnet:protocols @=@ rest @=@ rest +dnet:protocols @=@ dnet:protocols @=@ rest_json2xml @=@ rest_json2xml +dnet:protocols @=@ dnet:protocols @=@ sftp @=@ sftp +dnet:protocols @=@ dnet:protocols @=@ soap @=@ soap +dnet:protocols @=@ dnet:protocols @=@ sparql @=@ sparql +dnet:protocols @=@ dnet:protocols @=@ sword @=@ sword +dnet:protocols @=@ dnet:protocols @=@ targz @=@ targz +dnet:protocols @=@ dnet:protocols @=@ remoteMdstore @=@ remoteMdstore +wt:funding_typologies @=@ Wellcome Trust: Funding Typologies @=@ wt:fundingStream @=@ Wellcome Trust: Funding Stream +dnet:externalReference_typologies @=@ dnet:externalReference_typologies @=@ accessionNumber @=@ accessionNumber +dnet:externalReference_typologies @=@ dnet:externalReference_typologies @=@ dataset @=@ dataset +dnet:externalReference_typologies @=@ dnet:externalReference_typologies @=@ software @=@ software +datacite:id_typologies @=@ datacite:id_typologies @=@ ARK @=@ ARK +datacite:id_typologies @=@ datacite:id_typologies @=@ DOI @=@ DOI +datacite:id_typologies @=@ datacite:id_typologies @=@ EAN13 @=@ EAN13 +datacite:id_typologies @=@ datacite:id_typologies @=@ EISSN @=@ EISSN +datacite:id_typologies @=@ datacite:id_typologies @=@ Handle @=@ Handle +datacite:id_typologies @=@ datacite:id_typologies @=@ ISBN @=@ ISBN +datacite:id_typologies @=@ datacite:id_typologies @=@ ISSN @=@ ISSN +datacite:id_typologies @=@ datacite:id_typologies @=@ ISTC @=@ ISTC +datacite:id_typologies @=@ datacite:id_typologies @=@ LISSN @=@ LISSN +datacite:id_typologies @=@ datacite:id_typologies @=@ LSID @=@ LSID +datacite:id_typologies @=@ datacite:id_typologies @=@ PURL @=@ PURL +datacite:id_typologies @=@ datacite:id_typologies @=@ UNKNOWN @=@ UNKNOWN +datacite:id_typologies @=@ datacite:id_typologies @=@ UPC @=@ UPC +datacite:id_typologies @=@ datacite:id_typologies @=@ URL @=@ URL +datacite:id_typologies @=@ datacite:id_typologies @=@ URN @=@ URN +dnet:pid_types @=@ dnet:pid_types @=@ actrn @=@ ACTRN Identifier +dnet:pid_types @=@ dnet:pid_types @=@ nct @=@ ClinicalTrials.gov Identifier +dnet:pid_types @=@ dnet:pid_types @=@ euctr @=@ EU Clinical Trials Register +dnet:pid_types @=@ dnet:pid_types @=@ epo_id @=@ European Patent Office application ID +dnet:pid_types @=@ dnet:pid_types @=@ gsk @=@ GSK Identifier +dnet:pid_types @=@ dnet:pid_types @=@ GeoPass @=@ Geographic Location-Password Scheme +dnet:pid_types @=@ dnet:pid_types @=@ GBIF @=@ Global Biodiversity Information Facility +dnet:pid_types @=@ dnet:pid_types @=@ isrctn @=@ ISRCTN Identifier +dnet:pid_types @=@ dnet:pid_types @=@ ISNI @=@ International Standard Name Identifier +dnet:pid_types @=@ dnet:pid_types @=@ jprn @=@ JPRN Identifier +dnet:pid_types @=@ dnet:pid_types @=@ mag_id @=@ Microsoft Academic Graph Identifier +dnet:pid_types @=@ dnet:pid_types @=@ oai @=@ Open Archives Initiative +dnet:pid_types @=@ dnet:pid_types @=@ orcid @=@ Open Researcher and Contributor ID +dnet:pid_types @=@ dnet:pid_types @=@ PANGAEA @=@ PANGAEA +dnet:pid_types @=@ dnet:pid_types @=@ epo_nr_epodoc @=@ Patent application number in EPODOC format +dnet:pid_types @=@ dnet:pid_types @=@ UNKNOWN @=@ UNKNOWN +dnet:pid_types @=@ dnet:pid_types @=@ VIAF @=@ Virtual International Authority File +dnet:pid_types @=@ dnet:pid_types @=@ arXiv @=@ arXiv +dnet:pid_types @=@ dnet:pid_types @=@ doi @=@ doi +dnet:pid_types @=@ dnet:pid_types @=@ grid @=@ grid +dnet:pid_types @=@ dnet:pid_types @=@ info:eu-repo/dai @=@ info:eu-repo/dai +dnet:pid_types @=@ dnet:pid_types @=@ orcidworkid @=@ orcid workid 
+dnet:pid_types @=@ dnet:pid_types @=@ pmc @=@ pmc +dnet:pid_types @=@ dnet:pid_types @=@ pmid @=@ pmid +dnet:pid_types @=@ dnet:pid_types @=@ urn @=@ urn +dnet:pid_types @=@ dnet:pid_types @=@ who @=@ WHO Identifier +dnet:pid_types @=@ dnet:pid_types @=@ drks @=@ DRKS Identifier +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/SUBJECT/ACM @=@ An ACM classification term that can be associated to your publications +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/SUBJECT/ARXIV @=@ An ARXIV classification term that can be associated to your publications +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/SUBJECT/DDC @=@ A Dewey Decimal classification term (DDC) that can be associated to your publications +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/SUBJECT/JEL @=@ A Journal of Economic Literature (JEL) classification term that can be associated to your publications +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/OPENACCESS_VERSION @=@ An Open Access versions of your publications +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/DATASET/IS_REFERENCED_BY @=@ A dataset referenced by your records +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/DATASET/REFERENCES @=@ A dataset that refers to your records +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/DATASET/IS_RELATED_TO @=@ A dataset related to your records +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/DATASET/IS_SUPPLEMENTED_TO @=@ A dataset that supplements your records +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/PUBLICATION/IS_RELATED_TO @=@ A publication related to your records +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/PUBLICATION/REFERENCES @=@ A publication referenced by your records +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/PUBLICATION/IS_REFERENCED_BY @=@ A publication that refers to your records +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/PUBLICATION/IS_SUPPLEMENTED_BY @=@ A publication that is supplemented by your records +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/PUBLICATION/IS_SUPPLEMENTED_TO @=@ A publication that supplements your records +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/SOFTWARE @=@ A software referred by your records +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MORE/OPENACCESS_VERSION @=@ Another Open Access version of a publication +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MORE/PID @=@ Another persistent identifier associated to your publications +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/SUBJECT/MESHEUROPMC @=@ A classification term from the Medical Subject Headings (MeSH) that can be associated to your publications +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/ABSTRACT @=@ An abstract describing among your publications +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/PUBLICATION_DATE @=@ A date of publication missing in your content +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/PID @=@ A persistent identifier associated to your publications +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MORE/SUBJECT/ACM @=@ Another ACM classification term that can be associated to your publications +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MORE/SUBJECT/ARXIV @=@ Another ARXIV classification term that can be associated to your publications +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MORE/SUBJECT/DDC @=@ Another Dewey Decimal classification term (DDC) that can be associated to your 
publications +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MORE/SUBJECT/JEL @=@ Another Journal of Economic Literature (JEL) classification term that can be associated to your publications +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MORE/SUBJECT/MESHEUROPMC @=@ Another classification term from the Medical Subject Headings (MeSH) that can be associated to your publications +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/PROJECT @=@ A project reference that can be associated to your publications +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/DATASET/IS_SUPPLEMENTED_BY @=@ A dataset that is supplemented by your records +dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/AUTHOR/ORCID @=@ An Open Researcher and Contributor ID (ORCID) that can be associated to an author of your publications +dnet:review_levels @=@ dnet:review_levels @=@ 0000 @=@ Unknown +dnet:review_levels @=@ dnet:review_levels @=@ 0002 @=@ nonPeerReviewed +dnet:review_levels @=@ dnet:review_levels @=@ 0001 @=@ peerReviewed \ No newline at end of file
diff --git a/dhp-workflows/dhp-graph-mapper/src/test/resources/eu/dnetlib/dhp/oa/graph/raw/datasources_resultset_entry.json b/dhp-workflows/dhp-graph-mapper/src/test/resources/eu/dnetlib/dhp/oa/graph/raw/datasources_resultset_entry.json index f4c5f97ed..0f1da7095 100644 --- a/dhp-workflows/dhp-graph-mapper/src/test/resources/eu/dnetlib/dhp/oa/graph/raw/datasources_resultset_entry.json +++ b/dhp-workflows/dhp-graph-mapper/src/test/resources/eu/dnetlib/dhp/oa/graph/raw/datasources_resultset_entry.json @@ -6,9 +6,10 @@ }, { "field": "identities", - "type": "not_used", + "type": "array", "value": [ "274269ac6f3b::2579-5449", + "piwik:13", null ] },
diff --git a/dhp-workflows/dhp-graph-provision-scholexplorer/pom.xml b/dhp-workflows/dhp-graph-provision-scholexplorer/pom.xml index e0ce739cf..2466ca8e2 100644 --- a/dhp-workflows/dhp-graph-provision-scholexplorer/pom.xml +++ b/dhp-workflows/dhp-graph-provision-scholexplorer/pom.xml @@ -3,7 +3,7 @@ dhp-workflows eu.dnetlib.dhp - 1.2.2-SNAPSHOT + 1.2.3-SNAPSHOT 4.0.0
diff --git a/dhp-workflows/dhp-graph-provision/pom.xml b/dhp-workflows/dhp-graph-provision/pom.xml index 62bf7186c..b0aec1e5d 100644 --- a/dhp-workflows/dhp-graph-provision/pom.xml +++ b/dhp-workflows/dhp-graph-provision/pom.xml @@ -3,7 +3,7 @@ dhp-workflows eu.dnetlib.dhp - 1.2.2-SNAPSHOT + 1.2.3-SNAPSHOT 4.0.0
diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/utils/XmlRecordFactory.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/utils/XmlRecordFactory.java index d950a816d..b2aa01dc7 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/utils/XmlRecordFactory.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/utils/XmlRecordFactory.java @@ -896,6 +896,12 @@ public class XmlRecordFactory implements Serializable { if (p.getContracttype() != null) { metadata.add(XmlSerializationUtils.mapQualifier("contracttype", p.getContracttype())); } + if (p.getOamandatepublications() != null) { + metadata + .add( + XmlSerializationUtils + .asXmlElement("oamandatepublications", p.getOamandatepublications().getValue())); + } if (p.getEcsc39() != null) { metadata.add(XmlSerializationUtils.asXmlElement("ecsc39", p.getEcsc39().getValue())); } @@ -1165,10 +1171,10 @@ .asXmlElement( "distributionlocation", instance.getDistributionlocation())); }
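For orientation, the vocabulary files patched above use a fixed four-field record layout, vocabularyId @=@ vocabularyName @=@ termCode @=@ termLabel, one term per line. The following is a minimal, hypothetical Java sketch of how such records could be parsed into per-vocabulary code-to-label lookup tables; the class and method names are illustrative only and are not part of the dnet-hadoop codebase.

    import java.util.HashMap;
    import java.util.Map;
    import java.util.stream.Stream;

    public class VocabularyTermsSketch {

        private static final String SEPARATOR = " @=@ ";

        // Parses lines such as "dnet:review_levels @=@ dnet:review_levels @=@ 0001 @=@ peerReviewed"
        // into a map: vocabularyId -> (termCode -> termLabel). Malformed lines are skipped.
        public static Map<String, Map<String, String>> parse(Stream<String> lines) {
            Map<String, Map<String, String>> vocabularies = new HashMap<>();
            lines.forEach(line -> {
                String[] fields = line.split(SEPARATOR);
                if (fields.length == 4) {
                    vocabularies
                        .computeIfAbsent(fields[0], k -> new HashMap<>())
                        .put(fields[2], fields[3]);
                }
            });
            return vocabularies;
        }

        public static void main(String[] args) {
            Map<String, Map<String, String>> v = parse(
                Stream.of("dnet:review_levels @=@ dnet:review_levels @=@ 0001 @=@ peerReviewed"));
            System.out.println(v.get("dnet:review_levels").get("0001")); // prints: peerReviewed
        }
    }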
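The XmlRecordFactory hunk resuming below changes how an instance's refereed field is serialized: instead of writing its string value as element text via asXmlElement, it now writes the field as a qualifier (using the dnet:review_levels codes introduced above) via mapQualifier. The following self-contained sketch illustrates the difference in output; the Qualifier bean and the two helpers are simplified stand-ins, not the project's actual XmlSerializationUtils API.

    public class RefereedSerializationSketch {

        // Simplified stand-in for the Qualifier model class.
        static class Qualifier {
            final String classid, classname, schemeid, schemename;

            Qualifier(String classid, String classname, String schemeid, String schemename) {
                this.classid = classid;
                this.classname = classname;
                this.schemeid = schemeid;
                this.schemename = schemename;
            }
        }

        // Old style: the value is rendered as plain element text.
        static String asXmlElement(String name, String value) {
            return "<" + name + ">" + value + "</" + name + ">";
        }

        // New style: code and label are rendered as attributes of an empty element.
        static String mapQualifier(String name, Qualifier q) {
            return "<" + name + " classid=\"" + q.classid + "\" classname=\"" + q.classname
                + "\" schemeid=\"" + q.schemeid + "\" schemename=\"" + q.schemename + "\"/>";
        }

        public static void main(String[] args) {
            Qualifier refereed = new Qualifier("0001", "peerReviewed", "dnet:review_levels", "dnet:review_levels");
            System.out.println(asXmlElement("refereed", refereed.classname));
            // <refereed>peerReviewed</refereed>
            System.out.println(mapQualifier("refereed", refereed));
            // <refereed classid="0001" classname="peerReviewed" schemeid="dnet:review_levels" schemename="dnet:review_levels"/>
        }
    }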
- if (instance.getRefereed() != null && isNotBlank(instance.getRefereed().getValue())) { + if (instance.getRefereed() != null && !instance.getRefereed().isBlank()) { fields .add( - XmlSerializationUtils.asXmlElement("refereed", instance.getRefereed().getValue())); + XmlSerializationUtils.mapQualifier("refereed", instance.getRefereed())); } if (instance.getProcessingchargeamount() != null && isNotBlank(instance.getProcessingchargeamount().getValue())) {
diff --git a/dhp-workflows/dhp-stats-update/pom.xml b/dhp-workflows/dhp-stats-update/pom.xml index d6ec4e6ab..397bd8d08 100644 --- a/dhp-workflows/dhp-stats-update/pom.xml +++ b/dhp-workflows/dhp-stats-update/pom.xml @@ -3,7 +3,7 @@ dhp-workflows eu.dnetlib.dhp - 1.2.2-SNAPSHOT + 1.2.3-SNAPSHOT 4.0.0 dhp-stats-update
diff --git a/dhp-workflows/dhp-worfklow-profiles/pom.xml b/dhp-workflows/dhp-worfklow-profiles/pom.xml index cb20db57e..e03362034 100644 --- a/dhp-workflows/dhp-worfklow-profiles/pom.xml +++ b/dhp-workflows/dhp-worfklow-profiles/pom.xml @@ -3,7 +3,7 @@ dhp-workflows eu.dnetlib.dhp - 1.2.2-SNAPSHOT + 1.2.3-SNAPSHOT 4.0.0
diff --git a/dhp-workflows/dhp-worfklow-profiles/src/main/resources/eu/dnetlib/dhp/wf/profiles/provision.xml b/dhp-workflows/dhp-worfklow-profiles/src/main/resources/eu/dnetlib/dhp/wf/profiles/provision.xml index 487afee4f..28cbde70d 100644 --- a/dhp-workflows/dhp-worfklow-profiles/src/main/resources/eu/dnetlib/dhp/wf/profiles/provision.xml +++ b/dhp-workflows/dhp-worfklow-profiles/src/main/resources/eu/dnetlib/dhp/wf/profiles/provision.xml @@ -4,7 +4,7 @@ - + Data Provision [OCEAN] @@ -61,11 +61,23 @@ + + + Set the target path to store the CLEANED graph + + cleanedGraphPath + /tmp/beta_provision/graph/05_graph_cleaned + + + + + + Set the target path to store the ORCID enriched graph orcidGraphPath - /tmp/beta_provision/graph/05_graph_orcid + /tmp/beta_provision/graph/06_graph_orcid @@ -75,7 +87,7 @@ Set the target path to store the BULK TAGGED graph bulkTaggingGraphPath - /tmp/beta_provision/graph/06_graph_bulktagging + /tmp/beta_provision/graph/07_graph_bulktagging @@ -85,7 +97,7 @@ Set the target path to store the AFFILIATION from INSTITUTIONAL REPOS graph affiliationGraphPath - /tmp/beta_provision/graph/07_graph_affiliation + /tmp/beta_provision/graph/08_graph_affiliation @@ -95,7 +107,7 @@ Set the target path to store the COMMUNITY from SELECTED SOURCES graph communityOrganizationGraphPath - /tmp/beta_provision/graph/08_graph_comunity_organization + /tmp/beta_provision/graph/09_graph_comunity_organization @@ -105,7 +117,7 @@ Set the target path to store the FUNDING from SEMANTIC RELATION graph fundingGraphPath - /tmp/beta_provision/graph/09_graph_funding + /tmp/beta_provision/graph/10_graph_funding @@ -115,7 +127,7 @@ Set the target path to store the COMMUNITY from SEMANTIC RELATION graph communitySemRelGraphPath - /tmp/beta_provision/graph/10_graph_comunity_sem_rel + /tmp/beta_provision/graph/11_graph_comunity_sem_rel @@ -125,7 +137,7 @@ Set the target path to store the COUNTRY enriched graph countryGraphPath - /tmp/beta_provision/graph/11_graph_country + /tmp/beta_provision/graph/12_graph_country @@ -135,7 +147,7 @@ Set the target path to store the blacklisted graph blacklistedGraphPath - /tmp/beta_provision/graph/12_graph_blacklisted + /tmp/beta_provision/graph/13_graph_blacklisted @@ -217,7 +229,8 @@ IIS { - 'graphOutputPath' : 'aggregatorGraphPath' + 'graphOutputPath' : 'aggregatorGraphPath', + 'isLookupUrl' : 'isLookUpUrl' } @@ -227,8 +240,8 @@ 'mongoDb' : 'mdstore', 'postgresURL' :
'jdbc:postgresql://beta.services.openaire.eu:5432/dnet_openaireplus', 'postgresUser' : 'dnet', - 'postgresPassword' : 'dnetPwd', - 'reuseContent' : 'true', + 'postgresPassword' : '', + 'reuseContent' : 'false', 'contentPath' : '/tmp/beta_provision/aggregator', 'workingDir' : '/tmp/beta_provision/working_dir/aggregator' } @@ -255,6 +268,8 @@ { 'oozie.wf.application.path' : '/lib/dnet/actionmanager/wf/main/oozie_app', + 'sparkExecutorCores' : '3', + 'sparkExecutorMemory' : '10G', 'activePromoteDatasetActionPayload' : 'true', 'activePromoteDatasourceActionPayload' : 'true', 'activePromoteOrganizationActionPayload' : 'true', @@ -315,6 +330,8 @@ { 'oozie.wf.application.path' : '/lib/dnet/actionmanager/wf/main/oozie_app', + 'sparkExecutorCores' : '3', + 'sparkExecutorMemory' : '10G', 'activePromoteDatasetActionPayload' : 'true', 'activePromoteDatasourceActionPayload' : 'true', 'activePromoteOrganizationActionPayload' : 'true', @@ -354,7 +371,43 @@ build-report - + + + + + + clean the properties in the graph typed as Qualifier according to the vocabulary indicated in schemeid + + executeOozieJob + IIS + + { + 'graphInputPath' : 'consistentGraphPath', + 'graphOutputPath': 'cleanedGraphPath', + 'isLookupUrl': 'isLookUpUrl' + } + + + { + 'oozie.wf.application.path' : '/lib/dnet/oa/graph/clean/oozie_app', + 'workingPath' : '/tmp/beta_provision/working_dir/clean' + } + + build-report + + + + + + + + Do we skip the graph enrichment steps? (Yes to prepare the graph for the IIS) + + NO + + + + @@ -364,7 +417,7 @@ IIS { - 'sourcePath' : 'consistentGraphPath', + 'sourcePath' : 'cleanedGraphPath', 'outputPath': 'orcidGraphPath' } @@ -552,7 +605,7 @@ 'workingDir' : '/tmp/beta_provision/working_dir/blacklist', 'postgresURL' : 'jdbc:postgresql://beta.services.openaire.eu:5432/dnet_openaireplus', 'postgresUser' : 'dnet', - 'postgresPassword' : 'dnetPwd' + 'postgresPassword' : '' } build-report @@ -561,12 +614,13 @@ + - wf_20200509_100941_857 - 2020-05-09T13:26:09+00:00 - FAILURE - eu.dnetlib.data.hadoop.rmi.HadoopServiceException: hadoop job: 0002933-200403132837156-oozie-oozi-W failed with status: KILLED, oozie log: 2020-05-09 13:23:31,194 INFO org.apache.oozie.service.JPAService: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[] No results found 2020-05-09 13:23:31,216 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@:start:] Start action [0002933-200403132837156-oozie-oozi-W@:start:] with user-retry state : userRetryCount [0], userRetryMax [0], userRetryInterval [10] 2020-05-09 13:23:31,216 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@:start:] [***0002933-200403132837156-oozie-oozi-W@:start:***]Action status=DONE 2020-05-09 13:23:31,216 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@:start:] [***0002933-200403132837156-oozie-oozi-W@:start:***]Action updated in 
DB! 2020-05-09 13:23:31,257 INFO org.apache.oozie.service.JPAService: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@:start:] No results found 2020-05-09 13:23:31,275 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@:start:] No Notification URL is defined. Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@:start: 2020-05-09 13:23:31,275 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[] No Notification URL is defined. Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W 2020-05-09 13:23:31,314 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@reset-outputpath] Start action [0002933-200403132837156-oozie-oozi-W@reset-outputpath] with user-retry state : userRetryCount [0], userRetryMax [0], userRetryInterval [10] 2020-05-09 13:23:33,897 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@reset-outputpath] [***0002933-200403132837156-oozie-oozi-W@reset-outputpath***]Action status=DONE 2020-05-09 13:23:33,897 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@reset-outputpath] [***0002933-200403132837156-oozie-oozi-W@reset-outputpath***]Action updated in DB! 
2020-05-09 13:23:33,947 INFO org.apache.oozie.service.JPAService: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@reset-outputpath] No results found 2020-05-09 13:23:33,966 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_entities] Start action [0002933-200403132837156-oozie-oozi-W@copy_entities] with user-retry state : userRetryCount [0], userRetryMax [0], userRetryInterval [10] 2020-05-09 13:23:33,966 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_entities] [***0002933-200403132837156-oozie-oozi-W@copy_entities***]Action status=DONE 2020-05-09 13:23:33,966 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_entities] [***0002933-200403132837156-oozie-oozi-W@copy_entities***]Action updated in DB! 2020-05-09 13:23:34,012 INFO org.apache.oozie.service.JPAService: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_entities] No results found 2020-05-09 13:23:34,018 INFO org.apache.oozie.service.JPAService: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_entities] No results found 2020-05-09 13:23:34,023 INFO org.apache.oozie.service.JPAService: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_entities] No results found 2020-05-09 13:23:34,029 INFO org.apache.oozie.service.JPAService: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_entities] No results found 2020-05-09 13:23:34,124 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_relation] Start action [0002933-200403132837156-oozie-oozi-W@copy_relation] with user-retry state : userRetryCount [0], userRetryMax [0], userRetryInterval [10] 2020-05-09 13:23:34,130 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_projects] Start action [0002933-200403132837156-oozie-oozi-W@copy_projects] 
with user-retry state : userRetryCount [0], userRetryMax [0], userRetryInterval [10] 2020-05-09 13:23:34,130 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_datasources] Start action [0002933-200403132837156-oozie-oozi-W@copy_datasources] with user-retry state : userRetryCount [0], userRetryMax [0], userRetryInterval [10] 2020-05-09 13:23:34,140 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_organization] Start action [0002933-200403132837156-oozie-oozi-W@copy_organization] with user-retry state : userRetryCount [0], userRetryMax [0], userRetryInterval [10] 2020-05-09 13:23:35,010 INFO org.apache.oozie.action.hadoop.DistcpActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_projects] checking action, hadoop job ID [job_1585920557248_14569] status [RUNNING] 2020-05-09 13:23:35,018 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_projects] [***0002933-200403132837156-oozie-oozi-W@copy_projects***]Action status=RUNNING 2020-05-09 13:23:35,018 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_projects] [***0002933-200403132837156-oozie-oozi-W@copy_projects***]Action updated in DB! 2020-05-09 13:23:35,022 INFO org.apache.oozie.action.hadoop.DistcpActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_relation] checking action, hadoop job ID [job_1585920557248_14568] status [RUNNING] 2020-05-09 13:23:35,027 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_projects] No Notification URL is defined. 
Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@copy_projects 2020-05-09 13:23:35,028 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_relation] [***0002933-200403132837156-oozie-oozi-W@copy_relation***]Action status=RUNNING 2020-05-09 13:23:35,028 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_relation] [***0002933-200403132837156-oozie-oozi-W@copy_relation***]Action updated in DB! 2020-05-09 13:23:35,031 INFO org.apache.oozie.action.hadoop.DistcpActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_datasources] checking action, hadoop job ID [job_1585920557248_14570] status [RUNNING] 2020-05-09 13:23:35,035 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_datasources] [***0002933-200403132837156-oozie-oozi-W@copy_datasources***]Action status=RUNNING 2020-05-09 13:23:35,035 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_datasources] [***0002933-200403132837156-oozie-oozi-W@copy_datasources***]Action updated in DB! 2020-05-09 13:23:35,037 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_relation] No Notification URL is defined. Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@copy_relation 2020-05-09 13:23:35,048 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_datasources] No Notification URL is defined. 
Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@copy_datasources 2020-05-09 13:23:35,072 INFO org.apache.oozie.action.hadoop.DistcpActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_organization] checking action, hadoop job ID [job_1585920557248_14571] status [RUNNING] 2020-05-09 13:23:35,076 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_organization] [***0002933-200403132837156-oozie-oozi-W@copy_organization***]Action status=RUNNING 2020-05-09 13:23:35,076 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_organization] [***0002933-200403132837156-oozie-oozi-W@copy_organization***]Action updated in DB! 2020-05-09 13:23:35,084 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_organization] No Notification URL is defined. Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@copy_organization 2020-05-09 13:23:35,090 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_entities] No Notification URL is defined. Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@copy_entities 2020-05-09 13:23:35,090 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@reset-outputpath] No Notification URL is defined. 
Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@reset-outputpath 2020-05-09 13:23:58,926 INFO org.apache.oozie.servlet.CallbackServlet: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_datasources] callback for action [0002933-200403132837156-oozie-oozi-W@copy_datasources] 2020-05-09 13:23:59,085 INFO org.apache.oozie.action.hadoop.DistcpActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_datasources] checking action, hadoop job ID [job_1585920557248_14570] status [RUNNING] 2020-05-09 13:23:59,242 INFO org.apache.oozie.servlet.CallbackServlet: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_projects] callback for action [0002933-200403132837156-oozie-oozi-W@copy_projects] 2020-05-09 13:23:59,386 INFO org.apache.oozie.action.hadoop.DistcpActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_projects] checking action, hadoop job ID [job_1585920557248_14569] status [RUNNING] 2020-05-09 13:24:01,343 INFO org.apache.oozie.servlet.CallbackServlet: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_datasources] callback for action [0002933-200403132837156-oozie-oozi-W@copy_datasources] 2020-05-09 13:24:01,418 INFO org.apache.oozie.action.hadoop.DistcpActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_datasources] Hadoop Jobs launched : [job_1585920557248_14573] 2020-05-09 13:24:01,418 INFO org.apache.oozie.action.hadoop.DistcpActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_datasources] action completed, external ID [job_1585920557248_14570] 2020-05-09 13:24:01,493 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_datasources] No Notification URL is defined. 
Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@copy_datasources 2020-05-09 13:24:01,935 INFO org.apache.oozie.servlet.CallbackServlet: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_projects] callback for action [0002933-200403132837156-oozie-oozi-W@copy_projects] 2020-05-09 13:24:02,012 INFO org.apache.oozie.action.hadoop.DistcpActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_projects] Hadoop Jobs launched : [job_1585920557248_14572] 2020-05-09 13:24:02,012 INFO org.apache.oozie.action.hadoop.DistcpActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_projects] action completed, external ID [job_1585920557248_14569] 2020-05-09 13:24:02,076 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_projects] No Notification URL is defined. Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@copy_projects 2020-05-09 13:25:03,172 INFO org.apache.oozie.servlet.CallbackServlet: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_organization] callback for action [0002933-200403132837156-oozie-oozi-W@copy_organization] 2020-05-09 13:25:03,336 INFO org.apache.oozie.action.hadoop.DistcpActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_organization] checking action, hadoop job ID [job_1585920557248_14571] status [RUNNING] 2020-05-09 13:25:05,598 INFO org.apache.oozie.servlet.CallbackServlet: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_organization] callback for action [0002933-200403132837156-oozie-oozi-W@copy_organization] 2020-05-09 13:25:05,688 INFO org.apache.oozie.action.hadoop.DistcpActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_organization] Hadoop Jobs launched : [job_1585920557248_14574] 2020-05-09 13:25:05,691 INFO org.apache.oozie.action.hadoop.DistcpActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_organization] action completed, external ID [job_1585920557248_14571] 2020-05-09 13:25:05,748 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_organization] No Notification URL is 
defined. Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@copy_organization 2020-05-09 13:25:23,274 INFO org.apache.oozie.servlet.CallbackServlet: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_relation] callback for action [0002933-200403132837156-oozie-oozi-W@copy_relation] 2020-05-09 13:25:23,409 INFO org.apache.oozie.action.hadoop.DistcpActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_relation] checking action, hadoop job ID [job_1585920557248_14568] status [RUNNING] 2020-05-09 13:25:25,419 INFO org.apache.oozie.servlet.CallbackServlet: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_relation] callback for action [0002933-200403132837156-oozie-oozi-W@copy_relation] 2020-05-09 13:25:25,510 INFO org.apache.oozie.action.hadoop.DistcpActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_relation] Hadoop Jobs launched : [job_1585920557248_14575] 2020-05-09 13:25:25,511 INFO org.apache.oozie.action.hadoop.DistcpActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_relation] action completed, external ID [job_1585920557248_14568] 2020-05-09 13:25:25,565 INFO org.apache.oozie.service.JPAService: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_relation] No results found 2020-05-09 13:25:25,585 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_wait] Start action [0002933-200403132837156-oozie-oozi-W@copy_wait] with user-retry state : userRetryCount [0], userRetryMax [0], userRetryInterval [10] 2020-05-09 13:25:25,585 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_wait] [***0002933-200403132837156-oozie-oozi-W@copy_wait***]Action status=DONE 2020-05-09 13:25:25,585 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_wait] [***0002933-200403132837156-oozie-oozi-W@copy_wait***]Action updated in DB! 
2020-05-09 13:25:25,627 INFO org.apache.oozie.service.JPAService: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_wait] No results found 2020-05-09 13:25:25,648 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@fork_prepare_assoc_step1] Start action [0002933-200403132837156-oozie-oozi-W@fork_prepare_assoc_step1] with user-retry state : userRetryCount [0], userRetryMax [0], userRetryInterval [10] 2020-05-09 13:25:25,648 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@fork_prepare_assoc_step1] [***0002933-200403132837156-oozie-oozi-W@fork_prepare_assoc_step1***]Action status=DONE 2020-05-09 13:25:25,648 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@fork_prepare_assoc_step1] [***0002933-200403132837156-oozie-oozi-W@fork_prepare_assoc_step1***]Action updated in DB! 2020-05-09 13:25:25,694 INFO org.apache.oozie.service.JPAService: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@fork_prepare_assoc_step1] No results found 2020-05-09 13:25:25,700 INFO org.apache.oozie.service.JPAService: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@fork_prepare_assoc_step1] No results found 2020-05-09 13:25:25,706 INFO org.apache.oozie.service.JPAService: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@fork_prepare_assoc_step1] No results found 2020-05-09 13:25:25,711 INFO org.apache.oozie.service.JPAService: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@fork_prepare_assoc_step1] No results found 2020-05-09 13:25:25,801 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_dataset] Start action [0002933-200403132837156-oozie-oozi-W@join_prepare_dataset] with user-retry state : userRetryCount [0], userRetryMax [0], userRetryInterval [10] 2020-05-09 13:25:25,825 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] 
ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_software] Start action [0002933-200403132837156-oozie-oozi-W@join_prepare_software] with user-retry state : userRetryCount [0], userRetryMax [0], userRetryInterval [10] 2020-05-09 13:25:25,825 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_otherresearchproduct] Start action [0002933-200403132837156-oozie-oozi-W@join_prepare_otherresearchproduct] with user-retry state : userRetryCount [0], userRetryMax [0], userRetryInterval [10] 2020-05-09 13:25:25,828 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_publication] Start action [0002933-200403132837156-oozie-oozi-W@join_prepare_publication] with user-retry state : userRetryCount [0], userRetryMax [0], userRetryInterval [10] 2020-05-09 13:25:27,165 INFO org.apache.oozie.action.hadoop.SparkActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_otherresearchproduct] checking action, hadoop job ID [job_1585920557248_14578] status [RUNNING] 2020-05-09 13:25:27,170 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_otherresearchproduct] [***0002933-200403132837156-oozie-oozi-W@join_prepare_otherresearchproduct***]Action status=RUNNING 2020-05-09 13:25:27,170 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_otherresearchproduct] [***0002933-200403132837156-oozie-oozi-W@join_prepare_otherresearchproduct***]Action updated in DB! 2020-05-09 13:25:27,179 INFO org.apache.oozie.action.hadoop.SparkActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_software] checking action, hadoop job ID [job_1585920557248_14577] status [RUNNING] 2020-05-09 13:25:27,181 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_otherresearchproduct] No Notification URL is defined. 
Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@join_prepare_otherresearchproduct 2020-05-09 13:25:27,183 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_software] [***0002933-200403132837156-oozie-oozi-W@join_prepare_software***]Action status=RUNNING 2020-05-09 13:25:27,183 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_software] [***0002933-200403132837156-oozie-oozi-W@join_prepare_software***]Action updated in DB! 2020-05-09 13:25:27,188 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_software] No Notification URL is defined. Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@join_prepare_software 2020-05-09 13:25:27,617 INFO org.apache.oozie.action.hadoop.SparkActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_publication] checking action, hadoop job ID [job_1585920557248_14576] status [RUNNING] 2020-05-09 13:25:27,622 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_publication] [***0002933-200403132837156-oozie-oozi-W@join_prepare_publication***]Action status=RUNNING 2020-05-09 13:25:27,622 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_publication] [***0002933-200403132837156-oozie-oozi-W@join_prepare_publication***]Action updated in DB! 2020-05-09 13:25:27,625 INFO org.apache.oozie.action.hadoop.SparkActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_dataset] checking action, hadoop job ID [job_1585920557248_14579] status [RUNNING] 2020-05-09 13:25:27,628 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_publication] No Notification URL is defined. 
Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@join_prepare_publication 2020-05-09 13:25:27,629 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_dataset] [***0002933-200403132837156-oozie-oozi-W@join_prepare_dataset***]Action status=RUNNING 2020-05-09 13:25:27,629 INFO org.apache.oozie.command.wf.ForkedActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_dataset] [***0002933-200403132837156-oozie-oozi-W@join_prepare_dataset***]Action updated in DB! 2020-05-09 13:25:27,634 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_dataset] No Notification URL is defined. Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@join_prepare_dataset 2020-05-09 13:25:27,639 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@fork_prepare_assoc_step1] No Notification URL is defined. Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@fork_prepare_assoc_step1 2020-05-09 13:25:27,639 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_wait] No Notification URL is defined. Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@copy_wait 2020-05-09 13:25:27,640 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@copy_relation] No Notification URL is defined. 
Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@copy_relation 2020-05-09 13:25:41,416 INFO org.apache.oozie.servlet.CallbackServlet: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_software] callback for action [0002933-200403132837156-oozie-oozi-W@join_prepare_software] 2020-05-09 13:25:41,490 INFO org.apache.oozie.action.hadoop.SparkActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_software] action completed, external ID [job_1585920557248_14577] 2020-05-09 13:25:41,495 WARN org.apache.oozie.action.hadoop.SparkActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_software] Launcher ERROR, reason: Main class [org.apache.oozie.action.hadoop.SparkMain], main() threw exception, File file:/data/3/yarn/nm/usercache/dnet.beta/appcache/application_1585920557248_14577/container_e68_1585920557248_14577_01_000002/dhp-propagation-1.1.8-SNAPSHOT.jar does not exist 2020-05-09 13:25:41,495 WARN org.apache.oozie.action.hadoop.SparkActionExecutor: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_software] Launcher exception: File file:/data/3/yarn/nm/usercache/dnet.beta/appcache/application_1585920557248_14577/container_e68_1585920557248_14577_01_000002/dhp-propagation-1.1.8-SNAPSHOT.jar does not exist java.io.FileNotFoundException: File file:/data/3/yarn/nm/usercache/dnet.beta/appcache/application_1585920557248_14577/container_e68_1585920557248_14577_01_000002/dhp-propagation-1.1.8-SNAPSHOT.jar does not exist at org.apache.hadoop.fs.RawLocalFileSystem.deprecatedGetFileStatus(RawLocalFileSystem.java:598) at org.apache.hadoop.fs.RawLocalFileSystem.getFileLinkStatusInternal(RawLocalFileSystem.java:811) at org.apache.hadoop.fs.RawLocalFileSystem.getFileStatus(RawLocalFileSystem.java:588) at org.apache.hadoop.fs.FilterFileSystem.getFileStatus(FilterFileSystem.java:432) at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:340) at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:292) at org.apache.spark.deploy.yarn.Client.copyFileToRemote(Client.scala:404) at org.apache.spark.deploy.yarn.Client.org$apache$spark$deploy$yarn$Client$$distribute$1(Client.scala:496) at org.apache.spark.deploy.yarn.Client$$anonfun$prepareLocalResources$9.apply(Client.scala:595) at org.apache.spark.deploy.yarn.Client$$anonfun$prepareLocalResources$9.apply(Client.scala:594) at scala.Option.foreach(Option.scala:257) at org.apache.spark.deploy.yarn.Client.prepareLocalResources(Client.scala:594) at org.apache.spark.deploy.yarn.Client.createContainerLaunchContext(Client.scala:886) at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:180) at org.apache.spark.deploy.yarn.Client.run(Client.scala:1156) at org.apache.spark.deploy.yarn.YarnClusterApplication.start(Client.scala:1608) at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:849) at 
org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167) at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195) at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86) at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924) at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933) at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) at org.apache.oozie.action.hadoop.SparkMain.runSpark(SparkMain.java:178) at org.apache.oozie.action.hadoop.SparkMain.run(SparkMain.java:90) at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:81) at org.apache.oozie.action.hadoop.SparkMain.main(SparkMain.java:57) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:235) at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54) at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:459) at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343) at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:164) at java.security.AccessController.doPrivileged(Native Method) at javax.security.auth.Subject.doAs(Subject.java:422) at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1924) at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158) 2020-05-09 13:25:41,514 INFO org.apache.oozie.command.wf.ActionEndXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_software] ERROR is considered as FAILED for SLA 2020-05-09 13:25:41,541 INFO org.apache.oozie.service.JPAService: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_software] No results found 2020-05-09 13:25:41,580 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@Kill] Start action [0002933-200403132837156-oozie-oozi-W@Kill] with user-retry state : userRetryCount [0], userRetryMax [0], userRetryInterval [10] 2020-05-09 13:25:41,580 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@Kill] [***0002933-200403132837156-oozie-oozi-W@Kill***]Action status=DONE 2020-05-09 13:25:41,580 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@Kill] [***0002933-200403132837156-oozie-oozi-W@Kill***]Action updated in DB! 
2020-05-09 13:25:41,692 WARN org.apache.oozie.workflow.lite.LiteWorkflowInstance: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@Kill] Workflow completed [KILLED], killing [3] running nodes
2020-05-09 13:25:41,760 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@Kill] No Notification URL is defined. Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@Kill
2020-05-09 13:25:41,766 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_software] No Notification URL is defined. Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@join_prepare_software
2020-05-09 13:25:41,852 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_otherresearchproduct] No Notification URL is defined. Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@join_prepare_otherresearchproduct
2020-05-09 13:25:41,914 INFO org.apache.oozie.servlet.CallbackServlet: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_dataset] callback for action [0002933-200403132837156-oozie-oozi-W@join_prepare_dataset]
2020-05-09 13:25:41,920 ERROR org.apache.oozie.command.wf.CompletedActionXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_dataset] XException, org.apache.oozie.command.CommandException: E0800: Action it is not running its in [KILLED] state, action [0002933-200403132837156-oozie-oozi-W@join_prepare_dataset]
	at org.apache.oozie.command.wf.CompletedActionXCommand.eagerVerifyPrecondition(CompletedActionXCommand.java:92)
	at org.apache.oozie.command.XCommand.call(XCommand.java:257)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at org.apache.oozie.service.CallableQueueService$CallableWrapper.run(CallableQueueService.java:179)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
2020-05-09 13:25:41,938 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_publication] No Notification URL is defined. Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@join_prepare_publication
2020-05-09 13:25:42,005 INFO org.apache.oozie.servlet.CallbackServlet: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_publication] callback for action [0002933-200403132837156-oozie-oozi-W@join_prepare_publication]
2020-05-09 13:25:42,010 ERROR org.apache.oozie.command.wf.CompletedActionXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_publication] XException, org.apache.oozie.command.CommandException: E0800: Action it is not running its in [KILLED] state, action [0002933-200403132837156-oozie-oozi-W@join_prepare_publication]
	at org.apache.oozie.command.wf.CompletedActionXCommand.eagerVerifyPrecondition(CompletedActionXCommand.java:92)
	at org.apache.oozie.command.XCommand.call(XCommand.java:257)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at org.apache.oozie.service.CallableQueueService$CallableWrapper.run(CallableQueueService.java:179)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
2020-05-09 13:25:42,028 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[dnet.beta] GROUP[-] TOKEN[] APP[orcid_to_result_from_semrel_propagation] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[] No Notification URL is defined. Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W
2020-05-09 13:25:42,028 INFO org.apache.oozie.command.wf.WorkflowNotificationXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_dataset] No Notification URL is defined. Therefore nothing to notify for job 0002933-200403132837156-oozie-oozi-W@join_prepare_dataset
2020-05-09 13:25:42,113 INFO org.apache.oozie.servlet.CallbackServlet: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[-] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_otherresearchproduct] callback for action [0002933-200403132837156-oozie-oozi-W@join_prepare_otherresearchproduct]
2020-05-09 13:25:42,116 ERROR org.apache.oozie.command.wf.CompletedActionXCommand: SERVER[iis-cdh5-test-m3.ocean.icm.edu.pl] USER[-] GROUP[-] TOKEN[] APP[-] JOB[0002933-200403132837156-oozie-oozi-W] ACTION[0002933-200403132837156-oozie-oozi-W@join_prepare_otherresearchproduct] XException, org.apache.oozie.command.CommandException: E0800: Action it is not running its in [KILLED] state, action [0002933-200403132837156-oozie-oozi-W@join_prepare_otherresearchproduct]
	at org.apache.oozie.command.wf.CompletedActionXCommand.eagerVerifyPrecondition(CompletedActionXCommand.java:92)
	at org.apache.oozie.command.XCommand.call(XCommand.java:257)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at org.apache.oozie.service.CallableQueueService$CallableWrapper.run(CallableQueueService.java:179)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
+ wf_20200615_163630_609
+ 2020-06-15T17:08:00+00:00
+ SUCCESS
+
\ No newline at end of file
diff --git a/dhp-workflows/pom.xml b/dhp-workflows/pom.xml
index 11e5be0b6..8d8d57c84 100644
--- a/dhp-workflows/pom.xml
+++ b/dhp-workflows/pom.xml
@@ -6,7 +6,7 @@
eu.dnetlib.dhp
dhp
- 1.2.2-SNAPSHOT
+ 1.2.3-SNAPSHOT
../
diff --git a/pom.xml b/pom.xml
index f4b96fefb..06e2b7aaf 100644
--- a/pom.xml
+++ b/pom.xml
@@ -3,7 +3,7 @@
4.0.0
eu.dnetlib.dhp
dhp
- 1.2.2-SNAPSHOT
+ 1.2.3-SNAPSHOT
pom
@@ -193,7 +193,6 @@
net.sf.saxon
Saxon-HE
9.9.1-6
- provided
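Note on the last hunk: removing the provided scope line leaves the Saxon-HE dependency at Maven's default compile scope, so the artifact is resolved transitively and packaged with the build instead of being expected on the cluster classpath. A sketch of the resulting dependency block in the root pom — the element tags are reconstructed from standard POM layout, since the XML markup was stripped in the hunks above:

<dependency>
    <groupId>net.sf.saxon</groupId>
    <artifactId>Saxon-HE</artifactId>
    <version>9.9.1-6</version>
</dependency>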