forked from D-Net/dnet-hadoop
Compare commits: master...usage-stat (89 commits)

Author | SHA1 | Date
---|---|---
Spyros Zoupanos | cfea668999 | |
Spyros Zoupanos | 6ff5436991 | |
Spyros Zoupanos | 8b591937e1 | |
Spyros Zoupanos | 6b5b6796b7 | |
Spyros Zoupanos | 1ca74ce830 | |
Spyros Zoupanos | 7fdf994eb6 | |
Spyros Zoupanos | 6cc58e2720 | |
Spyros Zoupanos | a1dc930486 | |
Spyros Zoupanos | 8f24a6388e | |
Spyros Zoupanos | 8826684130 | |
Spyros Zoupanos | d9cc70d334 | |
Spyros Zoupanos | a852dd3a0d | |
Spyros Zoupanos | dd9df4ae58 | |
Spyros Zoupanos | 8b08f35dfe | |
Spyros Zoupanos | 715bbd487d | |
Spyros Zoupanos | 2de17e7f32 | |
Spyros Zoupanos | 8da64d8f54 | |
Spyros Zoupanos | 9a1512004f | |
Spyros Zoupanos | 915b3287a8 | |
Spyros Zoupanos | 806a450465 | |
Spyros Zoupanos | 48d6bf28eb | |
Spyros Zoupanos | 2b330dd84c | |
Spyros Zoupanos | 7b7075cfdd | |
Spyros Zoupanos | e2748fea95 | |
Spyros Zoupanos | 2e2e2b8b29 | |
Spyros Zoupanos | 07e750939f | |
Spyros Zoupanos | c6bbe215e1 | |
Spyros Zoupanos | c1257ac7c5 | |
Spyros Zoupanos | dc6114a24e | |
Spyros Zoupanos | 73656f7f31 | |
Spyros Zoupanos | a497b19b21 | |
Spyros Zoupanos | bc5cf28375 | |
Spyros Zoupanos | 69640f5fc4 | |
Spyros Zoupanos | 1ceb363cb2 | |
Spyros Zoupanos | 65acece7c4 | |
Spyros Zoupanos | 0369f36776 | |
Spyros Zoupanos | 3c11acde0c | |
Spyros Zoupanos | a2d64b4644 | |
Spyros Zoupanos | 373f4fdbd8 | |
Spyros Zoupanos | 8a39ec44e0 | |
Spyros Zoupanos | 2b2bac9b28 | |
Spyros Zoupanos | 053588c365 | |
Spyros Zoupanos | 2e701c547d | |
Spyros Zoupanos | b3d51a954a | |
Spyros Zoupanos | ed4e9f46d9 | |
Spyros Zoupanos | 03fb2b9e01 | |
Spyros Zoupanos | 2ae67cfdba | |
Spyros Zoupanos | 958fb1a343 | |
Spyros Zoupanos | 1dcb197f02 | |
Spyros Zoupanos | 17f2748eb4 | |
Spyros Zoupanos | 17acbb7fc6 | |
Spyros Zoupanos | 49de94c4b1 | |
Spyros Zoupanos | 015f6e88df | |
Spyros Zoupanos | 8bb00add0d | |
Spyros Zoupanos | ba33df29b4 | |
Spyros Zoupanos | 55222a2516 | |
Spyros Zoupanos | 08a102a76c | |
Spyros Zoupanos | 3d5904fb41 | |
Spyros Zoupanos | 95fee808fd | |
Spyros Zoupanos | 196946cd6b | |
Spyros Zoupanos | f8e91cdc5c | |
Spyros Zoupanos | 9caac3e3e3 | |
Spyros Zoupanos | 8ddf1dcc15 | |
Spyros Zoupanos | 968d53f119 | |
Spyros Zoupanos | f78b5d3f86 | |
Spyros Zoupanos | 2d2d1b9694 | |
Spyros Zoupanos | 1d9f8f79a8 | |
Spyros Zoupanos | 398f1f6f15 | |
Spyros Zoupanos | 81102dd791 | |
Spyros Zoupanos | 719f9e3cd9 | |
Spyros Zoupanos | e2c70f64ed | |
Spyros Zoupanos | 5af2abbea5 | |
Spyros Zoupanos | cf7b9c6db3 | |
Spyros Zoupanos | 637e61bb0f | |
Spyros Zoupanos | d770d7043d | |
Spyros Zoupanos | 293d6accd4 | |
Spyros Zoupanos | f3dda9858c | |
Spyros Zoupanos | 8db9a7ccdc | |
Spyros Zoupanos | c035fa7648 | |
Spyros Zoupanos | 4c00343bbd | |
Spyros Zoupanos | b213da51c4 | |
Spyros Zoupanos | bf820a98b4 | |
Spyros Zoupanos | 9cdea87c7a | |
Spyros Zoupanos | 66c7ddfc5e | |
Spyros Zoupanos | 98ba2d0282 | |
Spyros Zoupanos | 0b6f302652 | |
Spyros Zoupanos | c0b509abfb | |
Spyros Zoupanos | cabe92d155 | |
Spyros Zoupanos | af62b14f91 |

@@ -1,4 +1,5 @@
 .DS_Store
+._*
 .idea
 *.iws
 *.ipr

@@ -6,7 +6,7 @@
     <parent>
         <groupId>eu.dnetlib.dhp</groupId>
         <artifactId>dhp-build</artifactId>
-        <version>1.2.3-SNAPSHOT</version>
+        <version>1.1.7-SNAPSHOT</version>
     </parent>

     <artifactId>dhp-build-assembly-resources</artifactId>

@@ -6,7 +6,7 @@
     <parent>
         <groupId>eu.dnetlib.dhp</groupId>
        <artifactId>dhp-build</artifactId>
-        <version>1.2.3-SNAPSHOT</version>
+        <version>1.1.7-SNAPSHOT</version>
     </parent>

     <artifactId>dhp-build-properties-maven-plugin</artifactId>

@@ -1,11 +1,13 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

     <modelVersion>4.0.0</modelVersion>

     <groupId>eu.dnetlib.dhp</groupId>
     <artifactId>dhp-code-style</artifactId>
-    <version>1.2.3-SNAPSHOT</version>
+    <version>1.1.7-SNAPSHOT</version>

     <packaging>jar</packaging>

@@ -4,7 +4,7 @@
     <parent>
         <groupId>eu.dnetlib.dhp</groupId>
         <artifactId>dhp</artifactId>
-        <version>1.2.3-SNAPSHOT</version>
+        <version>1.1.7-SNAPSHOT</version>
     </parent>
     <artifactId>dhp-build</artifactId>
     <packaging>pom</packaging>

@@ -5,7 +5,7 @@
     <parent>
         <groupId>eu.dnetlib.dhp</groupId>
         <artifactId>dhp</artifactId>
-        <version>1.2.3-SNAPSHOT</version>
+        <version>1.1.7-SNAPSHOT</version>
         <relativePath>../</relativePath>
     </parent>

@@ -83,10 +83,6 @@
         <groupId>com.jayway.jsonpath</groupId>
         <artifactId>json-path</artifactId>
     </dependency>
-    <dependency>
-        <groupId>org.postgresql</groupId>
-        <artifactId>postgresql</artifactId>
-    </dependency>
     </dependencies>

 </project>

@@ -2,7 +2,6 @@
 package eu.dnetlib.dhp.common;

 import java.io.Serializable;
-import java.util.function.Consumer;
 import java.util.function.Supplier;

 /** Provides serializable and throwing extensions to standard functional interfaces. */

@@ -11,16 +10,6 @@ public class FunctionalInterfaceSupport {
     private FunctionalInterfaceSupport() {
     }

-    /**
-     * Serializable consumer of any kind of objects. To be used withing spark processing pipelines when supplying
-     * functions externally.
-     *
-     * @param <T>
-     */
-    @FunctionalInterface
-    public interface SerializableConsumer<T> extends Consumer<T>, Serializable {
-    }
-
     /**
     * Serializable supplier of any kind of objects. To be used withing spark processing pipelines when supplying
     * functions externally.

@@ -1,27 +0,0 @@
-
-package eu.dnetlib.dhp.common;
-
-import static org.junit.jupiter.api.Assertions.*;
-
-import org.junit.jupiter.api.Test;
-
-public class PacePersonTest {
-
-    @Test
-    public void pacePersonTest1() {
-
-        PacePerson p = new PacePerson("Artini, Michele", false);
-        assertEquals("Artini", p.getSurnameString());
-        assertEquals("Michele", p.getNameString());
-        assertEquals("Artini, Michele", p.getNormalisedFullname());
-    }
-
-    @Test
-    public void pacePersonTest2() {
-        PacePerson p = new PacePerson("Michele G. Artini", false);
-        assertEquals("Artini, Michele G.", p.getNormalisedFullname());
-        assertEquals("Michele G", p.getNameString());
-        assertEquals("Artini", p.getSurnameString());
-    }
-
-}

@@ -5,7 +5,7 @@
     <parent>
         <groupId>eu.dnetlib.dhp</groupId>
         <artifactId>dhp</artifactId>
-        <version>1.2.3-SNAPSHOT</version>
+        <version>1.1.7-SNAPSHOT</version>
         <relativePath>../</relativePath>
     </parent>

@@ -1,69 +0,0 @@
-
-package eu.dnetlib.dhp.schema.common;
-
-import java.util.Comparator;
-
-import eu.dnetlib.dhp.schema.oaf.Qualifier;
-
-public class LicenseComparator implements Comparator<Qualifier> {
-
-    @Override
-    public int compare(Qualifier left, Qualifier right) {
-
-        if (left == null && right == null)
-            return 0;
-        if (left == null)
-            return 1;
-        if (right == null)
-            return -1;
-
-        String lClass = left.getClassid();
-        String rClass = right.getClassid();
-
-        if (lClass.equals(rClass))
-            return 0;
-
-        if (lClass.equals("OPEN SOURCE"))
-            return -1;
-        if (rClass.equals("OPEN SOURCE"))
-            return 1;
-
-        if (lClass.equals("OPEN"))
-            return -1;
-        if (rClass.equals("OPEN"))
-            return 1;
-
-        if (lClass.equals("6MONTHS"))
-            return -1;
-        if (rClass.equals("6MONTHS"))
-            return 1;
-
-        if (lClass.equals("12MONTHS"))
-            return -1;
-        if (rClass.equals("12MONTHS"))
-            return 1;
-
-        if (lClass.equals("EMBARGO"))
-            return -1;
-        if (rClass.equals("EMBARGO"))
-            return 1;
-
-        if (lClass.equals("RESTRICTED"))
-            return -1;
-        if (rClass.equals("RESTRICTED"))
-            return 1;
-
-        if (lClass.equals("CLOSED"))
-            return -1;
-        if (rClass.equals("CLOSED"))
-            return 1;
-
-        if (lClass.equals("UNKNOWN"))
-            return -1;
-        if (rClass.equals("UNKNOWN"))
-            return 1;
-
-        // Else (but unlikely), lexicographical ordering will do.
-        return lClass.compareTo(rClass);
-    }
-}

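The removed LicenseComparator ranked access-right qualifiers from most to least open (OPEN SOURCE, OPEN, 6MONTHS, 12MONTHS, EMBARGO, RESTRICTED, CLOSED, UNKNOWN, then lexicographic). A minimal usage sketch under that assumption; the `q(...)` helper is hypothetical, only `Qualifier.setClassid` from this codebase is used:

```java
import java.util.Arrays;
import java.util.List;

import eu.dnetlib.dhp.schema.common.LicenseComparator;
import eu.dnetlib.dhp.schema.oaf.Qualifier;

public class LicensePick {

    // Hypothetical helper: builds a Qualifier carrying just the classid the comparator inspects.
    static Qualifier q(String classid) {
        Qualifier qualifier = new Qualifier();
        qualifier.setClassid(classid);
        return qualifier;
    }

    public static void main(String[] args) {
        List<Qualifier> rights = Arrays.asList(q("CLOSED"), q("OPEN"), q("EMBARGO"));
        // min() under the comparator selects the most open access right: "OPEN"
        Qualifier best = rights.stream().min(new LicenseComparator()).get();
        System.out.println(best.getClassid());
    }
}
```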
@@ -13,8 +13,6 @@ public class ModelConstants {
     public static final String DNET_DATA_CITE_DATE = "dnet:dataCite_date";
     public static final String DNET_DATA_CITE_RESOURCE = "dnet:dataCite_resource";
     public static final String DNET_PROVENANCE_ACTIONS = "dnet:provenanceActions";
-    public static final String DNET_COUNTRY_TYPE = "dnet:countries";
-    public static final String DNET_REVIEW_LEVELS = "dnet:review_levels";

     public static final String SYSIMPORT_CROSSWALK_REPOSITORY = "sysimport:crosswalk:repository";
     public static final String SYSIMPORT_CROSSWALK_ENTITYREGISTRY = "sysimport:crosswalk:entityregistry";

@@ -26,10 +24,6 @@ public class ModelConstants {
     public static final String ORP_RESULTTYPE_CLASSID = "other";

     public static final String RESULT_RESULT = "resultResult";
-    /**
-     * @deprecated Use {@link ModelConstants#RELATIONSHIP} instead.
-     */
-    @Deprecated
     public static final String PUBLICATION_DATASET = "publicationDataset";
     public static final String IS_RELATED_TO = "isRelatedTo";
     public static final String SUPPLEMENT = "supplement";

@@ -39,12 +33,6 @@ public class ModelConstants {
     public static final String IS_PART_OF = "IsPartOf";
     public static final String HAS_PARTS = "HasParts";
     public static final String RELATIONSHIP = "relationship";
-    public static final String CITATION = "citation";
-    public static final String CITES = "cites";
-    public static final String IS_CITED_BY = "IsCitedBy";
-    public static final String REVIEW = "review";
-    public static final String REVIEWS = "reviews";
-    public static final String IS_REVIEWED_BY = "IsReviewedBy";

     public static final String RESULT_PROJECT = "resultProject";
     public static final String OUTCOME = "outcome";

@@ -61,16 +49,6 @@ public class ModelConstants {
     public static final String HAS_PARTICIPANT = "hasParticipant";
     public static final String IS_PARTICIPANT = "isParticipant";

-    public static final String RESULT_ORGANIZATION = "resultOrganization";
-    public static final String AFFILIATION = "affiliation";
-    public static final String IS_AUTHOR_INSTITUTION_OF = "isAuthorInstitutionOf";
-    public static final String HAS_AUTHOR_INSTITUTION = "hasAuthorInstitution";
-
-    public static final String MERGES = "merges";
-
-    public static final String UNKNOWN = "UNKNOWN";
-    public static final String NOT_AVAILABLE = "not available";
-
     public static final Qualifier PUBLICATION_DEFAULT_RESULTTYPE = qualifier(
         PUBLICATION_RESULTTYPE_CLASSID, PUBLICATION_RESULTTYPE_CLASSID,
         DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);

@@ -1,15 +1,10 @@

 package eu.dnetlib.dhp.schema.common;

-import static com.google.common.base.Preconditions.checkArgument;
-
 import java.util.Map;
-import java.util.Objects;
 import java.util.Optional;
 import java.util.function.Function;

-import org.apache.commons.lang3.StringUtils;
-
 import com.google.common.collect.Maps;

 import eu.dnetlib.dhp.schema.oaf.*;

@@ -18,7 +13,7 @@ import eu.dnetlib.dhp.schema.oaf.*;
 public class ModelSupport {

     /** Defines the mapping between the actual entity type and the main entity type */
-    private static Map<EntityType, MainEntityType> entityMapping = Maps.newHashMap();
+    private static final Map<EntityType, MainEntityType> entityMapping = Maps.newHashMap();

     static {
         entityMapping.put(EntityType.publication, MainEntityType.result);

@@ -58,253 +53,11 @@ public class ModelSupport {
         oafTypes.put("relation", Relation.class);
     }

-    public static final Map<Class, String> idPrefixMap = Maps.newHashMap();
-
-    static {
-        idPrefixMap.put(Datasource.class, "10");
-        idPrefixMap.put(Organization.class, "20");
-        idPrefixMap.put(Project.class, "40");
-        idPrefixMap.put(Dataset.class, "50");
-        idPrefixMap.put(OtherResearchProduct.class, "50");
-        idPrefixMap.put(Software.class, "50");
-        idPrefixMap.put(Publication.class, "50");
-    }
-
-    public static final Map<String, String> entityIdPrefix = Maps.newHashMap();
-
-    static {
-        entityIdPrefix.put("datasource", "10");
-        entityIdPrefix.put("organization", "20");
-        entityIdPrefix.put("project", "40");
-        entityIdPrefix.put("result", "50");
-    }
-
-    public static final Map<String, RelationInverse> relationInverseMap = Maps.newHashMap();
-
-    static {
-        relationInverseMap
-            .put(
-                "personResult_authorship_isAuthorOf", new RelationInverse()
-                    .setRelation("isAuthorOf")
-                    .setInverse("hasAuthor")
-                    .setRelType("personResult")
-                    .setSubReltype("authorship"));
-        relationInverseMap
-            .put(
-                "personResult_authorship_hasAuthor", new RelationInverse()
-                    .setInverse("isAuthorOf")
-                    .setRelation("hasAuthor")
-                    .setRelType("personResult")
-                    .setSubReltype("authorship"));
-        relationInverseMap
-            .put(
-                "projectOrganization_participation_isParticipant", new RelationInverse()
-                    .setRelation("isParticipant")
-                    .setInverse("hasParticipant")
-                    .setRelType("projectOrganization")
-                    .setSubReltype("participation"));
-        relationInverseMap
-            .put(
-                "projectOrganization_participation_hasParticipant", new RelationInverse()
-                    .setInverse("isParticipant")
-                    .setRelation("hasParticipant")
-                    .setRelType("projectOrganization")
-                    .setSubReltype("participation"));
-        relationInverseMap
-            .put(
-                "resultOrganization_affiliation_hasAuthorInstitution", new RelationInverse()
-                    .setRelation("hasAuthorInstitution")
-                    .setInverse("isAuthorInstitutionOf")
-                    .setRelType("resultOrganization")
-                    .setSubReltype("affiliation"));
-        relationInverseMap
-            .put(
-                "resultOrganization_affiliation_isAuthorInstitutionOf", new RelationInverse()
-                    .setInverse("hasAuthorInstitution")
-                    .setRelation("isAuthorInstitutionOf")
-                    .setRelType("resultOrganization")
-                    .setSubReltype("affiliation"));
-        relationInverseMap
-            .put(
-                "organizationOrganization_dedup_merges", new RelationInverse()
-                    .setRelation("merges")
-                    .setInverse("isMergedIn")
-                    .setRelType("organizationOrganization")
-                    .setSubReltype("dedup"));
-        relationInverseMap
-            .put(
-                "organizationOrganization_dedup_isMergedIn", new RelationInverse()
-                    .setInverse("merges")
-                    .setRelation("isMergedIn")
-                    .setRelType("organizationOrganization")
-                    .setSubReltype("dedup"));
-        relationInverseMap
-            .put(
-                "organizationOrganization_dedupSimilarity_isSimilarTo", new RelationInverse()
-                    .setInverse("isSimilarTo")
-                    .setRelation("isSimilarTo")
-                    .setRelType("organizationOrganization")
-                    .setSubReltype("dedupSimilarity"));
-
-        relationInverseMap
-            .put(
-                "resultProject_outcome_isProducedBy", new RelationInverse()
-                    .setRelation("isProducedBy")
-                    .setInverse("produces")
-                    .setRelType("resultProject")
-                    .setSubReltype("outcome"));
-        relationInverseMap
-            .put(
-                "resultProject_outcome_produces", new RelationInverse()
-                    .setInverse("isProducedBy")
-                    .setRelation("produces")
-                    .setRelType("resultProject")
-                    .setSubReltype("outcome"));
-        relationInverseMap
-            .put(
-                "projectPerson_contactPerson_isContact", new RelationInverse()
-                    .setRelation("isContact")
-                    .setInverse("hasContact")
-                    .setRelType("projectPerson")
-                    .setSubReltype("contactPerson"));
-        relationInverseMap
-            .put(
-                "projectPerson_contactPerson_hasContact", new RelationInverse()
-                    .setInverse("isContact")
-                    .setRelation("hasContact")
-                    .setRelType("personPerson")
-                    .setSubReltype("coAuthorship"));
-        relationInverseMap
-            .put(
-                "personPerson_coAuthorship_isCoauthorOf", new RelationInverse()
-                    .setInverse("isCoAuthorOf")
-                    .setRelation("isCoAuthorOf")
-                    .setRelType("personPerson")
-                    .setSubReltype("coAuthorship"));
-        relationInverseMap
-            .put(
-                "personPerson_dedup_merges", new RelationInverse()
-                    .setInverse("isMergedIn")
-                    .setRelation("merges")
-                    .setRelType("personPerson")
-                    .setSubReltype("dedup"));
-        relationInverseMap
-            .put(
-                "personPerson_dedup_isMergedIn", new RelationInverse()
-                    .setInverse("merges")
-                    .setRelation("isMergedIn")
-                    .setRelType("personPerson")
-                    .setSubReltype("dedup"));
-        relationInverseMap
-            .put(
-                "personPerson_dedupSimilarity_isSimilarTo", new RelationInverse()
-                    .setInverse("isSimilarTo")
-                    .setRelation("isSimilarTo")
-                    .setRelType("personPerson")
-                    .setSubReltype("dedupSimilarity"));
-        relationInverseMap
-            .put(
-                "datasourceOrganization_provision_isProvidedBy", new RelationInverse()
-                    .setInverse("provides")
-                    .setRelation("isProvidedBy")
-                    .setRelType("datasourceOrganization")
-                    .setSubReltype("provision"));
-        relationInverseMap
-            .put(
-                "datasourceOrganization_provision_provides", new RelationInverse()
-                    .setInverse("isProvidedBy")
-                    .setRelation("provides")
-                    .setRelType("datasourceOrganization")
-                    .setSubReltype("provision"));
-        relationInverseMap
-            .put(
-                "resultResult_similarity_hasAmongTopNSimilarDocuments", new RelationInverse()
-                    .setInverse("isAmongTopNSimilarDocuments")
-                    .setRelation("hasAmongTopNSimilarDocuments")
-                    .setRelType("resultResult")
-                    .setSubReltype("similarity"));
-        relationInverseMap
-            .put(
-                "resultResult_similarity_isAmongTopNSimilarDocuments", new RelationInverse()
-                    .setInverse("hasAmongTopNSimilarDocuments")
-                    .setRelation("isAmongTopNSimilarDocuments")
-                    .setRelType("resultResult")
-                    .setSubReltype("similarity"));
-        relationInverseMap
-            .put(
-                "resultResult_relationship_isRelatedTo", new RelationInverse()
-                    .setInverse("isRelatedTo")
-                    .setRelation("isRelatedTo")
-                    .setRelType("resultResult")
-                    .setSubReltype("relationship"));
-        relationInverseMap
-            .put(
-                "resultResult_similarity_isAmongTopNSimilarDocuments", new RelationInverse()
-                    .setInverse("hasAmongTopNSimilarDocuments")
-                    .setRelation("isAmongTopNSimilarDocuments")
-                    .setRelType("resultResult")
-                    .setSubReltype("similarity"));
-        relationInverseMap
-            .put(
-                "resultResult_supplement_isSupplementTo", new RelationInverse()
-                    .setInverse("isSupplementedBy")
-                    .setRelation("isSupplementTo")
-                    .setRelType("resultResult")
-                    .setSubReltype("supplement"));
-        relationInverseMap
-            .put(
-                "resultResult_supplement_isSupplementedBy", new RelationInverse()
-                    .setInverse("isSupplementTo")
-                    .setRelation("isSupplementedBy")
-                    .setRelType("resultResult")
-                    .setSubReltype("supplement"));
-        relationInverseMap
-            .put(
-                "resultResult_part_isPartOf", new RelationInverse()
-                    .setInverse("hasPart")
-                    .setRelation("isPartOf")
-                    .setRelType("resultResult")
-                    .setSubReltype("part"));
-        relationInverseMap
-            .put(
-                "resultResult_part_hasPart", new RelationInverse()
-                    .setInverse("isPartOf")
-                    .setRelation("hasPart")
-                    .setRelType("resultResult")
-                    .setSubReltype("part"));
-        relationInverseMap
-            .put(
-                "resultResult_dedup_merges", new RelationInverse()
-                    .setInverse("isMergedIn")
-                    .setRelation("merges")
-                    .setRelType("resultResult")
-                    .setSubReltype("dedup"));
-        relationInverseMap
-            .put(
-                "resultResult_dedup_isMergedIn", new RelationInverse()
-                    .setInverse("merges")
-                    .setRelation("isMergedIn")
-                    .setRelType("resultResult")
-                    .setSubReltype("dedup"));
-        relationInverseMap
-            .put(
-                "resultResult_dedupSimilarity_isSimilarTo", new RelationInverse()
-                    .setInverse("isSimilarTo")
-                    .setRelation("isSimilarTo")
-                    .setRelType("resultResult")
-                    .setSubReltype("dedupSimilarity"));
-
-    }
-
     private static final String schemeTemplate = "dnet:%s_%s_relations";

     private ModelSupport() {
     }

-    public static <E extends OafEntity> String getIdPrefix(Class<E> clazz) {
-        return idPrefixMap.get(clazz);
-    }
-
     /**
      * Checks subclass-superclass relationship.
      *

@@ -400,21 +153,6 @@ public class ModelSupport {
         entityMapping.get(EntityType.valueOf(targetType)).name());
     }

-    public static <T extends Oaf> String tableIdentifier(String dbName, String tableName) {
-
-        checkArgument(StringUtils.isNotBlank(dbName), "DB name cannot be empty");
-        checkArgument(StringUtils.isNotBlank(tableName), "table name cannot be empty");
-
-        return String.format("%s.%s", dbName, tableName);
-    }
-
-    public static <T extends Oaf> String tableIdentifier(String dbName, Class<T> clazz) {
-
-        checkArgument(Objects.nonNull(clazz), "clazz is needed to derive the table name, thus cannot be null");
-
-        return tableIdentifier(dbName, clazz.getSimpleName().toLowerCase());
-    }
-
     public static <T extends Oaf> Function<T, String> idFn() {
         return x -> {
             if (isSubClass(x, Relation.class)) {

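The relationInverseMap removed above is keyed by `relType_subRelType_relClass` strings and pairs each relation with its inverse; the blacklist code deleted further down consumes it exactly this way. A minimal lookup sketch, assuming the map and the RelationInverse bean (whose removal follows):

```java
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.common.RelationInverse;

public class InverseLookup {

    public static void main(String[] args) {
        // Keys follow the relType_subRelType_relClass convention seen in the map above.
        RelationInverse ri = ModelSupport.relationInverseMap.get("resultProject_outcome_isProducedBy");
        System.out.println(ri.getRelation()); // isProducedBy
        System.out.println(ri.getInverse());  // produces
        System.out.println(ri.getRelType() + "/" + ri.getSubReltype()); // resultProject/outcome
    }
}
```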
@@ -1,46 +0,0 @@
-
-package eu.dnetlib.dhp.schema.common;
-
-public class RelationInverse {
-    private String relation;
-    private String inverse;
-    private String relType;
-    private String subReltype;
-
-    public String getRelType() {
-        return relType;
-    }
-
-    public RelationInverse setRelType(String relType) {
-        this.relType = relType;
-        return this;
-    }
-
-    public String getSubReltype() {
-        return subReltype;
-    }
-
-    public RelationInverse setSubReltype(String subReltype) {
-        this.subReltype = subReltype;
-        return this;
-    }
-
-    public String getRelation() {
-        return relation;
-    }
-
-    public RelationInverse setRelation(String relation) {
-        this.relation = relation;
-        return this;
-    }
-
-    public String getInverse() {
-        return inverse;
-    }
-
-    public RelationInverse setInverse(String inverse) {
-        this.inverse = inverse;
-        return this;
-    }
-
-}

@@ -2,7 +2,8 @@
 package eu.dnetlib.dhp.schema.oaf;

 import java.io.Serializable;
-import java.util.*;
+import java.util.List;
+import java.util.Objects;

 public class Author implements Serializable {

@@ -85,5 +86,4 @@ public class Author implements Serializable {
     public int hashCode() {
         return Objects.hash(fullname, name, surname, rank, pid, affiliation);
     }
-
 }

@@ -8,7 +8,7 @@ public class DataInfo implements Serializable {

     private Boolean invisible = false;
     private Boolean inferred;
-    private Boolean deletedbyinference = false;
+    private Boolean deletedbyinference;
     private String trust;
     private String inferenceprovenance;
     private Qualifier provenanceaction;

@@ -10,7 +10,6 @@ public class Dataset extends Result implements Serializable {

     private Field<String> storagedate;

-    // candidate for removal
     private Field<String> device;

     private Field<String> size;

@@ -2,7 +2,6 @@
 package eu.dnetlib.dhp.schema.oaf;

 import java.io.Serializable;
-import java.util.Objects;

 public class Field<T> implements Serializable {

@@ -40,6 +39,6 @@ public class Field<T> implements Serializable {
         if (getClass() != obj.getClass())
             return false;
         Field<T> other = (Field<T>) obj;
-        return Objects.equals(getValue(), other.getValue());
+        return getValue().equals(other.getValue());
     }
 }

@@ -31,7 +31,7 @@ public class Instance implements Serializable {
     // typed results
     private Field<String> processingchargecurrency;

-    private Qualifier refereed; // peer-review status
+    private Field<String> refereed; // peer-review status

     public Field<String> getLicense() {
         return license;

@@ -113,11 +113,11 @@
         this.processingchargecurrency = processingchargecurrency;
     }

-    public Qualifier getRefereed() {
+    public Field<String> getRefereed() {
         return refereed;
     }

-    public void setRefereed(Qualifier refereed) {
+    public void setRefereed(Field<String> refereed) {
         this.refereed = refereed;
     }

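On the usage-stat side, peer-review status is thus a free-text `Field<String>` rather than a `Qualifier` bound to the dnet:review_levels vocabulary (compare `mapRefereed` further down). A minimal sketch of the post-change accessors; the "peerReviewed" value is illustrative only:

```java
import eu.dnetlib.dhp.schema.oaf.Field;
import eu.dnetlib.dhp.schema.oaf.Instance;

public class RefereedExample {

    public static void main(String[] args) {
        // usage-stat branch: refereed is a plain string field...
        Field<String> refereed = new Field<>();
        refereed.setValue("peerReviewed"); // illustrative value, not taken from the diff

        Instance instance = new Instance();
        instance.setRefereed(refereed);
        System.out.println(instance.getRefereed().getValue());
        // ...whereas on master the same slot is a Qualifier tied to dnet:review_levels.
    }
}
```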
@@ -106,7 +106,6 @@ public abstract class OafEntity extends Oaf implements Serializable {
             .stream(lists)
             .filter(Objects::nonNull)
             .flatMap(List::stream)
-            .filter(Objects::nonNull)
             .distinct()
             .collect(Collectors.toList());
     }

@@ -41,12 +41,6 @@ public class Relation extends Oaf {
      */
     private String target;

-    /**
-     * List of relation specific properties. Values include 'similarityLevel', indicating the similarity score between a
-     * pair of publications.
-     */
-    private List<KeyValue> properties = new ArrayList<>();
-
     public String getRelType() {
         return relType;
     }

@@ -87,14 +81,6 @@
         this.target = target;
     }

-    public List<KeyValue> getProperties() {
-        return properties;
-    }
-
-    public void setProperties(List<KeyValue> properties) {
-        this.properties = properties;
-    }
-
     public void mergeFrom(final Relation r) {

         checkArgument(Objects.equals(getSource(), r.getSource()), "source ids must be equal");

@@ -2,10 +2,8 @@
 package eu.dnetlib.dhp.schema.oaf;

 import java.io.Serializable;
-import java.util.ArrayList;
 import java.util.Comparator;
 import java.util.List;
-import java.util.stream.Collectors;

 public class Result extends OafEntity implements Serializable {

@@ -246,33 +244,7 @@

         subject = mergeLists(subject, r.getSubject());

-        // merge title lists: main title with higher trust and distinct between the others
-        StructuredProperty baseMainTitle = null;
-        if (title != null) {
-            baseMainTitle = getMainTitle(title);
-            if (baseMainTitle != null) {
-                final StructuredProperty p = baseMainTitle;
-                title = title.stream().filter(t -> t != p).collect(Collectors.toList());
-            }
-        }
-
-        StructuredProperty newMainTitle = null;
-        if (r.getTitle() != null) {
-            newMainTitle = getMainTitle(r.getTitle());
-            if (newMainTitle != null && title != null) {
-                final StructuredProperty p = newMainTitle;
-                title = title.stream().filter(t -> t != p).collect(Collectors.toList());
-            }
-        }
-
-        if (newMainTitle != null && compareTrust(this, r) < 0) {
-            baseMainTitle = newMainTitle;
-        }
-
         title = mergeLists(title, r.getTitle());
-        if (title != null && baseMainTitle != null) {
-            title.add(baseMainTitle);
-        }

         relevantdate = mergeLists(relevantdate, r.getRelevantdate());

@@ -322,15 +294,4 @@
         }
         return a.size() > b.size() ? a : b;
     }
-
-    private StructuredProperty getMainTitle(List<StructuredProperty> titles) {
-        // need to check if the list of titles contains more than 1 main title? (in that case, we should chose which
-        // main title select in the list)
-        for (StructuredProperty title : titles) {
-            if (title.getQualifier() != null && title.getQualifier().getClassid() != null)
-                if (title.getQualifier().getClassid().equals("main title"))
-                    return title;
-        }
-        return null;
-    }
 }

@@ -10,10 +10,8 @@ public class Software extends Result implements Serializable {

     private List<Field<String>> documentationUrl;

-    // candidate for removal
     private List<StructuredProperty> license;

-    // candidate for removal
     private Field<String> codeRepositoryUrl;

     private Qualifier programmingLanguage;

@@ -1,10 +1,11 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <parent>
         <groupId>eu.dnetlib.dhp</groupId>
         <artifactId>dhp-workflows</artifactId>
-        <version>1.2.3-SNAPSHOT</version>
+        <version>1.1.7-SNAPSHOT</version>
     </parent>
     <artifactId>dhp-actionmanager</artifactId>

@@ -84,11 +84,8 @@ public class MigrateActionSet {
     final List<Path> sourcePaths = getSourcePaths(sourceNN, isLookUp);
     log
         .info(
-            "paths to process:\n{}", sourcePaths
-                .stream()
-                .map(p -> p.toString())
-                .collect(Collectors.joining("\n")));
-
+            "paths to process:\n{}",
+            sourcePaths.stream().map(p -> p.toString()).collect(Collectors.joining("\n")));
     for (Path source : sourcePaths) {

         if (!sourceFS.exists(source)) {

@@ -122,8 +119,9 @@ public class MigrateActionSet {
         }
     }

-    final String targetPathsCsv = targetPaths.stream().map(p -> p.toString()).collect(Collectors.joining(","));
-    props.setProperty(TARGET_PATHS, targetPathsCsv);
+    props
+        .setProperty(
+            TARGET_PATHS, targetPaths.stream().map(p -> p.toString()).collect(Collectors.joining(",")));
     File file = new File(System.getProperty("oozie.action.output.properties"));

     try (OutputStream os = new FileOutputStream(file)) {

@@ -1,10 +1,12 @@

 package eu.dnetlib.dhp.actionmanager.migration;

-import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
+import static eu.dnetlib.data.proto.KindProtos.Kind.entity;
+import static eu.dnetlib.data.proto.KindProtos.Kind.relation;
+import static eu.dnetlib.data.proto.TypeProtos.*;
+import static eu.dnetlib.data.proto.TypeProtos.Type.*;

 import java.io.Serializable;
-import java.util.ArrayList;
 import java.util.List;
 import java.util.Optional;
 import java.util.stream.Collectors;

@@ -19,6 +21,10 @@ import eu.dnetlib.dhp.schema.oaf.*;

 public class ProtoConverter implements Serializable {

+    public static final String UNKNOWN = "UNKNOWN";
+    public static final String NOT_AVAILABLE = "not available";
+    public static final String DNET_ACCESS_MODES = "dnet:access_modes";
+
     public static Oaf convert(OafProtos.Oaf oaf) {
         try {
             switch (oaf.getKind()) {

@@ -58,7 +64,6 @@ public class ProtoConverter implements Serializable {
             case result:
                 final Result r = convertResult(oaf);
                 r.setInstance(convertInstances(oaf));
-                r.setExternalReference(convertExternalRefs(oaf));
                 return r;
             case project:
                 return convertProject(oaf);

@@ -89,53 +94,13 @@ public class ProtoConverter implements Serializable {
         i.setHostedby(mapKV(ri.getHostedby()));
         i.setInstancetype(mapQualifier(ri.getInstancetype()));
         i.setLicense(mapStringField(ri.getLicense()));
-        i
-            .setUrl(
-                ri.getUrlList() != null ? ri
-                    .getUrlList()
-                    .stream()
-                    .distinct()
-                    .collect(Collectors.toCollection(ArrayList::new)) : null);
-        i.setRefereed(mapRefereed(ri.getRefereed()));
+        i.setUrl(ri.getUrlList());
+        i.setRefereed(mapStringField(ri.getRefereed()));
         i.setProcessingchargeamount(mapStringField(ri.getProcessingchargeamount()));
         i.setProcessingchargecurrency(mapStringField(ri.getProcessingchargecurrency()));
         return i;
     }

-    private static Qualifier mapRefereed(FieldTypeProtos.StringField refereed) {
-        Qualifier q = new Qualifier();
-        q.setClassid(refereed.getValue());
-        q.setSchemename(refereed.getValue());
-        q.setSchemeid("dnet:review_levels");
-        q.setSchemename("dnet:review_levels");
-        return q;
-    }
-
-    private static List<ExternalReference> convertExternalRefs(OafProtos.Oaf oaf) {
-        ResultProtos.Result r = oaf.getEntity().getResult();
-        if (r.getExternalReferenceCount() > 0) {
-            return r
-                .getExternalReferenceList()
-                .stream()
-                .map(e -> convertExtRef(e))
-                .collect(Collectors.toList());
-        }
-        return Lists.newArrayList();
-    }
-
-    private static ExternalReference convertExtRef(ResultProtos.Result.ExternalReference e) {
-        ExternalReference ex = new ExternalReference();
-        ex.setUrl(e.getUrl());
-        ex.setSitename(e.getSitename());
-        ex.setRefidentifier(e.getRefidentifier());
-        ex.setQuery(e.getQuery());
-        ex.setQualifier(mapQualifier(e.getQualifier()));
-        ex.setLabel(e.getLabel());
-        ex.setDescription(e.getDescription());
-        ex.setDataInfo(ex.getDataInfo());
-        return ex;
-    }
-
     private static Organization convertOrganization(OafProtos.Oaf oaf) {
         final OrganizationProtos.Organization.Metadata m = oaf.getEntity().getOrganization().getMetadata();
         final Organization org = setOaf(new Organization(), oaf);

@@ -532,9 +497,6 @@
     }

     private static Context mapContext(ResultProtos.Result.Context context) {
-        if (context == null || StringUtils.isBlank(context.getId())) {
-            return null;
-        }
-
         final Context entity = new Context();
         entity.setId(context.getId());
         entity

@@ -548,10 +511,6 @@
     }

     public static KeyValue mapKV(FieldTypeProtos.KeyValue kv) {
-        if (kv == null || StringUtils.isBlank(kv.getKey()) & StringUtils.isBlank(kv.getValue())) {
-            return null;
-        }
-
         final KeyValue keyValue = new KeyValue();
         keyValue.setKey(kv.getKey());
         keyValue.setValue(kv.getValue());

@@ -590,10 +549,6 @@
     }

     public static StructuredProperty mapStructuredProperty(FieldTypeProtos.StructuredProperty sp) {
-        if (sp == null | StringUtils.isBlank(sp.getValue())) {
-            return null;
-        }
-
         final StructuredProperty structuredProperty = new StructuredProperty();
         structuredProperty.setValue(sp.getValue());
         structuredProperty.setQualifier(mapQualifier(sp.getQualifier()));

@@ -630,10 +585,6 @@
     }

     public static Field<String> mapStringField(FieldTypeProtos.StringField s) {
-        if (s == null || StringUtils.isBlank(s.getValue())) {
-            return null;
-        }
-
         final Field<String> stringField = new Field<>();
         stringField.setValue(s.getValue());
         stringField.setDataInfo(mapDataInfo(s.getDataInfo()));

@@ -641,16 +592,19 @@
     }

     public static Field<Boolean> mapBoolField(FieldTypeProtos.BoolField b) {
-        if (b == null) {
-            return null;
-        }
-
         final Field<Boolean> booleanField = new Field<>();
         booleanField.setValue(b.getValue());
         booleanField.setDataInfo(mapDataInfo(b.getDataInfo()));
         return booleanField;
     }

+    public static Field<Integer> mapIntField(FieldTypeProtos.IntField b) {
+        final Field<Integer> entity = new Field<>();
+        entity.setValue(b.getValue());
+        entity.setDataInfo(mapDataInfo(b.getDataInfo()));
+        return entity;
+    }
+
     public static Journal mapJournal(FieldTypeProtos.Journal j) {
         final Journal journal = new Journal();
         journal.setConferencedate(j.getConferencedate());

@@ -4,7 +4,7 @@
     <parent>
         <groupId>eu.dnetlib.dhp</groupId>
         <artifactId>dhp-workflows</artifactId>
-        <version>1.2.3-SNAPSHOT</version>
+        <version>1.1.7-SNAPSHOT</version>
     </parent>
     <artifactId>dhp-aggregation</artifactId>

@@ -8,7 +8,6 @@ import java.io.File;

 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Disabled;
 import org.junit.jupiter.api.Test;

 import com.fasterxml.jackson.databind.ObjectMapper;

@@ -20,7 +19,6 @@ import eu.dnetlib.dhp.collection.worker.utils.CollectorPluginFactory;
 import eu.dnetlib.message.Message;
 import eu.dnetlib.message.MessageManager;

-@Disabled
 public class DnetCollectorWorkerApplicationTests {

     private final ArgumentApplicationParser argumentParser = mock(ArgumentApplicationParser.class);

@@ -1,34 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <parent>
-        <artifactId>dhp-workflows</artifactId>
-        <groupId>eu.dnetlib.dhp</groupId>
-        <version>1.2.3-SNAPSHOT</version>
-    </parent>
-    <modelVersion>4.0.0</modelVersion>
-
-    <artifactId>dhp-blacklist</artifactId>
-    <dependencies>
-        <dependency>
-            <groupId>eu.dnetlib.dhp</groupId>
-            <artifactId>dhp-common</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>eu.dnetlib.dhp</groupId>
-            <artifactId>dhp-schemas</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-core_2.11</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-sql_2.11</artifactId>
-        </dependency>
-    </dependencies>
-
-
-</project>

@@ -1,94 +0,0 @@
-
-package eu.dnetlib.dhp.blacklist;
-
-import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
-
-import java.util.Optional;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.function.MapFunction;
-import org.apache.spark.sql.Dataset;
-import org.apache.spark.sql.Encoders;
-import org.apache.spark.sql.SaveMode;
-import org.apache.spark.sql.SparkSession;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-
-import eu.dnetlib.dhp.application.ArgumentApplicationParser;
-import eu.dnetlib.dhp.common.HdfsSupport;
-import eu.dnetlib.dhp.schema.oaf.Relation;
-
-public class PrepareMergedRelationJob {
-
-    private static final Logger log = LoggerFactory.getLogger(PrepareMergedRelationJob.class);
-    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
-
-    public static void main(String[] args) throws Exception {
-
-        String jsonConfiguration = IOUtils
-            .toString(
-                PrepareMergedRelationJob.class
-                    .getResourceAsStream(
-                        "/eu/dnetlib/dhp/blacklist/input_preparerelation_parameters.json"));
-
-        final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
-
-        parser.parseArgument(args);
-
-        Boolean isSparkSessionManaged = Optional
-            .ofNullable(parser.get("isSparkSessionManaged"))
-            .map(Boolean::valueOf)
-            .orElse(Boolean.TRUE);
-        log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
-
-        String inputPath = parser.get("sourcePath");
-        log.info("inputPath: {}", inputPath);
-
-        String outputPath = parser.get("outputPath");
-        log.info("outputPath: {} ", outputPath);
-
-        SparkConf conf = new SparkConf();
-        conf.set("hive.metastore.uris", parser.get("hive_metastore_uris"));
-
-        runWithSparkHiveSession(
-            conf,
-            isSparkSessionManaged,
-            spark -> {
-                removeOutputDir(spark, outputPath);
-                selectMergesRelations(
-                    spark,
-                    inputPath,
-                    outputPath);
-            });
-    }
-
-    private static void selectMergesRelations(SparkSession spark, String inputPath, String outputPath) {
-
-        Dataset<Relation> relation = readRelations(spark, inputPath);
-
-        relation
-            .filter("relclass = 'merges' and datainfo.deletedbyinference=false")
-            .write()
-            .mode(SaveMode.Overwrite)
-            .option("compression", "gzip")
-            .json(outputPath);
-    }
-
-    public static org.apache.spark.sql.Dataset<Relation> readRelations(
-        SparkSession spark, String inputPath) {
-        return spark
-            .read()
-            .textFile(inputPath)
-            .map(
-                (MapFunction<String, Relation>) value -> OBJECT_MAPPER.readValue(value, Relation.class),
-                Encoders.bean(Relation.class));
-    }
-
-    private static void removeOutputDir(SparkSession spark, String path) {
-        HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration());
-    }
-
-}

@@ -1,141 +0,0 @@
-
-package eu.dnetlib.dhp.blacklist;
-
-import java.io.BufferedWriter;
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.nio.charset.StandardCharsets;
-import java.sql.ResultSet;
-import java.util.Arrays;
-import java.util.List;
-import java.util.function.Consumer;
-import java.util.function.Function;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-
-import eu.dnetlib.dhp.application.ArgumentApplicationParser;
-import eu.dnetlib.dhp.common.DbClient;
-import eu.dnetlib.dhp.schema.common.ModelSupport;
-import eu.dnetlib.dhp.schema.common.RelationInverse;
-import eu.dnetlib.dhp.schema.oaf.Relation;
-
-public class ReadBlacklistFromDB implements Closeable {
-
-    private final DbClient dbClient;
-    private static final Log log = LogFactory.getLog(ReadBlacklistFromDB.class);
-    private final Configuration conf;
-    private final BufferedWriter writer;
-    private final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
-
-    private final static String query = "SELECT source_type, unnest(original_source_objects) as source, " +
-        "target_type, unnest(original_target_objects) as target, " +
-        "relationship FROM blacklist WHERE status = 'ACCEPTED'";
-
-    public static void main(final String[] args) throws Exception {
-        final ArgumentApplicationParser parser = new ArgumentApplicationParser(
-            IOUtils
-                .toString(
-                    ReadBlacklistFromDB.class
-                        .getResourceAsStream(
-                            "/eu/dnetlib/dhp/blacklist/blacklist_parameters.json")));
-
-        parser.parseArgument(args);
-
-        final String dbUrl = parser.get("postgresUrl");
-        final String dbUser = parser.get("postgresUser");
-        final String dbPassword = parser.get("postgresPassword");
-        final String hdfsPath = parser.get("hdfsPath") + "/blacklist";
-        final String hdfsNameNode = parser.get("hdfsNameNode");
-
-        try (final ReadBlacklistFromDB rbl = new ReadBlacklistFromDB(hdfsPath, hdfsNameNode, dbUrl, dbUser,
-            dbPassword)) {
-
-            log.info("Processing blacklist...");
-            rbl.execute(query, rbl::processBlacklistEntry);
-
-        }
-    }
-
-    public void execute(final String sql, final Function<ResultSet, List<Relation>> producer) throws Exception {
-
-        final Consumer<ResultSet> consumer = rs -> producer.apply(rs).forEach(r -> writeRelation(r));
-
-        dbClient.processResults(sql, consumer);
-    }
-
-    public List<Relation> processBlacklistEntry(ResultSet rs) {
-        try {
-            Relation direct = new Relation();
-            Relation inverse = new Relation();
-
-            String source_prefix = ModelSupport.entityIdPrefix.get(rs.getString("source_type"));
-            String target_prefix = ModelSupport.entityIdPrefix.get(rs.getString("target_type"));
-
-            String source_direct = source_prefix + "|" + rs.getString("source");
-            direct.setSource(source_direct);
-            inverse.setTarget(source_direct);
-
-            String target_direct = target_prefix + "|" + rs.getString("target");
-            direct.setTarget(target_direct);
-            inverse.setSource(target_direct);
-
-            String encoding = rs.getString("relationship");
-            RelationInverse ri = ModelSupport.relationInverseMap.get(encoding);
-            direct.setRelClass(ri.getRelation());
-            inverse.setRelClass(ri.getInverse());
-            direct.setRelType(ri.getRelType());
-            inverse.setRelType(ri.getRelType());
-            direct.setSubRelType(ri.getSubReltype());
-            inverse.setSubRelType(ri.getSubReltype());
-
-            return Arrays.asList(direct, inverse);
-
-        } catch (final Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    @Override
-    public void close() throws IOException {
-        dbClient.close();
-        writer.close();
-    }
-
-    public ReadBlacklistFromDB(
-        final String hdfsPath, String hdfsNameNode, final String dbUrl, final String dbUser, final String dbPassword)
-        throws Exception {
-
-        this.dbClient = new DbClient(dbUrl, dbUser, dbPassword);
-        this.conf = new Configuration();
-        this.conf.set("fs.defaultFS", hdfsNameNode);
-        FileSystem fileSystem = FileSystem.get(this.conf);
-        Path hdfsWritePath = new Path(hdfsPath);
-        FSDataOutputStream fsDataOutputStream = null;
-        if (fileSystem.exists(hdfsWritePath)) {
-            fsDataOutputStream = fileSystem.append(hdfsWritePath);
-        } else {
-            fsDataOutputStream = fileSystem.create(hdfsWritePath);
-        }
-
-        this.writer = new BufferedWriter(new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8));
-    }
-
-    protected void writeRelation(final Relation r) {
-        try {
-            writer.write(OBJECT_MAPPER.writeValueAsString(r));
-            writer.newLine();
-        } catch (final Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-}

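The deleted reader serialises each blacklist row as a direct/inverse pair of Relation beans in newline-delimited JSON, the encoding `readRelations` in the Spark job below parses back line by line. A round-trip sketch, assuming only Jackson and the Relation bean (the identifiers are made up; the "40|"/"50|" prefixes follow entityIdPrefix above):

```java
import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.schema.oaf.Relation;

public class RelationRoundTrip {

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        Relation direct = new Relation();
        direct.setSource("40|fakeProject_::0001"); // hypothetical ids
        direct.setTarget("50|fakeResult__::0002");
        direct.setRelType("resultProject");
        direct.setSubRelType("outcome");
        direct.setRelClass("produces");

        // One JSON object per line, as writeRelation(...) above produces...
        String line = mapper.writeValueAsString(direct);
        // ...and as readRelations(...) in the job below parses per line.
        Relation parsed = mapper.readValue(line, Relation.class);
        System.out.println(parsed.getSource() + " -[" + parsed.getRelClass() + "]-> " + parsed.getTarget());
    }
}
```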
@@ -1,145 +0,0 @@
-
-package eu.dnetlib.dhp.blacklist;
-
-import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
-
-import java.util.Objects;
-import java.util.Optional;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.function.MapFunction;
-import org.apache.spark.sql.*;
-import org.apache.spark.sql.Dataset;
-import org.apache.spark.sql.Encoders;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-
-import eu.dnetlib.dhp.application.ArgumentApplicationParser;
-import eu.dnetlib.dhp.common.HdfsSupport;
-import eu.dnetlib.dhp.schema.oaf.Relation;
-import scala.Tuple2;
-
-public class SparkRemoveBlacklistedRelationJob {
-    private static final Logger log = LoggerFactory.getLogger(SparkRemoveBlacklistedRelationJob.class);
-    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
-
-    public static void main(String[] args) throws Exception {
-
-        String jsonConfiguration = IOUtils
-            .toString(
-                SparkRemoveBlacklistedRelationJob.class
-                    .getResourceAsStream(
-                        "/eu/dnetlib/dhp/blacklist/sparkblacklist_parameters.json"));
-
-        final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
-
-        parser.parseArgument(args);
-
-        Boolean isSparkSessionManaged = Optional
-            .ofNullable(parser.get("isSparkSessionManaged"))
-            .map(Boolean::valueOf)
-            .orElse(Boolean.TRUE);
-
-        log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
-
-        String inputPath = parser.get("sourcePath");
-        log.info("inputPath: {}", inputPath);
-
-        final String outputPath = parser.get("outputPath");
-        log.info("outputPath {}: ", outputPath);
-
-        final String blacklistPath = parser.get("hdfsPath");
-        log.info("blacklistPath {}: ", blacklistPath);
-
-        final String mergesPath = parser.get("mergesPath");
-        log.info("mergesPath {}: ", mergesPath);
-
-        SparkConf conf = new SparkConf();
-
-        runWithSparkSession(
-            conf,
-            isSparkSessionManaged,
-            spark -> {
-                removeOutputDir(spark, outputPath);
-                removeBlacklistedRelations(
-                    spark,
-                    blacklistPath,
-                    inputPath,
-                    outputPath,
-                    mergesPath);
-            });
-    }
-
-    private static void removeBlacklistedRelations(SparkSession spark, String blacklistPath, String inputPath,
-        String outputPath, String mergesPath) {
-        Dataset<Relation> blackListed = readRelations(spark, blacklistPath + "/blacklist");
-        Dataset<Relation> inputRelation = readRelations(spark, inputPath);
-        Dataset<Relation> mergesRelation = readRelations(spark, mergesPath);
-
-        Dataset<Relation> dedupSource = blackListed
-            .joinWith(
-                mergesRelation, blackListed.col("source").equalTo(mergesRelation.col("target")),
-                "left_outer")
-            .map((MapFunction<Tuple2<Relation, Relation>, Relation>) c -> {
-                Optional
-                    .ofNullable(c._2())
-                    .ifPresent(mr -> c._1().setSource(mr.getSource()));
-                return c._1();
-            }, Encoders.bean(Relation.class));
-
-        Dataset<Relation> dedupBL = dedupSource
-            .joinWith(
-                mergesRelation, dedupSource.col("target").equalTo(mergesRelation.col("target")),
-                "left_outer")
-            .map((MapFunction<Tuple2<Relation, Relation>, Relation>) c -> {
-                Optional
-                    .ofNullable(c._2())
-                    .ifPresent(mr -> c._1().setTarget(mr.getSource()));
-                return c._1();
-            }, Encoders.bean(Relation.class));
-
-        inputRelation
-            .joinWith(
-                dedupBL, (inputRelation
-                    .col("source")
-                    .equalTo(dedupBL.col("source"))
-                    .and(
-                        inputRelation
-                            .col("target")
-                            .equalTo(dedupBL.col("target")))),
-                "left_outer")
-            .map((MapFunction<Tuple2<Relation, Relation>, Relation>) c -> {
-                Relation ir = c._1();
-                Optional<Relation> obl = Optional.ofNullable(c._2());
-                if (obl.isPresent()) {
-                    if (ir.equals(obl.get())) {
-                        return null;
-                    }
-                }
-                return ir;
-            }, Encoders.bean(Relation.class))
-            .filter(Objects::nonNull)
-            .write()
-            .mode(SaveMode.Overwrite)
-            .option("compression", "gzip")
-            .json(outputPath);
-    }
-
-    public static org.apache.spark.sql.Dataset<Relation> readRelations(
-        SparkSession spark, String inputPath) {
-        return spark
-            .read()
-            .textFile(inputPath)
-            .map(
-                (MapFunction<String, Relation>) value -> OBJECT_MAPPER.readValue(value, Relation.class),
-                Encoders.bean(Relation.class));
-    }
-
-    private static void removeOutputDir(SparkSession spark, String path) {
-        HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration());
-    }
-
-}

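Before subtracting blacklisted pairs, the deleted job rewrites their endpoints onto dedup representatives via two left-outer joins against 'merges' relations (source = representative, target = merged duplicate). The same remapping in miniature, with plain maps standing in for the Spark joins and made-up identifiers:

```java
import java.util.HashMap;
import java.util.Map;

public class BlacklistRemap {

    public static void main(String[] args) {
        // 'merges' relations map representative -> duplicate; inverted here for duplicate -> representative lookup.
        Map<String, String> duplicateToRep = new HashMap<>();
        duplicateToRep.put("50|dup_________::a", "50|dedup_______::rep"); // hypothetical ids

        // A blacklisted pair whose source is a merged duplicate:
        String source = "50|dup_________::a";
        String target = "50|other_______::b";

        // Equivalent of the left_outer join + ifPresent(setSource/setTarget) steps above:
        source = duplicateToRep.getOrDefault(source, source);
        target = duplicateToRep.getOrDefault(target, target);
        System.out.println(source + " -> " + target); // endpoints now reference representatives
    }
}
```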
@@ -1,32 +0,0 @@
-[
-    {
-        "paramName": "p",
-        "paramLongName": "hdfsPath",
-        "paramDescription": "the path where storing the sequential file",
-        "paramRequired": true
-    },
-    {
-        "paramName": "nn",
-        "paramLongName": "hdfsNameNode",
-        "paramDescription": "the name node on hdfs",
-        "paramRequired": true
-    },
-    {
-        "paramName": "pgurl",
-        "paramLongName": "postgresUrl",
-        "paramDescription": "postgres url, example: jdbc:postgresql://localhost:5432/testdb",
-        "paramRequired": true
-    },
-    {
-        "paramName": "pguser",
-        "paramLongName": "postgresUser",
-        "paramDescription": "postgres user",
-        "paramRequired": false
-    },
-    {
-        "paramName": "pgpasswd",
-        "paramLongName": "postgresPassword",
-        "paramDescription": "postgres password",
-        "paramRequired": false
-    }
-]

@@ -1,26 +0,0 @@
-[
-    {
-        "paramName": "s",
-        "paramLongName": "sourcePath",
-        "paramDescription": "the path to the graph used to remove the relations ",
-        "paramRequired": true
-    },
-    {
-        "paramName": "out",
-        "paramLongName": "outputPath",
-        "paramDescription": "the path where to store the temporary result ",
-        "paramRequired": true
-    },
-    {
-        "paramName": "issm",
-        "paramLongName": "isSparkSessionManaged",
-        "paramDescription": "true if the spark session is managed",
-        "paramRequired": false
-    },
-    {
-        "paramName":"h",
-        "paramLongName":"hive_metastore_uris",
-        "paramDescription": "the hive metastore uris",
-        "paramRequired": true
-    }
-]

@ -1,54 +0,0 @@
<configuration>
	<property>
		<name>jobTracker</name>
		<value>yarnRM</value>
	</property>
	<property>
		<name>nameNode</name>
		<value>hdfs://nameservice1</value>
	</property>
	<property>
		<name>oozie.use.system.libpath</name>
		<value>true</value>
	</property>
	<property>
		<name>oozie.action.sharelib.for.spark</name>
		<value>spark2</value>
	</property>
	<property>
		<name>hive_metastore_uris</name>
		<value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
	</property>
	<property>
		<name>spark2YarnHistoryServerAddress</name>
		<value>http://iis-cdh5-test-gw.ocean.icm.edu.pl:18089</value>
	</property>
	<property>
		<name>spark2ExtraListeners</name>
		<value>com.cloudera.spark.lineage.NavigatorAppListener</value>
	</property>
	<property>
		<name>spark2SqlQueryExecutionListeners</name>
		<value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
	</property>
	<property>
		<name>sparkExecutorNumber</name>
		<value>4</value>
	</property>
	<property>
		<name>spark2EventLogDir</name>
		<value>/user/spark/spark2ApplicationHistory</value>
	</property>
	<property>
		<name>sparkDriverMemory</name>
		<value>15G</value>
	</property>
	<property>
		<name>sparkExecutorMemory</name>
		<value>6G</value>
	</property>
	<property>
		<name>sparkExecutorCores</name>
		<value>1</value>
	</property>
</configuration>
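These defaults supply the ${...} variables referenced by the workflow definition that follows. A hedged submission sketch; the Oozie server URL and the properties file name are placeholders, not values taken from this configuration:

# assumes a standard Oozie CLI installation on the gateway host
oozie job -oozie http://oozie-host:11000/oozie -config job.properties -run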
@ -1,200 +0,0 @@
<workflow-app name="blacklist_relations" xmlns="uri:oozie:workflow:0.5">
	<parameters>
		<property>
			<name>postgresURL</name>
			<description>the url of the postgres server to query</description>
		</property>
		<property>
			<name>postgresUser</name>
			<description>the username to access the postgres db</description>
		</property>
		<property>
			<name>postgresPassword</name>
			<description>the postgres password</description>
		</property>
		<property>
			<name>sourcePath</name>
			<description>the source path</description>
		</property>
		<property>
			<name>outputPath</name>
			<description>the graph output path</description>
		</property>
	</parameters>

	<global>
		<job-tracker>${jobTracker}</job-tracker>
		<name-node>${nameNode}</name-node>
		<configuration>
			<property>
				<name>mapreduce.job.queuename</name>
				<value>${queueName}</value>
			</property>
			<property>
				<name>oozie.launcher.mapred.job.queue.name</name>
				<value>${oozieLauncherQueueName}</value>
			</property>
			<property>
				<name>oozie.action.sharelib.for.spark</name>
				<value>${oozieActionShareLibForSpark2}</value>
			</property>
		</configuration>
	</global>

	<start to="reset_outputpath"/>

	<kill name="Kill">
		<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
	</kill>

	<action name="reset_outputpath">
		<fs>
			<delete path="${outputPath}"/>
			<mkdir path="${outputPath}"/>
		</fs>
		<ok to="copy_entities"/>
		<error to="Kill"/>
	</action>

	<fork name="copy_entities">
		<path start="copy_publication"/>
		<path start="copy_dataset"/>
		<path start="copy_orp"/>
		<path start="copy_software"/>
		<path start="copy_datasource"/>
		<path start="copy_project"/>
		<path start="copy_organization"/>
	</fork>

	<action name="copy_publication">
		<distcp xmlns="uri:oozie:distcp-action:0.2">
			<arg>${nameNode}/${sourcePath}/publication</arg>
			<arg>${nameNode}/${outputPath}/publication</arg>
		</distcp>
		<ok to="wait"/>
		<error to="Kill"/>
	</action>

	<action name="copy_dataset">
		<distcp xmlns="uri:oozie:distcp-action:0.2">
			<arg>${nameNode}/${sourcePath}/dataset</arg>
			<arg>${nameNode}/${outputPath}/dataset</arg>
		</distcp>
		<ok to="wait"/>
		<error to="Kill"/>
	</action>

	<action name="copy_orp">
		<distcp xmlns="uri:oozie:distcp-action:0.2">
			<arg>${nameNode}/${sourcePath}/otherresearchproduct</arg>
			<arg>${nameNode}/${outputPath}/otherresearchproduct</arg>
		</distcp>
		<ok to="wait"/>
		<error to="Kill"/>
	</action>

	<action name="copy_software">
		<distcp xmlns="uri:oozie:distcp-action:0.2">
			<arg>${nameNode}/${sourcePath}/software</arg>
			<arg>${nameNode}/${outputPath}/software</arg>
		</distcp>
		<ok to="wait"/>
		<error to="Kill"/>
	</action>

	<action name="copy_organization">
		<distcp xmlns="uri:oozie:distcp-action:0.2">
			<arg>${nameNode}/${sourcePath}/organization</arg>
			<arg>${nameNode}/${outputPath}/organization</arg>
		</distcp>
		<ok to="wait"/>
		<error to="Kill"/>
	</action>

	<action name="copy_project">
		<distcp xmlns="uri:oozie:distcp-action:0.2">
			<arg>${nameNode}/${sourcePath}/project</arg>
			<arg>${nameNode}/${outputPath}/project</arg>
		</distcp>
		<ok to="wait"/>
		<error to="Kill"/>
	</action>

	<action name="copy_datasource">
		<distcp xmlns="uri:oozie:distcp-action:0.2">
			<arg>${nameNode}/${sourcePath}/datasource</arg>
			<arg>${nameNode}/${outputPath}/datasource</arg>
		</distcp>
		<ok to="wait"/>
		<error to="Kill"/>
	</action>

	<join name="wait" to="read_blacklist"/>

	<action name="read_blacklist">
		<java>
			<main-class>eu.dnetlib.dhp.blacklist.ReadBlacklistFromDB</main-class>
			<arg>--hdfsPath</arg><arg>${workingDir}/blacklist</arg>
			<arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
			<arg>--postgresUrl</arg><arg>${postgresURL}</arg>
			<arg>--postgresUser</arg><arg>${postgresUser}</arg>
			<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
		</java>
		<ok to="prepare_merged_relation"/>
		<error to="Kill"/>
	</action>

	<action name="prepare_merged_relation">
		<spark xmlns="uri:oozie:spark-action:0.2">
			<master>yarn</master>
			<mode>cluster</mode>
			<name>PrepareMergedRelation</name>
			<class>eu.dnetlib.dhp.blacklist.PrepareMergedRelationJob</class>
			<jar>dhp-blacklist-${projectVersion}.jar</jar>
			<spark-opts>
				--executor-cores=${sparkExecutorCores}
				--executor-memory=${sparkExecutorMemory}
				--driver-memory=${sparkDriverMemory}
				--conf spark.extraListeners=${spark2ExtraListeners}
				--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
				--conf spark.sql.shuffle.partitions=3840
			</spark-opts>
			<arg>--sourcePath</arg><arg>${sourcePath}/relation</arg>
			<arg>--outputPath</arg><arg>${workingDir}/mergesRelation</arg>
			<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
		</spark>
		<ok to="apply_blacklist"/>
		<error to="Kill"/>
	</action>

	<action name="apply_blacklist">
		<spark xmlns="uri:oozie:spark-action:0.2">
			<master>yarn</master>
			<mode>cluster</mode>
			<name>ApplyBlacklist</name>
			<class>eu.dnetlib.dhp.blacklist.SparkRemoveBlacklistedRelationJob</class>
			<jar>dhp-blacklist-${projectVersion}.jar</jar>
			<spark-opts>
				--executor-cores=${sparkExecutorCores}
				--executor-memory=${sparkExecutorMemory}
				--driver-memory=${sparkDriverMemory}
				--conf spark.extraListeners=${spark2ExtraListeners}
				--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
				--conf spark.sql.shuffle.partitions=3840
			</spark-opts>
			<arg>--sourcePath</arg><arg>${sourcePath}/relation</arg>
			<arg>--outputPath</arg><arg>${outputPath}/relation</arg>
			<arg>--hdfsPath</arg><arg>${workingDir}/blacklist</arg>
			<arg>--mergesPath</arg><arg>${workingDir}/mergesRelation</arg>
		</spark>
		<ok to="End"/>
		<error to="Kill"/>
	</action>

	<end name="End"/>

</workflow-app>
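A hedged example of a job.properties that could accompany a submission of this workflow; every value is a placeholder, with the JDBC URL copied from the example given in the parameter spec above:

postgresURL=jdbc:postgresql://localhost:5432/testdb
postgresUser=dnet_user
postgresPassword=changeme
sourcePath=/tmp/graph/raw
outputPath=/tmp/graph/blacklisted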
@ -1,33 +0,0 @@
[
	{
		"paramName": "p",
		"paramLongName": "hdfsPath",
		"paramDescription": "the path where to store the sequential file",
		"paramRequired": true
	},
	{
		"paramName": "s",
		"paramLongName": "sourcePath",
		"paramDescription": "the path to the graph used to remove the relations",
		"paramRequired": true
	},
	{
		"paramName": "out",
		"paramLongName": "outputPath",
		"paramDescription": "the path where to store the temporary result",
		"paramRequired": true
	},
	{
		"paramName": "issm",
		"paramLongName": "isSparkSessionManaged",
		"paramDescription": "true if the spark session is managed",
		"paramRequired": false
	},
	{
		"paramName": "m",
		"paramLongName": "mergesPath",
		"paramDescription": "the path to the merges relations",
		"paramRequired": true
	}
]
@ -1,161 +0,0 @@

package eu.dnetlib.dhp.blacklist;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.schema.oaf.Relation;

public class BlackListTest {

	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	private static final ClassLoader cl = eu.dnetlib.dhp.blacklist.BlackListTest.class.getClassLoader();

	private static SparkSession spark;

	private static Path workingDir;
	private static final Logger log = LoggerFactory.getLogger(eu.dnetlib.dhp.blacklist.BlackListTest.class);

	@BeforeAll
	public static void beforeAll() throws IOException {
		workingDir = Files.createTempDirectory(eu.dnetlib.dhp.blacklist.BlackListTest.class.getSimpleName());
		log.info("using work dir {}", workingDir);

		SparkConf conf = new SparkConf();
		conf.setAppName(eu.dnetlib.dhp.blacklist.BlackListTest.class.getSimpleName());

		conf.setMaster("local[*]");
		conf.set("spark.driver.host", "localhost");
		conf.set("hive.metastore.local", "true");
		conf.set("spark.ui.enabled", "false");
		conf.set("spark.sql.warehouse.dir", workingDir.toString());
		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

		spark = SparkSession
			.builder()
			.appName(BlackListTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();
	}

	@AfterAll
	public static void afterAll() throws IOException {
		FileUtils.deleteDirectory(workingDir.toFile());
		spark.stop();
	}

	@Test
	public void noRemoveTest() throws Exception {
		SparkRemoveBlacklistedRelationJob
			.main(
				new String[] {
					"-isSparkSessionManaged",
					Boolean.FALSE.toString(),
					"-sourcePath",
					getClass().getResource("/eu/dnetlib/dhp/blacklist/relationsNoRemoval").getPath(),
					"-outputPath",
					workingDir.toString() + "/relation",
					"-hdfsPath",
					getClass().getResource("/eu/dnetlib/dhp/blacklist/blacklist").getPath(),
					"-mergesPath",
					getClass().getResource("/eu/dnetlib/dhp/blacklist/mergesRel").getPath(),
				});

		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

		JavaRDD<Relation> tmp = sc
			.textFile(workingDir.toString() + "/relation")
			.map(item -> OBJECT_MAPPER.readValue(item, Relation.class));

		Assertions.assertEquals(13, tmp.count());

	}

	@Test
	public void removeNoMergeMatchTest() throws Exception {
		SparkRemoveBlacklistedRelationJob
			.main(
				new String[] {
					"-isSparkSessionManaged",
					Boolean.FALSE.toString(),
					"-sourcePath",
					getClass().getResource("/eu/dnetlib/dhp/blacklist/relationsOneRemoval").getPath(),
					"-outputPath",
					workingDir.toString() + "/relation",
					"-hdfsPath",
					getClass().getResource("/eu/dnetlib/dhp/blacklist/blacklist").getPath(),
					"-mergesPath",
					getClass().getResource("/eu/dnetlib/dhp/blacklist/mergesRel").getPath(),
				});

		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

		JavaRDD<Relation> tmp = sc
			.textFile(workingDir.toString() + "/relation")
			.map(item -> OBJECT_MAPPER.readValue(item, Relation.class));

		Assertions.assertEquals(12, tmp.count());

		org.apache.spark.sql.Dataset<eu.dnetlib.dhp.schema.oaf.Relation> verificationDataset = spark
			.createDataset(tmp.rdd(), Encoders.bean(eu.dnetlib.dhp.schema.oaf.Relation.class));

		Assertions
			.assertEquals(
				0, verificationDataset
					.filter(
						"source = '40|corda__h2020::5161f53ab205d803c36b4c888fe7deef' and " +
							"target = '20|dedup_wf_001::157af406bc653aa4d9749318b644de43'")
					.count());

		Assertions.assertEquals(0, verificationDataset.filter("relClass = 'hasParticipant'").count());
	}

	@Test
	public void removeMergeMatchTest() throws Exception {
		SparkRemoveBlacklistedRelationJob
			.main(
				new String[] {
					"-isSparkSessionManaged",
					Boolean.FALSE.toString(),
					"-sourcePath",
					getClass().getResource("/eu/dnetlib/dhp/blacklist/relationOneRemovalWithMatch").getPath(),
					"-outputPath",
					workingDir.toString() + "/relation",
					"-hdfsPath",
					getClass().getResource("/eu/dnetlib/dhp/blacklist/blacklist").getPath(),
					"-mergesPath",
					getClass().getResource("/eu/dnetlib/dhp/blacklist/mergesRelOneMerge").getPath(),
				});

		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

		JavaRDD<Relation> tmp = sc
			.textFile(workingDir.toString() + "/relation")
			.map(item -> OBJECT_MAPPER.readValue(item, Relation.class));

		Assertions.assertEquals(12, tmp.count());

		org.apache.spark.sql.Dataset<eu.dnetlib.dhp.schema.oaf.Relation> verificationDataset = spark
			.createDataset(tmp.rdd(), Encoders.bean(eu.dnetlib.dhp.schema.oaf.Relation.class));

		Assertions.assertEquals(12, verificationDataset.filter("relClass = 'isProvidedBy'").count());

	}
}
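The three tests above drive the job against the fixture files that follow, resolved from the test classpath (relationsNoRemoval, mergesRel, mergesRelOneMerge, and the relation removal cases). A hedged way to run just this class through Maven Surefire; the module path is an assumption based on the dhp-blacklist jar name used in the workflow:

mvn -pl dhp-workflows/dhp-blacklist -Dtest=BlackListTest test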
@ -1,20 +0,0 @@
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"projectOrganization","subRelType":"participation","relClass":"hasParticipant","source":"40|corda__h2020::5161f53ab205d803c36b4c888fe7deef","target":"20|dedup_wf_001::157af406bc653aa4d9749318b644de43"}
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"projectOrganization","subRelType":"participation","relClass":"isParticipant","source":"20|dedup_wf_001::157af406bc653aa4d9749318b644de43","target":"40|corda__h2020::5161f53ab205d803c36b4c888fe7deef"}
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od_______908::a47e1c3ede9a21ee5278a2e5c338d69b","target":"40|corda_______::189ff31d637eaaeaf4d3584dc490b1cf"}
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"resultProject","subRelType":"outcome","relClass":"produces","source":"40|corda_______::189ff31d637eaaeaf4d3584dc490b1cf","target":"50|od_______908::a47e1c3ede9a21ee5278a2e5c338d69b"}
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od________18::a727cc288016db7132ef9a799aa83350","target":"40|corda_______::9826e8aba3e8f3a2a46545cf341838a8"}
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"resultProject","subRelType":"outcome","relClass":"produces","source":"40|corda_______::9826e8aba3e8f3a2a46545cf341838a8","target":"50|od________18::a727cc288016db7132ef9a799aa83350"}
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od________18::062cf091d5c7a7d730001c34177042e3","target":"40|corda_______::9826e8aba3e8f3a2a46545cf341838a8"}
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"resultProject","subRelType":"outcome","relClass":"produces","source":"40|corda_______::9826e8aba3e8f3a2a46545cf341838a8","target":"50|od________18::062cf091d5c7a7d730001c34177042e3"}
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|webcrawl____::68c191d9b972b47a235d311804c7f6f5","target":"40|corda_______::c3d0b21615b129cd7395e24f9cf6bb64"}
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"resultProject","subRelType":"outcome","relClass":"produces","source":"40|corda_______::c3d0b21615b129cd7395e24f9cf6bb64","target":"50|webcrawl____::68c191d9b972b47a235d311804c7f6f5"}
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od_______908::1b172ab34639e7935e2357119cf20830","target":"40|corda_______::c3d0b21615b129cd7395e24f9cf6bb64"}
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"resultProject","subRelType":"outcome","relClass":"produces","source":"40|corda_______::c3d0b21615b129cd7395e24f9cf6bb64","target":"50|od_______908::1b172ab34639e7935e2357119cf20830"}
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|doajarticles::cb234c66327d29ba5f13c0db7a4cf423","target":"40|corda_______::c3d0b21615b129cd7395e24f9cf6bb64"}
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"resultProject","subRelType":"outcome","relClass":"produces","source":"40|corda_______::c3d0b21615b129cd7395e24f9cf6bb64","target":"50|doajarticles::cb234c66327d29ba5f13c0db7a4cf423"}
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______1146::e2fafaba636a14e408f02c6ea26acb0e","target":"40|corda_______::35695c955c51f0bb39482ce5477047c7"}
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"resultProject","subRelType":"outcome","relClass":"produces","source":"40|corda_______::35695c955c51f0bb39482ce5477047c7","target":"50|od______1146::e2fafaba636a14e408f02c6ea26acb0e"}
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od_______908::b8e86ed982ff331764456e1f0759ed9c","target":"40|corda_______::35695c955c51f0bb39482ce5477047c7"}
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"resultProject","subRelType":"outcome","relClass":"produces","source":"40|corda_______::35695c955c51f0bb39482ce5477047c7","target":"50|od_______908::b8e86ed982ff331764456e1f0759ed9c"}
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|webcrawl____::c472bf5944ce0495844d505d43d1c021","target":"40|corda_______::35695c955c51f0bb39482ce5477047c7"}
{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"relType":"resultProject","subRelType":"outcome","relClass":"produces","source":"40|corda_______::35695c955c51f0bb39482ce5477047c7","target":"50|webcrawl____::c472bf5944ce0495844d505d43d1c021"}
@ -1,14 +0,0 @@
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::3668b9bd87532a085dc7a18ce2086715","subRelType":"dedup","target":"50|od_______177::67c1385662f2fa0bde310bec15427646"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::3668b9bd87532a085dc7a18ce2086715","subRelType":"dedup","target":"50|doiboost____::8ea1631fa01adcbafc3f384b6a2c5cc3"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::3668b9bd87532a085dc7a18ce2086715","subRelType":"dedup","target":"50|od_______166::67c1385662f2fa0bde310bec15427646"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::d2a45f0f42d8dd66c364219924c37c3f","subRelType":"dedup","target":"50|od_______935::0bf7d9c5d2e1115a31cd558f83ae8ee3"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::d2a45f0f42d8dd66c364219924c37c3f","subRelType":"dedup","target":"50|doajarticles::d695fee344cb367a38ce6622f5fe9430"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::325525b879b17d8059a4e58def2f7225","subRelType":"dedup","target":"50|od_______267::14e952745e4b602ff72919aa881b8945"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::325525b879b17d8059a4e58def2f7225","subRelType":"dedup","target":"50|doiboost____::43941031067842fac90604d37b2a4149"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::325525b879b17d8059a4e58def2f7225","subRelType":"dedup","target":"50|core________::5c62b3ad05a23de613636607a424899d"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::325525b879b17d8059a4e58def2f7225","subRelType":"dedup","target":"50|scholexplore::1c467aabe5108ee840a4500d58f19328"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::cd4fc0411683ee762d50bfd30436f95b","subRelType":"dedup","target":"50|doiboost____::0ff61beeb12c49ed8a826b2b1883c8f8"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::cd4fc0411683ee762d50bfd30436f95b","subRelType":"dedup","target":"50|doajarticles::fca1220426b10ccb8b46e4967b353f37"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::a87be24a4fcac13c9298f0cc3acfc6ea","subRelType":"dedup","target":"50|doiboost____::dd96d41ee05d4022065c9d3096e1023a"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::a87be24a4fcac13c9298f0cc3acfc6ea","subRelType":"dedup","target":"50|erc_________::7d9a29ff323c2fe0ecf037189bf71b8e"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::a87be24a4fcac13c9298f0cc3acfc6ea","subRelType":"dedup","target":"50|webcrawl____::fdd999801fec35d4c6190bcabb850c52"}
@ -1,14 +0,0 @@
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::3668b9bd87532a085dc7a18ce2086715","subRelType":"dedup","target":"50|od_______908::a47e1c3ede9a21ee5278a2e5c338d69b"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::3668b9bd87532a085dc7a18ce2086715","subRelType":"dedup","target":"50|doiboost____::8ea1631fa01adcbafc3f384b6a2c5cc3"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::3668b9bd87532a085dc7a18ce2086715","subRelType":"dedup","target":"50|od_______166::67c1385662f2fa0bde310bec15427646"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::d2a45f0f42d8dd66c364219924c37c3f","subRelType":"dedup","target":"50|od_______935::0bf7d9c5d2e1115a31cd558f83ae8ee3"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::d2a45f0f42d8dd66c364219924c37c3f","subRelType":"dedup","target":"50|doajarticles::d695fee344cb367a38ce6622f5fe9430"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::325525b879b17d8059a4e58def2f7225","subRelType":"dedup","target":"50|od_______267::14e952745e4b602ff72919aa881b8945"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::325525b879b17d8059a4e58def2f7225","subRelType":"dedup","target":"50|doiboost____::43941031067842fac90604d37b2a4149"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::325525b879b17d8059a4e58def2f7225","subRelType":"dedup","target":"50|core________::5c62b3ad05a23de613636607a424899d"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::325525b879b17d8059a4e58def2f7225","subRelType":"dedup","target":"50|scholexplore::1c467aabe5108ee840a4500d58f19328"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::cd4fc0411683ee762d50bfd30436f95b","subRelType":"dedup","target":"50|doiboost____::0ff61beeb12c49ed8a826b2b1883c8f8"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::cd4fc0411683ee762d50bfd30436f95b","subRelType":"dedup","target":"50|doajarticles::fca1220426b10ccb8b46e4967b353f37"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::a87be24a4fcac13c9298f0cc3acfc6ea","subRelType":"dedup","target":"50|doiboost____::dd96d41ee05d4022065c9d3096e1023a"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::a87be24a4fcac13c9298f0cc3acfc6ea","subRelType":"dedup","target":"50|erc_________::7d9a29ff323c2fe0ecf037189bf71b8e"}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"relClass":"merges","source":"50|dedup_wf_001::a87be24a4fcac13c9298f0cc3acfc6ea","subRelType":"dedup","target":"50|webcrawl____::fdd999801fec35d4c6190bcabb850c52"}
@ -1,13 +0,0 @@
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProducedBy","relType":"resultProject","source":"50|dedup_wf_001::3668b9bd87532a085dc7a18ce2086715","subRelType":"outcome","target":"40|corda_______::189ff31d637eaaeaf4d3584dc490b1cf"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::05c5c5d2920c01e194d6760f24885a82","subRelType":"provision","target":"20|dedup_wf_001::cd07e6c09886e59266fdbae32a9e319b"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::07022f119fc3d1cb66fe84494aa820c9","subRelType":"provision","target":"20|doajarticles::c48e93350cf5287e604ef631f2a67087"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::09ea05970871d7d923caaa8d2416d10e","subRelType":"provision","target":"20|doajarticles::cd84ef51b2de10ff01d679e4e662594e"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":true,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::0af8c8ecf992b177304eb8f5d978100b","subRelType":"provision","target":"20|doajarticles::4eb6845b141d2b36ed94918d2bf382f0"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":true,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::0b48a767b2b8d323ccdcaf2d40642746","subRelType":"provision","target":"20|doajarticles::46a4942a4707e842611278cfa26789f9"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::0dd9573adad4e5cc322612f6e9ecc8ce","subRelType":"provision","target":"20|doajarticles::e34526e7b5efb700ddb4544700234a0b"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":true,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::0e870ab15f231d954306bb658fc747a2","subRelType":"provision","target":"20|doajarticles::ccac83f4f971e3cdc194ddb796850a37"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":true,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::0f19a2d702e31d451e9806f701584c97","subRelType":"provision","target":"20|doajarticles::7a02d64772c121c1f10c17f8e2bf2aec"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::0f4b6db6c02966acbfb60af527728c85","subRelType":"provision","target":"20|doajarticles::acd96b3bd87b176202b8ea494c318b21"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::11f65dc66da7ef1b1f3a3e59199e4d70","subRelType":"provision","target":"20|dedup_wf_001::6132363e7458cbd7c22aa284c7df1307"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::149fd06e8702d94aa648641fd1602284","subRelType":"provision","target":"20|dedup_wf_001::35ae35032078bc33bc92e2b0f2ecfa17"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":true,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::15581a45537ceb854bbddee49b2942b4","subRelType":"provision","target":"20|doajarticles::0b25b0ce56da469cc8ad74c7d83c16a3"}
@ -1,13 +0,0 @@
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::018cb61ed43c01704decc66183ce5d60","subRelType":"provision","target":"20|dedup_wf_001::b9fff055ce5efacecbe4ef918c127f86"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::05c5c5d2920c01e194d6760f24885a82","subRelType":"provision","target":"20|dedup_wf_001::cd07e6c09886e59266fdbae32a9e319b"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::07022f119fc3d1cb66fe84494aa820c9","subRelType":"provision","target":"20|doajarticles::c48e93350cf5287e604ef631f2a67087"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::09ea05970871d7d923caaa8d2416d10e","subRelType":"provision","target":"20|doajarticles::cd84ef51b2de10ff01d679e4e662594e"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":true,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::0af8c8ecf992b177304eb8f5d978100b","subRelType":"provision","target":"20|doajarticles::4eb6845b141d2b36ed94918d2bf382f0"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":true,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::0b48a767b2b8d323ccdcaf2d40642746","subRelType":"provision","target":"20|doajarticles::46a4942a4707e842611278cfa26789f9"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::0dd9573adad4e5cc322612f6e9ecc8ce","subRelType":"provision","target":"20|doajarticles::e34526e7b5efb700ddb4544700234a0b"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":true,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::0e870ab15f231d954306bb658fc747a2","subRelType":"provision","target":"20|doajarticles::ccac83f4f971e3cdc194ddb796850a37"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":true,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::0f19a2d702e31d451e9806f701584c97","subRelType":"provision","target":"20|doajarticles::7a02d64772c121c1f10c17f8e2bf2aec"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::0f4b6db6c02966acbfb60af527728c85","subRelType":"provision","target":"20|doajarticles::acd96b3bd87b176202b8ea494c318b21"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::11f65dc66da7ef1b1f3a3e59199e4d70","subRelType":"provision","target":"20|dedup_wf_001::6132363e7458cbd7c22aa284c7df1307"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::149fd06e8702d94aa648641fd1602284","subRelType":"provision","target":"20|dedup_wf_001::35ae35032078bc33bc92e2b0f2ecfa17"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":true,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::15581a45537ceb854bbddee49b2942b4","subRelType":"provision","target":"20|doajarticles::0b25b0ce56da469cc8ad74c7d83c16a3"}
@ -1,13 +0,0 @@
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"hasParticipant","relType":"projectOrganization","source":"40|corda__h2020::5161f53ab205d803c36b4c888fe7deef","subRelType":"participation","target":"20|dedup_wf_001::157af406bc653aa4d9749318b644de43"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::05c5c5d2920c01e194d6760f24885a82","subRelType":"provision","target":"20|dedup_wf_001::cd07e6c09886e59266fdbae32a9e319b"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::07022f119fc3d1cb66fe84494aa820c9","subRelType":"provision","target":"20|doajarticles::c48e93350cf5287e604ef631f2a67087"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::09ea05970871d7d923caaa8d2416d10e","subRelType":"provision","target":"20|doajarticles::cd84ef51b2de10ff01d679e4e662594e"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":true,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::0af8c8ecf992b177304eb8f5d978100b","subRelType":"provision","target":"20|doajarticles::4eb6845b141d2b36ed94918d2bf382f0"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":true,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::0b48a767b2b8d323ccdcaf2d40642746","subRelType":"provision","target":"20|doajarticles::46a4942a4707e842611278cfa26789f9"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::0dd9573adad4e5cc322612f6e9ecc8ce","subRelType":"provision","target":"20|doajarticles::e34526e7b5efb700ddb4544700234a0b"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":true,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::0e870ab15f231d954306bb658fc747a2","subRelType":"provision","target":"20|doajarticles::ccac83f4f971e3cdc194ddb796850a37"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":true,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::0f19a2d702e31d451e9806f701584c97","subRelType":"provision","target":"20|doajarticles::7a02d64772c121c1f10c17f8e2bf2aec"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::0f4b6db6c02966acbfb60af527728c85","subRelType":"provision","target":"20|doajarticles::acd96b3bd87b176202b8ea494c318b21"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::11f65dc66da7ef1b1f3a3e59199e4d70","subRelType":"provision","target":"20|dedup_wf_001::6132363e7458cbd7c22aa284c7df1307"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::149fd06e8702d94aa648641fd1602284","subRelType":"provision","target":"20|dedup_wf_001::35ae35032078bc33bc92e2b0f2ecfa17"}
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":true,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"isProvidedBy","relType":"datasourceOrganization","source":"10|doajarticles::15581a45537ceb854bbddee49b2942b4","subRelType":"provision","target":"20|doajarticles::0b25b0ce56da469cc8ad74c7d83c16a3"}
@@ -1,70 +1,66 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <parent>
-        <artifactId>dhp-workflows</artifactId>
-        <groupId>eu.dnetlib.dhp</groupId>
-        <version>1.2.3-SNAPSHOT</version>
-    </parent>
-    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <artifactId>dhp-workflows</artifactId>
+        <groupId>eu.dnetlib.dhp</groupId>
+        <version>1.1.7-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
 
-    <artifactId>dhp-broker-events</artifactId>
+    <artifactId>dhp-broker-events</artifactId>
 
-    <dependencies>
+    <dependencies>
 
-        <dependency>
-            <groupId>commons-io</groupId>
-            <artifactId>commons-io</artifactId>
-        </dependency>
+        <dependency>
+            <groupId>commons-io</groupId>
+            <artifactId>commons-io</artifactId>
+        </dependency>
 
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-core_2.11</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-sql_2.11</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-hive_2.11</artifactId>
-            <scope>test</scope>
-        </dependency>
+        <dependency>
+            <groupId>org.apache.spark</groupId>
+            <artifactId>spark-core_2.11</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.spark</groupId>
+            <artifactId>spark-sql_2.11</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.spark</groupId>
+            <artifactId>spark-hive_2.11</artifactId>
+            <scope>test</scope>
+        </dependency>
 
-        <dependency>
-            <groupId>eu.dnetlib.dhp</groupId>
-            <artifactId>dhp-common</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>eu.dnetlib.dhp</groupId>
-            <artifactId>dhp-schemas</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>eu.dnetlib</groupId>
-            <artifactId>dnet-pace-core</artifactId>
-        </dependency>
+        <dependency>
+            <groupId>eu.dnetlib.dhp</groupId>
+            <artifactId>dhp-common</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>eu.dnetlib.dhp</groupId>
+            <artifactId>dhp-schemas</artifactId>
+            <version>${project.version}</version>
+        </dependency>
 
-        <dependency>
-            <groupId>com.jayway.jsonpath</groupId>
-            <artifactId>json-path</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>dom4j</groupId>
-            <artifactId>dom4j</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>jaxen</groupId>
-            <artifactId>jaxen</artifactId>
-        </dependency>
+        <dependency>
+            <groupId>com.jayway.jsonpath</groupId>
+            <artifactId>json-path</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>dom4j</groupId>
+            <artifactId>dom4j</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>jaxen</groupId>
+            <artifactId>jaxen</artifactId>
+        </dependency>
 
-        <dependency>
-            <groupId>eu.dnetlib</groupId>
-            <artifactId>dnet-openaire-broker-common</artifactId>
-            <version>[2.0.1,3.0.0)</version>
-        </dependency>
+        <dependency>
+            <groupId>eu.dnetlib</groupId>
+            <artifactId>dnet-openaire-broker-common</artifactId>
+            <version>[1.0.0,2.0.0)</version>
+        </dependency>
 
-    </dependencies>
+    </dependencies>
 
 
 </project>
@@ -12,6 +12,7 @@ import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.time.DateUtils;
 
+import eu.dnetlib.broker.objects.OpenAireEventPayload;
 import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
 import eu.dnetlib.dhp.schema.oaf.Author;
 import eu.dnetlib.dhp.schema.oaf.KeyValue;
@@ -28,57 +29,65 @@ public class EventFactory {
         "yyyy-MM-dd"
     };
 
-    public static Event newBrokerEvent(final UpdateInfo<?> updateInfo) {
+    public static Event newBrokerEvent(final Result source, final Result target, final UpdateInfo<?> updateInfo) {
 
         final long now = new Date().getTime();
 
         final Event res = new Event();
 
-        final Map<String, Object> map = createMapFromResult(updateInfo);
+        final Map<String, Object> map = createMapFromResult(target, source, updateInfo);
+
+        final String payload = createPayload(target, updateInfo);
 
         final String eventId = calculateEventId(
-            updateInfo.getTopicPath(), updateInfo.getTarget().getResult().getOriginalId().get(0),
-            updateInfo.getHighlightValueAsString());
+            updateInfo.getTopic(), target.getOriginalId().get(0), updateInfo.getHighlightValueAsString());
 
         res.setEventId(eventId);
         res.setProducerId(PRODUCER_ID);
-        res.setPayload(updateInfo.asBrokerPayload().toJSON());
+        res.setPayload(payload);
         res.setMap(map);
-        res.setTopic(updateInfo.getTopicPath());
+        res.setTopic(updateInfo.getTopic());
         res.setCreationDate(now);
         res.setExpiryDate(calculateExpiryDate(now));
         res.setInstantMessage(false);
         return res;
     }
 
-    private static Map<String, Object> createMapFromResult(final UpdateInfo<?> updateInfo) {
+    private static String createPayload(final Result result, final UpdateInfo<?> updateInfo) {
+        final OpenAireEventPayload payload = new OpenAireEventPayload();
+        // TODO
+
+        updateInfo.compileHighlight(payload);
+
+        return payload.toJSON();
+    }
+
+    private static Map<String, Object> createMapFromResult(final Result oaf, final Result source,
+        final UpdateInfo<?> updateInfo) {
         final Map<String, Object> map = new HashMap<>();
 
-        final Result source = updateInfo.getSource().getResult();
-        final Result target = updateInfo.getTarget().getResult();
-
-        final List<KeyValue> collectedFrom = target.getCollectedfrom();
+        final List<KeyValue> collectedFrom = oaf.getCollectedfrom();
         if (collectedFrom.size() == 1) {
             map.put("target_datasource_id", collectedFrom.get(0).getKey());
             map.put("target_datasource_name", collectedFrom.get(0).getValue());
         }
 
-        final List<String> ids = target.getOriginalId();
+        final List<String> ids = oaf.getOriginalId();
         if (ids.size() > 0) {
             map.put("target_publication_id", ids.get(0));
         }
 
-        final List<StructuredProperty> titles = target.getTitle();
+        final List<StructuredProperty> titles = oaf.getTitle();
         if (titles.size() > 0) {
             map.put("target_publication_title", titles.get(0));
         }
 
-        final long date = parseDateTolong(target.getDateofacceptance().getValue());
+        final long date = parseDateTolong(oaf.getDateofacceptance().getValue());
         if (date > 0) {
             map.put("target_dateofacceptance", date);
         }
 
-        final List<StructuredProperty> subjects = target.getSubject();
+        final List<StructuredProperty> subjects = oaf.getSubject();
         if (subjects.size() > 0) {
             map
                 .put(
@@ -86,7 +95,7 @@ public class EventFactory {
                     subjects.stream().map(StructuredProperty::getValue).collect(Collectors.toList()));
         }
 
-        final List<Author> authors = target.getAuthor();
+        final List<Author> authors = oaf.getAuthor();
         if (authors.size() > 0) {
             map
                 .put(
@@ -1,66 +0,0 @@

package eu.dnetlib.dhp.broker.model;

public enum Topic {

    // ENRICHMENT MISSING
    ENRICH_MISSING_OA_VERSION("ENRICH/MISSING/OPENACCESS_VERSION"), ENRICH_MISSING_ABSTRACT(
        "ENRICH/MISSING/ABSTRACT"), ENRICH_MISSING_PUBLICATION_DATE(
        "ENRICH/MISSING/PUBLICATION_DATE"), ENRICH_MISSING_PID(
        "ENRICH/MISSING/PID"), ENRICH_MISSING_PROJECT("ENRICH/MISSING/PROJECT"), ENRICH_MISSING_SOFTWARE(
        "ENRICH/MISSING/SOFTWARE"), ENRICH_MISSING_SUBJECT_MESHEUROPMC(
        "ENRICH/MISSING/SUBJECT/MESHEUROPMC"), ENRICH_MISSING_SUBJECT_ARXIV(
        "ENRICH/MISSING/SUBJECT/ARXIV"), ENRICH_MISSING_SUBJECT_JEL(
        "ENRICH/MISSING/SUBJECT/JEL"), ENRICH_MISSING_SUBJECT_DDC(
        "ENRICH/MISSING/SUBJECT/DDC"), ENRICH_MISSING_SUBJECT_ACM(
        "ENRICH/MISSING/SUBJECT/ACM"), ENRICH_MISSING_SUBJECT_RVK(
        "ENRICH/MISSING/SUBJECT/RVK"), ENRICH_MISSING_AUTHOR_ORCID(
        "ENRICH/MISSING/AUTHOR/ORCID"),

    // ENRICHMENT MORE
    ENRICH_MORE_PID("ENRICH/MORE/PID"), ENRICH_MORE_OA_VERSION("ENRICH/MORE/OPENACCESS_VERSION"), ENRICH_MORE_ABSTRACT(
        "ENRICH/MORE/ABSTRACT"), ENRICH_MORE_PUBLICATION_DATE("ENRICH/MORE/PUBLICATION_DATE"), ENRICH_MORE_PROJECT(
        "ENRICH/MORE/PROJECT"), ENRICH_MORE_SOFTWARE("ENRICH/MORE/SOFTWARE"), ENRICH_MORE_SUBJECT_MESHEUROPMC(
        "ENRICH/MORE/SUBJECT/MESHEUROPMC"), ENRICH_MORE_SUBJECT_ARXIV(
        "ENRICH/MORE/SUBJECT/ARXIV"), ENRICH_MORE_SUBJECT_JEL(
        "ENRICH/MORE/SUBJECT/JEL"), ENRICH_MORE_SUBJECT_DDC(
        "ENRICH/MORE/SUBJECT/DDC"), ENRICH_MORE_SUBJECT_ACM(
        "ENRICH/MORE/SUBJECT/ACM"), ENRICH_MORE_SUBJECT_RVK("ENRICH/MORE/SUBJECT/RVK"),

    // ADDITION
    ADD_BY_PROJECT("ADD/BY_PROJECT"),

    // OTHER RELS
    ENRICH_MISSING_PUBLICATION_IS_RELATED_TO(
        "ENRICH/MISSING/PUBLICATION/IS_RELATED_TO"), ENRICH_MISSING_PUBLICATION_REFERENCES(
        "ENRICH/MISSING/PUBLICATION/REFERENCES"), ENRICH_MISSING_PUBLICATION_IS_REFERENCED_BY(
        "ENRICH/MISSING/PUBLICATION/IS_REFERENCED_BY"), ENRICH_MISSING_PUBLICATION_IS_SUPPLEMENTED_TO(
        "ENRICH/MISSING/PUBLICATION/IS_SUPPLEMENTED_TO"), ENRICH_MISSING_PUBLICATION_IS_SUPPLEMENTED_BY(
        "ENRICH/MISSING/PUBLICATION/IS_SUPPLEMENTED_BY"),

    ENRICH_MISSING_DATASET_IS_RELATED_TO("ENRICH/MISSING/DATASET/IS_RELATED_TO"), ENRICH_MISSING_DATASET_REFERENCES(
        "ENRICH/MISSING/DATASET/REFERENCES"), ENRICH_MISSING_DATASET_IS_REFERENCED_BY(
        "ENRICH/MISSING/DATASET/IS_REFERENCED_BY"), ENRICH_MISSING_DATASET_IS_SUPPLEMENTED_TO(
        "ENRICH/MISSING/DATASET/IS_SUPPLEMENTED_TO"), ENRICH_MISSING_DATASET_IS_SUPPLEMENTED_BY(
        "ENRICH/MISSING/DATASET/IS_SUPPLEMENTED_BY");

    Topic(final String path) {
        this.path = path;
    }

    protected String path;

    public String getPath() {
        return this.path;
    }

    public static Topic fromPath(final String path) {
        for (final Topic t : Topic.values()) {
            if (t.getPath().equals(path)) {
                return t;
            }
        }
        return null;
    }

}
@@ -3,16 +3,14 @@ package eu.dnetlib.dhp.broker.oa;
 
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
 
+import java.util.ArrayList;
+import java.util.List;
 import java.util.Optional;
+import java.util.stream.Collectors;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.function.MapFunction;
-import org.apache.spark.sql.Dataset;
-import org.apache.spark.sql.Encoders;
-import org.apache.spark.sql.SaveMode;
 import org.apache.spark.sql.SparkSession;
-import org.apache.spark.sql.TypedColumn;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -20,25 +18,20 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.broker.model.Event;
-import eu.dnetlib.dhp.broker.oa.util.BrokerConstants;
-import eu.dnetlib.dhp.broker.oa.util.EventFinder;
-import eu.dnetlib.dhp.broker.oa.util.EventGroup;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.simple.ResultAggregator;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.simple.ResultGroup;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedEntityFactory;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedProject;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelationsAggregator;
+import eu.dnetlib.dhp.broker.model.EventFactory;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMissingAbstract;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMissingAuthorOrcid;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMissingOpenAccess;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMissingPid;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMissingProject;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMissingPublicationDate;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMissingSubject;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMoreOpenAccess;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMorePid;
+import eu.dnetlib.dhp.broker.oa.util.EnrichMoreSubject;
+import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
 import eu.dnetlib.dhp.common.HdfsSupport;
-import eu.dnetlib.dhp.schema.oaf.Project;
-import eu.dnetlib.dhp.schema.oaf.Publication;
-import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.dhp.schema.oaf.Result;
-import eu.dnetlib.dhp.schema.oaf.Software;
-import eu.dnetlib.dhp.utils.ISLookupClientFactory;
-import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
-import eu.dnetlib.pace.config.DedupConfig;
-import scala.Tuple2;
 
 public class GenerateEventsApplication {
 
@@ -51,7 +44,7 @@ public class GenerateEventsApplication {
             IOUtils
                 .toString(
                     GenerateEventsApplication.class
-                        .getResourceAsStream("/eu/dnetlib/dhp/broker/oa/generate_broker_events.json")));
+                        .getResourceAsStream("/eu/dnetlib/dhp/oa/graph/merge_claims_parameters.json")));
         parser.parseArgument(args);
 
         final Boolean isSparkSessionManaged = Optional
@@ -66,27 +59,10 @@ public class GenerateEventsApplication {
         final String eventsPath = parser.get("eventsPath");
         log.info("eventsPath: {}", eventsPath);
 
-        final String isLookupUrl = parser.get("isLookupUrl");
-        log.info("isLookupUrl: {}", isLookupUrl);
-
-        final String dedupConfigProfileId = parser.get("dedupConfProfile");
-        log.info("dedupConfigProfileId: {}", dedupConfigProfileId);
-
         final SparkConf conf = new SparkConf();
 
-        final DedupConfig dedupConfig = loadDedupConfig(isLookupUrl, dedupConfigProfileId);
-
         runWithSparkSession(conf, isSparkSessionManaged, spark -> {
-
             removeOutputDir(spark, eventsPath);
-
-            final Dataset<Event> all = spark.emptyDataset(Encoders.kryo(Event.class));
-
-            for (final Class<? extends Result> r1 : BrokerConstants.RESULT_CLASSES) {
-                all.union(generateEvents(spark, graphPath, r1, dedupConfig));
-            }
-
-            all.write().mode(SaveMode.Overwrite).option("compression", "gzip").json(eventsPath);
+            generateEvents(spark, graphPath, eventsPath);
         });
-
     }
@@ -95,112 +71,42 @@ public class GenerateEventsApplication {
         HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration());
     }
 
-    private static <SRC extends Result> Dataset<Event> generateEvents(
-        final SparkSession spark,
-        final String graphPath,
-        final Class<SRC> sourceClass,
-        final DedupConfig dedupConfig) {
-
-        final Dataset<ResultWithRelations> results = expandResultsWithRelations(spark, graphPath, sourceClass);
-
-        final Dataset<Relation> mergedRels = readPath(spark, graphPath + "/relation", Relation.class)
-            .filter(r -> r.getRelClass().equals(BrokerConstants.IS_MERGED_IN_CLASS));
-
-        final TypedColumn<Tuple2<ResultWithRelations, Relation>, ResultGroup> aggr = new ResultAggregator()
-            .toColumn();
-
-        return results
-            .joinWith(mergedRels, results.col("result.id").equalTo(mergedRels.col("source")), "inner")
-            .groupByKey(
-                (MapFunction<Tuple2<ResultWithRelations, Relation>, String>) t -> t._2.getTarget(), Encoders.STRING())
-            .agg(aggr)
-            .map((MapFunction<Tuple2<String, ResultGroup>, ResultGroup>) t -> t._2, Encoders.kryo(ResultGroup.class))
-            .filter(ResultGroup::isValid)
-            .map(
-                (MapFunction<ResultGroup, EventGroup>) g -> EventFinder.generateEvents(g, dedupConfig),
-                Encoders.kryo(EventGroup.class))
-            .flatMap(group -> group.getData().iterator(), Encoders.kryo(Event.class));
+    private static void generateEvents(final SparkSession spark, final String graphPath, final String eventsPath) {
+        // TODO
     }
 
-    private static <SRC extends Result> Dataset<ResultWithRelations> expandResultsWithRelations(
-        final SparkSession spark,
-        final String graphPath,
-        final Class<SRC> sourceClass) {
-        final Dataset<Project> projects = readPath(spark, graphPath + "/project", Project.class);
-        final Dataset<eu.dnetlib.dhp.schema.oaf.Dataset> datasets = readPath(
-            spark, graphPath + "/dataset", eu.dnetlib.dhp.schema.oaf.Dataset.class);
-        final Dataset<Software> softwares = readPath(spark, graphPath + "/software", Software.class);
-        final Dataset<Publication> publications = readPath(spark, graphPath + "/publication", Publication.class);
+    private List<Event> generateEvents(final Result... children) {
+        final List<Event> list = new ArrayList<>();
 
-        final Dataset<Relation> rels = readPath(spark, graphPath + "/relation", Relation.class)
-            .filter(r -> !r.getRelClass().equals(BrokerConstants.IS_MERGED_IN_CLASS));
+        for (final Result source : children) {
+            for (final Result target : children) {
+                if (source != target) {
+                    list
+                        .addAll(
+                            findUpdates(source, target)
+                                .stream()
+                                .map(info -> EventFactory.newBrokerEvent(source, target, info))
+                                .collect(Collectors.toList()));
+                }
+            }
+        }
 
-        final Dataset<ResultWithRelations> r0 = readPath(
-            spark, graphPath + "/" + sourceClass.getSimpleName().toLowerCase(), Result.class)
-                .filter(r -> r.getDataInfo().getDeletedbyinference())
-                .map(r -> new ResultWithRelations(r), Encoders.kryo(ResultWithRelations.class));
-        final Dataset<ResultWithRelations> r1 = join(r0, rels, relatedEntities(projects, rels, RelatedProject.class));
-        final Dataset<ResultWithRelations> r2 = join(r1, rels, relatedEntities(softwares, rels, RelatedProject.class));
-        final Dataset<ResultWithRelations> r3 = join(r2, rels, relatedEntities(datasets, rels, RelatedProject.class));
-        final Dataset<ResultWithRelations> r4 = join(
-            r3, rels, relatedEntities(publications, rels, RelatedProject.class));
-        ;
-
-        return r4;
+        return list;
     }
 
-    private static <T, RT> Dataset<RT> relatedEntities(final Dataset<T> targets,
-        final Dataset<Relation> rels,
-        final Class<RT> clazz) {
-        return rels
-            .joinWith(targets, targets.col("id").equalTo(rels.col("target")), "inner")
-            .map(
-                t -> RelatedEntityFactory.newRelatedEntity(t._1.getSource(), t._1.getRelType(), t._2, clazz),
-                Encoders.kryo(clazz));
-    }
-
-    private static <T> Dataset<ResultWithRelations> join(final Dataset<ResultWithRelations> sources,
-        final Dataset<Relation> rels,
-        final Dataset<T> typedRels) {
-
-        final TypedColumn<Tuple2<ResultWithRelations, T>, ResultWithRelations> aggr = new ResultWithRelationsAggregator<T>()
-            .toColumn();
-        ;
-
-        return sources
-            .joinWith(typedRels, sources.col("result.id").equalTo(rels.col("source")), "left_outer")
-            .groupByKey(
-                (MapFunction<Tuple2<ResultWithRelations, T>, String>) t -> t._1.getResult().getId(), Encoders.STRING())
-            .agg(aggr)
-            .map(t -> t._2, Encoders.kryo(ResultWithRelations.class));
-    }
-
-    public static <R> Dataset<R> readPath(
-        final SparkSession spark,
-        final String inputPath,
-        final Class<R> clazz) {
-        return spark
-            .read()
-            .textFile(inputPath)
-            .map((MapFunction<String, R>) value -> OBJECT_MAPPER.readValue(value, clazz), Encoders.bean(clazz));
-    }
-
-    private static DedupConfig loadDedupConfig(final String isLookupUrl, final String profId) throws Exception {
-        final ISLookUpService isLookUpService = ISLookupClientFactory.getLookUpService(isLookupUrl);
-
-        final String conf = isLookUpService
-            .getResourceProfileByQuery(
-                String
-                    .format(
-                        "for $x in /RESOURCE_PROFILE[.//RESOURCE_IDENTIFIER/@value = '%s'] return $x//DEDUPLICATION/text()",
-                        profId));
-
-        final DedupConfig dedupConfig = new ObjectMapper().readValue(conf, DedupConfig.class);
-        dedupConfig.getPace().initModel();
-        dedupConfig.getPace().initTranslationMap();
-        // dedupConfig.getWf().setConfigurationId("???");
-
-        return dedupConfig;
-    }
+    private List<UpdateInfo<?>> findUpdates(final Result source, final Result target) {
+        final List<UpdateInfo<?>> list = new ArrayList<>();
+        list.addAll(EnrichMissingAbstract.findUpdates(source, target));
+        list.addAll(EnrichMissingAuthorOrcid.findUpdates(source, target));
+        list.addAll(EnrichMissingOpenAccess.findUpdates(source, target));
+        list.addAll(EnrichMissingPid.findUpdates(source, target));
+        list.addAll(EnrichMissingProject.findUpdates(source, target));
+        list.addAll(EnrichMissingPublicationDate.findUpdates(source, target));
+        list.addAll(EnrichMissingSubject.findUpdates(source, target));
+        list.addAll(EnrichMoreOpenAccess.findUpdates(source, target));
+        list.addAll(EnrichMorePid.findUpdates(source, target));
+        list.addAll(EnrichMoreSubject.findUpdates(source, target));
+        return list;
+    }
 
 }
@@ -1,100 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers;

import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.Function;

import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.StringUtils;

import eu.dnetlib.broker.objects.Publication;
import eu.dnetlib.dhp.broker.model.Topic;
import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
import eu.dnetlib.dhp.schema.oaf.Field;
import eu.dnetlib.pace.config.DedupConfig;

public abstract class UpdateMatcher<T> {

    private final boolean multipleUpdate;
    private final Function<T, Topic> topicFunction;
    private final BiConsumer<Publication, T> compileHighlightFunction;
    private final Function<T, String> highlightToStringFunction;

    public UpdateMatcher(final boolean multipleUpdate, final Function<T, Topic> topicFunction,
        final BiConsumer<Publication, T> compileHighlightFunction,
        final Function<T, String> highlightToStringFunction) {
        this.multipleUpdate = multipleUpdate;
        this.topicFunction = topicFunction;
        this.compileHighlightFunction = compileHighlightFunction;
        this.highlightToStringFunction = highlightToStringFunction;
    }

    public Collection<UpdateInfo<T>> searchUpdatesForRecord(final ResultWithRelations res,
        final Collection<ResultWithRelations> others,
        final DedupConfig dedupConfig) {

        final Map<String, UpdateInfo<T>> infoMap = new HashMap<>();

        for (final ResultWithRelations source : others) {
            if (source != res) {
                for (final T hl : findDifferences(source, res)) {
                    final Topic topic = getTopicFunction().apply(hl);
                    final UpdateInfo<T> info = new UpdateInfo<>(topic, hl, source, res, getCompileHighlightFunction(),
                        getHighlightToStringFunction(),
                        dedupConfig);
                    final String s = DigestUtils.md5Hex(info.getHighlightValueAsString());
                    if (!infoMap.containsKey(s) || infoMap.get(s).getTrust() < info.getTrust()) {
                    } else {
                        infoMap.put(s, info);
                    }
                }
            }
        }

        final Collection<UpdateInfo<T>> values = infoMap.values();

        if (values.isEmpty() || multipleUpdate) {
            return values;
        } else {
            final UpdateInfo<T> v = values
                .stream()
                .sorted((o1, o2) -> Float.compare(o1.getTrust(), o2.getTrust()))
                .findFirst()
                .get();
            return Arrays.asList(v);
        }
    }

    protected abstract List<T> findDifferences(ResultWithRelations source, ResultWithRelations target);

    protected static boolean isMissing(final List<Field<String>> list) {
        return list == null || list.isEmpty() || StringUtils.isBlank(list.get(0).getValue());
    }

    protected boolean isMissing(final Field<String> field) {
        return field == null || StringUtils.isBlank(field.getValue());
    }

    public boolean isMultipleUpdate() {
        return multipleUpdate;
    }

    public Function<T, Topic> getTopicFunction() {
        return topicFunction;
    }

    public BiConsumer<Publication, T> getCompileHighlightFunction() {
        return compileHighlightFunction;
    }

    public Function<T, String> getHighlightToStringFunction() {
        return highlightToStringFunction;
    }

}
@@ -1,51 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets;

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import eu.dnetlib.dhp.broker.model.Topic;
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedDataset;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
import eu.dnetlib.dhp.schema.oaf.Dataset;

public abstract class AbstractEnrichMissingDataset
    extends UpdateMatcher<eu.dnetlib.broker.objects.Dataset> {

    public AbstractEnrichMissingDataset(final Topic topic) {
        super(true,
            rel -> topic,
            (p, rel) -> p.getDatasets().add(rel),
            rel -> rel.getInstances().get(0).getUrl());
    }

    protected abstract boolean filterByType(String relType);

    @Override
    protected final List<eu.dnetlib.broker.objects.Dataset> findDifferences(
        final ResultWithRelations source,
        final ResultWithRelations target) {

        final Set<String> existingDatasets = target
            .getDatasets()
            .stream()
            .filter(rel -> filterByType(rel.getRelType()))
            .map(RelatedDataset::getRelDataset)
            .map(Dataset::getId)
            .collect(Collectors.toSet());

        return source
            .getDatasets()
            .stream()
            .filter(rel -> filterByType(rel.getRelType()))
            .map(RelatedDataset::getRelDataset)
            .filter(d -> !existingDatasets.contains(d.getId()))
            .map(ConversionUtils::oafDatasetToBrokerDataset)
            .collect(Collectors.toList());

    }

}
@@ -1,17 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets;

import eu.dnetlib.dhp.broker.model.Topic;

public class EnrichMissingDatasetIsReferencedBy extends AbstractEnrichMissingDataset {

    public EnrichMissingDatasetIsReferencedBy() {
        super(Topic.ENRICH_MISSING_DATASET_IS_REFERENCED_BY);
    }

    @Override
    protected boolean filterByType(final String relType) {
        return relType.equals("isReferencedBy");
    }

}
@@ -1,17 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets;

import eu.dnetlib.dhp.broker.model.Topic;

public class EnrichMissingDatasetIsRelatedTo extends AbstractEnrichMissingDataset {

    public EnrichMissingDatasetIsRelatedTo() {
        super(Topic.ENRICH_MISSING_DATASET_IS_RELATED_TO);
    }

    @Override
    protected boolean filterByType(final String relType) {
        return relType.equals("isRelatedTo");
    }

}
@@ -1,17 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets;

import eu.dnetlib.dhp.broker.model.Topic;

public class EnrichMissingDatasetIsSupplementedBy extends AbstractEnrichMissingDataset {

    public EnrichMissingDatasetIsSupplementedBy() {
        super(Topic.ENRICH_MISSING_DATASET_IS_SUPPLEMENTED_BY);
    }

    @Override
    protected boolean filterByType(final String relType) {
        return relType.equals("isSupplementedBy");
    }

}
@@ -1,17 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets;

import eu.dnetlib.dhp.broker.model.Topic;

public class EnrichMissingDatasetIsSupplementedTo extends AbstractEnrichMissingDataset {

    public EnrichMissingDatasetIsSupplementedTo() {
        super(Topic.ENRICH_MISSING_DATASET_IS_SUPPLEMENTED_TO);
    }

    @Override
    protected boolean filterByType(final String relType) {
        return relType.equals("isSupplementedTo");
    }

}
@@ -1,17 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets;

import eu.dnetlib.dhp.broker.model.Topic;

public class EnrichMissingDatasetReferences extends AbstractEnrichMissingDataset {

    public EnrichMissingDatasetReferences() {
        super(Topic.ENRICH_MISSING_DATASET_REFERENCES);
    }

    @Override
    protected boolean filterByType(final String relType) {
        return relType.equals("references");
    }

}
@@ -1,38 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.relatedProjects;

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import eu.dnetlib.broker.objects.Project;
import eu.dnetlib.dhp.broker.model.Topic;
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedProject;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;

public class EnrichMissingProject
    extends UpdateMatcher<eu.dnetlib.broker.objects.Project> {

    public EnrichMissingProject() {
        super(true,
            prj -> Topic.ENRICH_MISSING_PROJECT,
            (p, prj) -> p.getProjects().add(prj),
            prj -> prj.getFunder() + "::" + prj.getFundingProgram() + prj.getCode());
    }

    @Override
    protected List<Project> findDifferences(final ResultWithRelations source, final ResultWithRelations target) {
        if (source.getProjects().isEmpty()) {
            return Arrays.asList();
        } else {
            return target
                .getProjects()
                .stream()
                .map(RelatedProject::getRelProject)
                .map(ConversionUtils::oafProjectToBrokerProject)
                .collect(Collectors.toList());
        }
    }
}
@@ -1,44 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.relatedProjects;

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import eu.dnetlib.dhp.broker.model.Topic;
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedProject;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
import eu.dnetlib.dhp.schema.oaf.Project;

public class EnrichMoreProject extends UpdateMatcher<eu.dnetlib.broker.objects.Project> {

    public EnrichMoreProject() {
        super(true,
            prj -> Topic.ENRICH_MORE_PROJECT,
            (p, prj) -> p.getProjects().add(prj),
            prj -> prj.getFunder() + "::" + prj.getFundingProgram() + prj.getCode());
    }

    @Override
    protected List<eu.dnetlib.broker.objects.Project> findDifferences(final ResultWithRelations source,
        final ResultWithRelations target) {

        final Set<String> existingProjects = source
            .getProjects()
            .stream()
            .map(RelatedProject::getRelProject)
            .map(Project::getId)
            .collect(Collectors.toSet());

        return target
            .getProjects()
            .stream()
            .map(RelatedProject::getRelProject)
            .filter(p -> !existingProjects.contains(p.getId()))
            .map(ConversionUtils::oafProjectToBrokerProject)
            .collect(Collectors.toList());
    }

}
@@ -1,51 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.relatedPublications;

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import eu.dnetlib.dhp.broker.model.Topic;
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedPublication;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
import eu.dnetlib.dhp.schema.oaf.Publication;

public abstract class AbstractEnrichMissingPublication
    extends UpdateMatcher<eu.dnetlib.broker.objects.Publication> {

    public AbstractEnrichMissingPublication(final Topic topic) {
        super(true,
            rel -> topic,
            (p, rel) -> p.getPublications().add(rel),
            rel -> rel.getInstances().get(0).getUrl());

    }

    protected abstract boolean filterByType(String relType);

    @Override
    protected final List<eu.dnetlib.broker.objects.Publication> findDifferences(
        final ResultWithRelations source,
        final ResultWithRelations target) {

        final Set<String> existingPublications = target
            .getPublications()
            .stream()
            .filter(rel -> filterByType(rel.getRelType()))
            .map(RelatedPublication::getRelPublication)
            .map(Publication::getId)
            .collect(Collectors.toSet());

        return source
            .getPublications()
            .stream()
            .filter(rel -> filterByType(rel.getRelType()))
            .map(RelatedPublication::getRelPublication)
            .filter(d -> !existingPublications.contains(d.getId()))
            .map(ConversionUtils::oafResultToBrokerPublication)
            .collect(Collectors.toList());
    }

}
@@ -1,16 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.relatedPublications;

import eu.dnetlib.dhp.broker.model.Topic;

public class EnrichMissingPublicationIsReferencedBy extends AbstractEnrichMissingPublication {

    public EnrichMissingPublicationIsReferencedBy() {
        super(Topic.ENRICH_MISSING_PUBLICATION_IS_REFERENCED_BY);
    }

    @Override
    protected boolean filterByType(final String relType) {
        return relType.equals("isReferencedBy");
    }
}
@@ -1,17 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.relatedPublications;

import eu.dnetlib.dhp.broker.model.Topic;

public class EnrichMissingPublicationIsRelatedTo extends AbstractEnrichMissingPublication {

    public EnrichMissingPublicationIsRelatedTo() {
        super(Topic.ENRICH_MISSING_PUBLICATION_IS_RELATED_TO);
    }

    @Override
    protected boolean filterByType(final String relType) {
        return relType.equals("isRelatedTo");
    }

}
@@ -1,16 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.relatedPublications;

import eu.dnetlib.dhp.broker.model.Topic;

public class EnrichMissingPublicationIsSupplementedBy extends AbstractEnrichMissingPublication {

    public EnrichMissingPublicationIsSupplementedBy() {
        super(Topic.ENRICH_MISSING_PUBLICATION_IS_SUPPLEMENTED_BY);
    }

    @Override
    protected boolean filterByType(final String relType) {
        return relType.equals("isSupplementedBy");
    }
}
@@ -1,17 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.relatedPublications;

import eu.dnetlib.dhp.broker.model.Topic;

public class EnrichMissingPublicationIsSupplementedTo extends AbstractEnrichMissingPublication {

    public EnrichMissingPublicationIsSupplementedTo() {
        super(Topic.ENRICH_MISSING_PUBLICATION_IS_SUPPLEMENTED_TO);
    }

    @Override
    protected boolean filterByType(final String relType) {
        return relType.equals("isSupplementedTo");
    }

}
@@ -1,17 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.relatedPublications;

import eu.dnetlib.dhp.broker.model.Topic;

public class EnrichMissingPublicationReferences extends AbstractEnrichMissingPublication {

    public EnrichMissingPublicationReferences() {
        super(Topic.ENRICH_MISSING_PUBLICATION_REFERENCES);
    }

    @Override
    protected boolean filterByType(final String relType) {
        return relType.equals("references");
    }

}
@@ -1,41 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.relatedSoftware;

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import eu.dnetlib.dhp.broker.model.Topic;
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedSoftware;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;

public class EnrichMissingSoftware
    extends UpdateMatcher<eu.dnetlib.broker.objects.Software> {

    public EnrichMissingSoftware() {
        super(true,
            s -> Topic.ENRICH_MISSING_SOFTWARE,
            (p, s) -> p.getSoftwares().add(s),
            s -> s.getName());
    }

    @Override
    protected List<eu.dnetlib.broker.objects.Software> findDifferences(
        final ResultWithRelations source,
        final ResultWithRelations target) {

        if (source.getSoftwares().isEmpty()) {
            return Arrays.asList();
        } else {
            return target
                .getSoftwares()
                .stream()
                .map(RelatedSoftware::getRelSoftware)
                .map(ConversionUtils::oafSoftwareToBrokerSoftware)
                .collect(Collectors.toList());
        }
    }

}
@@ -1,46 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.relatedSoftware;

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import eu.dnetlib.dhp.broker.model.Topic;
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedSoftware;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
import eu.dnetlib.dhp.schema.oaf.Software;

public class EnrichMoreSoftware
    extends UpdateMatcher<eu.dnetlib.broker.objects.Software> {

    public EnrichMoreSoftware() {
        super(true,
            s -> Topic.ENRICH_MORE_SOFTWARE,
            (p, s) -> p.getSoftwares().add(s),
            s -> s.getName());
    }

    @Override
    protected List<eu.dnetlib.broker.objects.Software> findDifferences(
        final ResultWithRelations source,
        final ResultWithRelations target) {

        final Set<String> existingSoftwares = source
            .getSoftwares()
            .stream()
            .map(RelatedSoftware::getRelSoftware)
            .map(Software::getId)
            .collect(Collectors.toSet());

        return target
            .getSoftwares()
            .stream()
            .map(RelatedSoftware::getRelSoftware)
            .filter(p -> !existingSoftwares.contains(p.getId()))
            .map(ConversionUtils::oafSoftwareToBrokerSoftware)
            .collect(Collectors.toList());
    }

}
@@ -1,31 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.simple;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import eu.dnetlib.dhp.broker.model.Topic;
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;

public class EnrichMissingAbstract extends UpdateMatcher<String> {

    public EnrichMissingAbstract() {
        super(false,
            s -> Topic.ENRICH_MISSING_ABSTRACT,
            (p, s) -> p.getAbstracts().add(s),
            s -> s);
    }

    @Override
    protected List<String> findDifferences(final ResultWithRelations source,
        final ResultWithRelations target) {
        if (isMissing(target.getResult().getDescription()) && !isMissing(source.getResult().getDescription())) {
            return Arrays
                .asList(source.getResult().getDescription().get(0).getValue());
        }
        return new ArrayList<>();
    }

}
@@ -1,53 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.simple;

import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import eu.dnetlib.dhp.broker.model.Topic;
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
import eu.dnetlib.dhp.schema.oaf.Author;
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;

public class EnrichMissingAuthorOrcid extends UpdateMatcher<String> {

    public EnrichMissingAuthorOrcid() {
        super(true,
            aut -> Topic.ENRICH_MISSING_AUTHOR_ORCID,
            (p, aut) -> p.getCreators().add(aut),
            aut -> aut);
    }

    @Override
    protected List<String> findDifferences(final ResultWithRelations source,
        final ResultWithRelations target) {

        final Set<String> existingOrcids = target
            .getResult()
            .getAuthor()
            .stream()
            .map(Author::getPid)
            .flatMap(List::stream)
            .filter(pid -> pid.getQualifier().getClassid().equalsIgnoreCase("orcid"))
            .map(pid -> pid.getValue())
            .collect(Collectors.toSet());

        final List<String> list = new ArrayList<>();

        for (final Author author : source.getResult().getAuthor()) {
            final String name = author.getFullname();

            for (final StructuredProperty pid : author.getPid()) {
                if (pid.getQualifier().getClassid().equalsIgnoreCase("orcid")
                    && !existingOrcids.contains(pid.getValue())) {
                    list.add(name + " [ORCID: " + pid.getValue() + "]");
                }
            }
        }

        return list;
    }
}
@@ -1,49 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.simple;

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import eu.dnetlib.broker.objects.Instance;
import eu.dnetlib.dhp.broker.model.Topic;
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
import eu.dnetlib.dhp.broker.oa.util.BrokerConstants;
import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;

public class EnrichMissingOpenAccess extends UpdateMatcher<Instance> {

    public EnrichMissingOpenAccess() {
        super(true,
            i -> Topic.ENRICH_MISSING_OA_VERSION,
            (p, i) -> p.getInstances().add(i),
            Instance::getUrl);
    }

    @Override
    protected List<Instance> findDifferences(final ResultWithRelations source,
        final ResultWithRelations target) {
        final long count = target
            .getResult()
            .getInstance()
            .stream()
            .map(i -> i.getAccessright().getClassid())
            .filter(right -> right.equals(BrokerConstants.OPEN_ACCESS))
            .count();

        if (count > 0) {
            return Arrays.asList();
        }

        return source
            .getResult()
            .getInstance()
            .stream()
            .filter(i -> i.getAccessright().getClassid().equals(BrokerConstants.OPEN_ACCESS))
            .map(ConversionUtils::oafInstanceToBrokerInstances)
            .flatMap(List::stream)
            .collect(Collectors.toList());
    }

}
@@ -1,40 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.simple;

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import eu.dnetlib.broker.objects.Pid;
import eu.dnetlib.dhp.broker.model.Topic;
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;

public class EnrichMissingPid extends UpdateMatcher<Pid> {

    public EnrichMissingPid() {
        super(true,
            pid -> Topic.ENRICH_MISSING_PID,
            (p, pid) -> p.getPids().add(pid),
            pid -> pid.getType() + "::" + pid.getValue());
    }

    @Override
    protected List<Pid> findDifferences(final ResultWithRelations source,
        final ResultWithRelations target) {
        final long count = target.getResult().getPid().size();

        if (count > 0) {
            return Arrays.asList();
        }

        return source
            .getResult()
            .getPid()
            .stream()
            .map(ConversionUtils::oafPidToBrokerPid)
            .collect(Collectors.toList());
    }

}
@@ -1,31 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.simple;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import eu.dnetlib.dhp.broker.model.Topic;
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;

public class EnrichMissingPublicationDate extends UpdateMatcher<String> {

    public EnrichMissingPublicationDate() {
        super(false,
            date -> Topic.ENRICH_MISSING_PUBLICATION_DATE,
            (p, date) -> p.setPublicationdate(date),
            s -> s);
    }

    @Override
    protected List<String> findDifferences(final ResultWithRelations source,
        final ResultWithRelations target) {
        if (isMissing(target.getResult().getDateofacceptance())
            && !isMissing(source.getResult().getDateofacceptance())) {
            return Arrays.asList(source.getResult().getDateofacceptance().getValue());
        }
        return new ArrayList<>();
    }

}
@@ -1,46 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.simple;

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import org.apache.commons.lang3.tuple.Pair;

import eu.dnetlib.dhp.broker.model.Topic;
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
import eu.dnetlib.dhp.schema.oaf.Qualifier;
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;

public class EnrichMissingSubject extends UpdateMatcher<Pair<String, String>> {

    public EnrichMissingSubject() {
        super(true,
            pair -> Topic.fromPath("ENRICH/MISSING/SUBJECT/" + pair.getLeft()),
            (p, pair) -> p.getSubjects().add(pair.getRight()),
            pair -> pair.getLeft() + "::" + pair.getRight());
    }

    @Override
    protected List<Pair<String, String>> findDifferences(final ResultWithRelations source,
        final ResultWithRelations target) {
        final Set<String> existingTypes = target
            .getResult()
            .getSubject()
            .stream()
            .map(StructuredProperty::getQualifier)
            .map(Qualifier::getClassid)
            .collect(Collectors.toSet());

        return source
            .getResult()
            .getPid()
            .stream()
            .filter(pid -> !existingTypes.contains(pid.getQualifier().getClassid()))
            .map(ConversionUtils::oafSubjectToPair)
            .collect(Collectors.toList());
    }

}
@@ -1,47 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.simple;

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import eu.dnetlib.broker.objects.Instance;
import eu.dnetlib.dhp.broker.model.Topic;
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
import eu.dnetlib.dhp.broker.oa.util.BrokerConstants;
import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;

public class EnrichMoreOpenAccess extends UpdateMatcher<Instance> {

    public EnrichMoreOpenAccess() {
        super(true,
            i -> Topic.ENRICH_MORE_OA_VERSION,
            (p, i) -> p.getInstances().add(i),
            Instance::getUrl);
    }

    @Override
    protected List<Instance> findDifferences(final ResultWithRelations source,
        final ResultWithRelations target) {
        final Set<String> urls = target
            .getResult()
            .getInstance()
            .stream()
            .filter(i -> i.getAccessright().getClassid().equals(BrokerConstants.OPEN_ACCESS))
            .map(i -> i.getUrl())
            .flatMap(List::stream)
            .collect(Collectors.toSet());

        return source
            .getResult()
            .getInstance()
            .stream()
            .filter(i -> i.getAccessright().getClassid().equals(BrokerConstants.OPEN_ACCESS))
            .map(ConversionUtils::oafInstanceToBrokerInstances)
            .flatMap(List::stream)
            .filter(i -> !urls.contains(i.getUrl()))
            .collect(Collectors.toList());
    }

}
@@ -1,42 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.simple;

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import eu.dnetlib.broker.objects.Pid;
import eu.dnetlib.dhp.broker.model.Topic;
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;

public class EnrichMorePid extends UpdateMatcher<Pid> {

    public EnrichMorePid() {
        super(true,
            pid -> Topic.ENRICH_MORE_PID,
            (p, pid) -> p.getPids().add(pid),
            pid -> pid.getType() + "::" + pid.getValue());
    }

    @Override
    protected List<Pid> findDifferences(final ResultWithRelations source,
        final ResultWithRelations target) {
        final Set<String> existingPids = target
            .getResult()
            .getPid()
            .stream()
            .map(pid -> pid.getQualifier().getClassid() + "::" + pid.getValue())
            .collect(Collectors.toSet());

        return source
            .getResult()
            .getPid()
            .stream()
            .filter(pid -> !existingPids.contains(pid.getQualifier().getClassid() + "::" + pid.getValue()))
            .map(ConversionUtils::oafPidToBrokerPid)
            .collect(Collectors.toList());
    }

}
@@ -1,43 +0,0 @@

package eu.dnetlib.dhp.broker.oa.matchers.simple;

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import org.apache.commons.lang3.tuple.Pair;

import eu.dnetlib.dhp.broker.model.Topic;
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;

public class EnrichMoreSubject extends UpdateMatcher<Pair<String, String>> {

    public EnrichMoreSubject() {
        super(true,
            pair -> Topic.fromPath("ENRICH/MORE/SUBJECT/" + pair.getLeft()),
            (p, pair) -> p.getSubjects().add(pair.getRight()),
            pair -> pair.getLeft() + "::" + pair.getRight());
    }

    @Override
    protected List<Pair<String, String>> findDifferences(final ResultWithRelations source,
        final ResultWithRelations target) {
        final Set<String> existingSubjects = target
            .getResult()
            .getSubject()
            .stream()
            .map(pid -> pid.getQualifier().getClassid() + "::" + pid.getValue())
            .collect(Collectors.toSet());

        return source
            .getResult()
            .getPid()
            .stream()
            .filter(pid -> !existingSubjects.contains(pid.getQualifier().getClassid() + "::" + pid.getValue()))
            .map(ConversionUtils::oafSubjectToPair)
            .collect(Collectors.toList());
    }

}
@@ -1,24 +0,0 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.dhp.schema.oaf.Dataset;
import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct;
import eu.dnetlib.dhp.schema.oaf.Publication;
import eu.dnetlib.dhp.schema.oaf.Result;
import eu.dnetlib.dhp.schema.oaf.Software;

public class BrokerConstants {

    public static final String OPEN_ACCESS = "OPEN";
    public static final String IS_MERGED_IN_CLASS = "isMergedIn";

    public static final float MIN_TRUST = 0.25f;
    public static final float MAX_TRUST = 1.00f;

    public static final List<Class<? extends Result>> RESULT_CLASSES = Arrays
        .asList(Publication.class, Dataset.class, Software.class, OtherResearchProduct.class);

}
@@ -1,184 +0,0 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.DocumentHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.broker.objects.Pid;
import eu.dnetlib.dhp.schema.oaf.Author;
import eu.dnetlib.dhp.schema.oaf.Dataset;
import eu.dnetlib.dhp.schema.oaf.ExternalReference;
import eu.dnetlib.dhp.schema.oaf.Field;
import eu.dnetlib.dhp.schema.oaf.Instance;
import eu.dnetlib.dhp.schema.oaf.Journal;
import eu.dnetlib.dhp.schema.oaf.KeyValue;
import eu.dnetlib.dhp.schema.oaf.Project;
import eu.dnetlib.dhp.schema.oaf.Publication;
import eu.dnetlib.dhp.schema.oaf.Result;
import eu.dnetlib.dhp.schema.oaf.Software;
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;

public class ConversionUtils {

    private static final Logger log = LoggerFactory.getLogger(ConversionUtils.class);

    public static List<eu.dnetlib.broker.objects.Instance> oafInstanceToBrokerInstances(final Instance i) {
        return i.getUrl().stream().map(url -> {
            return new eu.dnetlib.broker.objects.Instance()
                .setUrl(url)
                .setInstancetype(i.getInstancetype().getClassid())
                .setLicense(BrokerConstants.OPEN_ACCESS)
                .setHostedby(i.getHostedby().getValue());
        }).collect(Collectors.toList());
    }

    public static Pid oafPidToBrokerPid(final StructuredProperty sp) {
        return sp != null ? new Pid()
            .setValue(sp.getValue())
            .setType(sp.getQualifier().getClassid()) : null;
    }

    public static final Pair<String, String> oafSubjectToPair(final StructuredProperty sp) {
        return sp != null ? Pair.of(sp.getQualifier().getClassid(), sp.getValue()) : null;
    }

    public static final eu.dnetlib.broker.objects.Dataset oafDatasetToBrokerDataset(final Dataset d) {
        return d != null ? new eu.dnetlib.broker.objects.Dataset()
            .setOriginalId(d.getOriginalId().get(0))
            .setTitles(structPropList(d.getTitle()))
            .setPids(d.getPid().stream().map(ConversionUtils::oafPidToBrokerPid).collect(Collectors.toList()))
            .setInstances(
                d
                    .getInstance()
                    .stream()
                    .map(ConversionUtils::oafInstanceToBrokerInstances)
                    .flatMap(List::stream)
                    .collect(Collectors.toList()))
            .setCollectedFrom(d.getCollectedfrom().stream().map(KeyValue::getValue).collect(Collectors.toList()))
            : null;
    }

    public static final eu.dnetlib.broker.objects.Publication oafResultToBrokerPublication(final Result result) {

        return result != null ? new eu.dnetlib.broker.objects.Publication()
            .setOriginalId(result.getOriginalId().get(0))
            .setTitles(structPropList(result.getTitle()))
            .setAbstracts(fieldList(result.getDescription()))
            .setLanguage(result.getLanguage().getClassid())
            .setSubjects(structPropList(result.getSubject()))
            .setCreators(result.getAuthor().stream().map(Author::getFullname).collect(Collectors.toList()))
            .setPublicationdate(result.getDateofcollection())
            .setPublisher(fieldValue(result.getPublisher()))
            .setEmbargoenddate(fieldValue(result.getEmbargoenddate()))
            .setContributor(fieldList(result.getContributor()))
            .setJournal(
                result instanceof Publication ? oafJournalToBrokerJournal(((Publication) result).getJournal()) : null)
            .setCollectedFrom(result.getCollectedfrom().stream().map(KeyValue::getValue).collect(Collectors.toList()))
            .setPids(result.getPid().stream().map(ConversionUtils::oafPidToBrokerPid).collect(Collectors.toList()))
            .setInstances(
                result
                    .getInstance()
                    .stream()
                    .map(ConversionUtils::oafInstanceToBrokerInstances)
                    .flatMap(List::stream)
                    .collect(Collectors.toList()))
            .setExternalReferences(
                result
                    .getExternalReference()
                    .stream()
                    .map(ConversionUtils::oafExtRefToBrokerExtRef)
                    .collect(Collectors.toList()))
            : null;
    }

    private static eu.dnetlib.broker.objects.Journal oafJournalToBrokerJournal(final Journal journal) {
        return journal != null ? new eu.dnetlib.broker.objects.Journal()
            .setName(journal.getName())
            .setIssn(journal.getIssnPrinted())
            .setEissn(journal.getIssnOnline())
            .setLissn(journal.getIssnLinking()) : null;
    }

    private static eu.dnetlib.broker.objects.ExternalReference oafExtRefToBrokerExtRef(final ExternalReference ref) {
        return ref != null ? new eu.dnetlib.broker.objects.ExternalReference()
            .setRefidentifier(ref.getRefidentifier())
            .setSitename(ref.getSitename())
            .setType(ref.getQualifier().getClassid())
            .setUrl(ref.getUrl())
            : null;
    }

    public static final eu.dnetlib.broker.objects.Project oafProjectToBrokerProject(final Project p) {
        if (p == null) {
            return null;
        }

        final eu.dnetlib.broker.objects.Project res = new eu.dnetlib.broker.objects.Project()
            .setTitle(fieldValue(p.getTitle()))
            .setAcronym(fieldValue(p.getAcronym()))
            .setCode(fieldValue(p.getCode()));

        final String ftree = fieldValue(p.getFundingtree());
        if (StringUtils.isNotBlank(ftree)) {
            try {
                final Document fdoc = DocumentHelper.parseText(ftree);
                res.setFunder(fdoc.valueOf("/fundingtree/funder/shortname"));
                res.setJurisdiction(fdoc.valueOf("/fundingtree/funder/jurisdiction"));
                res.setFundingProgram(fdoc.valueOf("//funding_level_0/name"));
            } catch (final DocumentException e) {
                log.error("Error in record " + p.getId() + ": invalid fundingtree: " + ftree);
            }
        }

        return res;
    }

    public static final eu.dnetlib.broker.objects.Software oafSoftwareToBrokerSoftware(final Software sw) {
        return sw != null ? new eu.dnetlib.broker.objects.Software()
            .setName(structPropValue(sw.getTitle()))
            .setDescription(fieldValue(sw.getDescription()))
            .setRepository(fieldValue(sw.getCodeRepositoryUrl()))
            .setLandingPage(fieldValue(sw.getDocumentationUrl()))
            : null;
    }

    private static String fieldValue(final Field<String> f) {
        return f != null ? f.getValue() : null;
    }

    private static String fieldValue(final List<Field<String>> fl) {
        return fl != null ? fl.stream().map(Field::getValue).filter(StringUtils::isNotBlank).findFirst().orElse(null)
            : null;
    }

    private static String structPropValue(final List<StructuredProperty> props) {
        return props != null
            ? props.stream().map(StructuredProperty::getValue).filter(StringUtils::isNotBlank).findFirst().orElse(null)
            : null;
    }

    private static List<String> fieldList(final List<Field<String>> fl) {
        return fl != null
            ? fl.stream().map(Field::getValue).filter(StringUtils::isNotBlank).collect(Collectors.toList())
            : new ArrayList<>();
    }

    private static List<String> structPropList(final List<StructuredProperty> props) {
        return props != null
            ? props
                .stream()
                .map(StructuredProperty::getValue)
                .filter(StringUtils::isNotBlank)
                .collect(Collectors.toList())
            : new ArrayList<>();
    }
}

@@ -0,0 +1,31 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMissingAbstract extends UpdateInfo<String> {

    public static List<EnrichMissingAbstract> findUpdates(final Result source, final Result target) {
        // return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
        return Arrays.asList();
    }

    private EnrichMissingAbstract(final String highlightValue, final float trust) {
        super("ENRICH/MISSING/ABSTRACT", highlightValue, trust);
    }

    @Override
    public void compileHighlight(final OpenAireEventPayload payload) {
        payload.getHighlight().getAbstracts().add(getHighlightValue());
    }

    @Override
    public String getHighlightValueAsString() {
        return getHighlightValue();
    }

}

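All of the EnrichMissing*/EnrichMore* classes introduced below follow this template: a private constructor pins the topic path and the trust score, a static findUpdates stub (still returning an empty list) is the hook where candidate updates will eventually be computed, and compileHighlight writes the highlight into the event payload. A hedged consumption sketch, assuming findUpdates gets implemented and that the caller has already initialised the payload's highlight publication; only names visible in this diff are used:

    // Sketch only: compile every proposed "missing abstract" update into a
    // payload prepared by the caller. compileHighlight would throw a
    // NullPointerException on a payload whose highlight has not been set.
    static void applyUpdates(final Result source, final Result target, final OpenAireEventPayload payload) {
        for (final EnrichMissingAbstract update : EnrichMissingAbstract.findUpdates(source, target)) {
            update.compileHighlight(payload);
            System.out.println(update.getTopic() + " -> " + update.getHighlightValueAsString());
        }
    }
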
@@ -0,0 +1,31 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMissingAuthorOrcid extends UpdateInfo<String> {

    public static List<EnrichMissingAuthorOrcid> findUpdates(final Result source, final Result target) {
        // return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
        return Arrays.asList();
    }

    private EnrichMissingAuthorOrcid(final String highlightValue, final float trust) {
        super("ENRICH/MISSING/AUTHOR/ORCID", highlightValue, trust);
    }

    @Override
    public void compileHighlight(final OpenAireEventPayload payload) {
        // TODO
    }

    @Override
    public String getHighlightValueAsString() {
        return getHighlightValue();
    }

}

@@ -0,0 +1,32 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.Instance;
import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMissingOpenAccess extends UpdateInfo<Instance> {

    public static List<EnrichMissingOpenAccess> findUpdates(final Result source, final Result target) {
        // return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
        return Arrays.asList();
    }

    private EnrichMissingOpenAccess(final Instance highlightValue, final float trust) {
        super("ENRICH/MISSING/OPENACCESS_VERSION", highlightValue, trust);
    }

    @Override
    public void compileHighlight(final OpenAireEventPayload payload) {
        payload.getHighlight().getInstances().add(getHighlightValue());
    }

    @Override
    public String getHighlightValueAsString() {
        return getHighlightValue().getUrl();
    }

}

@@ -0,0 +1,32 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.broker.objects.Pid;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMissingPid extends UpdateInfo<Pid> {

    public static List<EnrichMissingPid> findUpdates(final Result source, final Result target) {
        // return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
        return Arrays.asList();
    }

    private EnrichMissingPid(final Pid highlightValue, final float trust) {
        super("ENRICH/MISSING/PID", highlightValue, trust);
    }

    @Override
    public void compileHighlight(final OpenAireEventPayload payload) {
        payload.getHighlight().getPids().add(getHighlightValue());
    }

    @Override
    public String getHighlightValueAsString() {
        return getHighlightValue().getType() + "::" + getHighlightValue().getValue();
    }

}

@@ -0,0 +1,33 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.broker.objects.Project;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMissingProject extends UpdateInfo<Project> {

    public static List<EnrichMissingProject> findUpdates(final Result source, final Result target) {
        // return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
        return Arrays.asList();
    }

    private EnrichMissingProject(final Project highlightValue, final float trust) {
        super("ENRICH/MISSING/PROJECT", highlightValue, trust);
    }

    @Override
    public void compileHighlight(final OpenAireEventPayload payload) {
        payload.getHighlight().getProjects().add(getHighlightValue());
    }

    @Override
    public String getHighlightValueAsString() {
        return getHighlightValue().getFunder() + "::" + getHighlightValue().getFundingProgram()
            + getHighlightValue().getCode();
    }

}

@@ -0,0 +1,31 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMissingPublicationDate extends UpdateInfo<String> {

    public static List<EnrichMissingPublicationDate> findUpdates(final Result source, final Result target) {
        // return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
        return Arrays.asList();
    }

    private EnrichMissingPublicationDate(final String highlightValue, final float trust) {
        super("ENRICH/MISSING/PUBLICATION_DATE", highlightValue, trust);
    }

    @Override
    public void compileHighlight(final OpenAireEventPayload payload) {
        payload.getHighlight().setPublicationdate(getHighlightValue());
    }

    @Override
    public String getHighlightValueAsString() {
        return getHighlightValue();
    }

}

@@ -0,0 +1,36 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMissingSubject extends UpdateInfo<String> {

    public static List<EnrichMissingSubject> findUpdates(final Result source, final Result target) {
        // MESHEUROPMC
        // ARXIV
        // JEL
        // DDC
        // ACM

        return Arrays.asList();
    }

    private EnrichMissingSubject(final String subjectClassification, final String highlightValue, final float trust) {
        super("ENRICH/MISSING/SUBJECT/" + subjectClassification, highlightValue, trust);
    }

    @Override
    public void compileHighlight(final OpenAireEventPayload payload) {
        payload.getHighlight().getSubjects().add(getHighlightValue());
    }

    @Override
    public String getHighlightValueAsString() {
        return getHighlightValue();
    }

}

@@ -0,0 +1,32 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.Instance;
import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMoreOpenAccess extends UpdateInfo<Instance> {

    public static List<EnrichMoreOpenAccess> findUpdates(final Result source, final Result target) {
        // return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
        return Arrays.asList();
    }

    private EnrichMoreOpenAccess(final Instance highlightValue, final float trust) {
        super("ENRICH/MORE/OPENACCESS_VERSION", highlightValue, trust);
    }

    @Override
    public void compileHighlight(final OpenAireEventPayload payload) {
        payload.getHighlight().getInstances().add(getHighlightValue());
    }

    @Override
    public String getHighlightValueAsString() {
        return getHighlightValue().getUrl();
    }

}

@@ -0,0 +1,32 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.broker.objects.Pid;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMorePid extends UpdateInfo<Pid> {

    public static List<EnrichMorePid> findUpdates(final Result source, final Result target) {
        // return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
        return Arrays.asList();
    }

    private EnrichMorePid(final Pid highlightValue, final float trust) {
        super("ENRICH/MORE/PID", highlightValue, trust);
    }

    @Override
    public void compileHighlight(final OpenAireEventPayload payload) {
        payload.getHighlight().getPids().add(getHighlightValue());
    }

    @Override
    public String getHighlightValueAsString() {
        return getHighlightValue().getType() + "::" + getHighlightValue().getValue();
    }

}

@@ -0,0 +1,36 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMoreSubject extends UpdateInfo<String> {

    public static List<EnrichMoreSubject> findUpdates(final Result source, final Result target) {
        // MESHEUROPMC
        // ARXIV
        // JEL
        // DDC
        // ACM

        return Arrays.asList();
    }

    private EnrichMoreSubject(final String subjectClassification, final String highlightValue, final float trust) {
        super("ENRICH/MORE/SUBJECT/" + subjectClassification, highlightValue, trust);
    }

    @Override
    public void compileHighlight(final OpenAireEventPayload payload) {
        payload.getHighlight().getSubjects().add(getHighlightValue());
    }

    @Override
    public String getHighlightValueAsString() {
        return getHighlightValue();
    }

}

@@ -1,86 +0,0 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.ArrayList;
import java.util.List;

import eu.dnetlib.dhp.broker.model.EventFactory;
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
import eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets.EnrichMissingDatasetIsReferencedBy;
import eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets.EnrichMissingDatasetIsRelatedTo;
import eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets.EnrichMissingDatasetIsSupplementedBy;
import eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets.EnrichMissingDatasetIsSupplementedTo;
import eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets.EnrichMissingDatasetReferences;
import eu.dnetlib.dhp.broker.oa.matchers.relatedProjects.EnrichMissingProject;
import eu.dnetlib.dhp.broker.oa.matchers.relatedProjects.EnrichMoreProject;
import eu.dnetlib.dhp.broker.oa.matchers.relatedPublications.EnrichMissingPublicationIsReferencedBy;
import eu.dnetlib.dhp.broker.oa.matchers.relatedPublications.EnrichMissingPublicationIsRelatedTo;
import eu.dnetlib.dhp.broker.oa.matchers.relatedPublications.EnrichMissingPublicationIsSupplementedBy;
import eu.dnetlib.dhp.broker.oa.matchers.relatedPublications.EnrichMissingPublicationIsSupplementedTo;
import eu.dnetlib.dhp.broker.oa.matchers.relatedPublications.EnrichMissingPublicationReferences;
import eu.dnetlib.dhp.broker.oa.matchers.relatedSoftware.EnrichMissingSoftware;
import eu.dnetlib.dhp.broker.oa.matchers.relatedSoftware.EnrichMoreSoftware;
import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMissingAbstract;
import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMissingAuthorOrcid;
import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMissingOpenAccess;
import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMissingPid;
import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMissingPublicationDate;
import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMissingSubject;
import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMoreOpenAccess;
import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMorePid;
import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMoreSubject;
import eu.dnetlib.dhp.broker.oa.util.aggregators.simple.ResultGroup;
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
import eu.dnetlib.pace.config.DedupConfig;

public class EventFinder {

    private static List<UpdateMatcher<?>> matchers = new ArrayList<>();
    static {
        matchers.add(new EnrichMissingAbstract());
        matchers.add(new EnrichMissingAuthorOrcid());
        matchers.add(new EnrichMissingOpenAccess());
        matchers.add(new EnrichMissingPid());
        matchers.add(new EnrichMissingPublicationDate());
        matchers.add(new EnrichMissingSubject());
        matchers.add(new EnrichMoreOpenAccess());
        matchers.add(new EnrichMorePid());
        matchers.add(new EnrichMoreSubject());

        // Advanced matchers
        matchers.add(new EnrichMissingProject());
        matchers.add(new EnrichMoreProject());
        matchers.add(new EnrichMissingSoftware());
        matchers.add(new EnrichMoreSoftware());
        matchers.add(new EnrichMissingPublicationIsRelatedTo());
        matchers.add(new EnrichMissingPublicationIsReferencedBy());
        matchers.add(new EnrichMissingPublicationReferences());
        matchers.add(new EnrichMissingPublicationIsSupplementedTo());
        matchers.add(new EnrichMissingPublicationIsSupplementedBy());
        matchers.add(new EnrichMissingDatasetIsRelatedTo());
        matchers.add(new EnrichMissingDatasetIsReferencedBy());
        matchers.add(new EnrichMissingDatasetReferences());
        matchers.add(new EnrichMissingDatasetIsSupplementedTo());
        matchers.add(new EnrichMissingDatasetIsSupplementedBy());
        matchers.add(new EnrichMissingAbstract()); // note: duplicate registration, already added first in this block
    }

    public static EventGroup generateEvents(final ResultGroup results, final DedupConfig dedupConfig) {
        final List<UpdateInfo<?>> list = new ArrayList<>();

        for (final ResultWithRelations target : results.getData()) {
            for (final UpdateMatcher<?> matcher : matchers) {
                list.addAll(matcher.searchUpdatesForRecord(target, results.getData(), dedupConfig));
            }
        }

        return asEventGroup(list);
    }

    private static EventGroup asEventGroup(final List<UpdateInfo<?>> list) {
        final EventGroup events = new EventGroup();
        list.stream().map(EventFactory::newBrokerEvent).forEach(events::addElement);
        return events;
    }

}

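The finder above is the single entry point called once per group of duplicate records. A minimal driver sketch, assuming a populated ResultGroup and a DedupConfig loaded elsewhere (e.g. from the dedup profile the workflow below passes in); the method name is illustrative, everything else comes from this diff:

    // Sketch only: run all registered matchers over one group of duplicates
    // and print the broker events they produce.
    static void printEvents(final ResultGroup group, final DedupConfig dedupConfig) {
        final EventGroup events = EventFinder.generateEvents(group, dedupConfig);
        for (final Event e : events.getData()) {
            System.out.println(e);
        }
    }
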
@@ -1,33 +0,0 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

import eu.dnetlib.dhp.broker.model.Event;

public class EventGroup implements Serializable {

    /**
     *
     */
    private static final long serialVersionUID = 765977943803533130L;

    private final List<Event> data = new ArrayList<>();

    public List<Event> getData() {
        return data;
    }

    public EventGroup addElement(final Event elem) {
        data.add(elem);
        return this;
    }

    public EventGroup addGroup(final EventGroup group) {
        data.addAll(group.getData());
        return this;
    }

}

@@ -1,23 +0,0 @@

package eu.dnetlib.dhp.broker.oa.util;

public class TrustUtils {

    public static float rescale(final double score, final double threshold) {
        if (score >= BrokerConstants.MAX_TRUST) {
            return BrokerConstants.MAX_TRUST;
        }

        final double val = (score - threshold) * (BrokerConstants.MAX_TRUST - BrokerConstants.MIN_TRUST)
            / (BrokerConstants.MAX_TRUST - threshold);

        if (val < BrokerConstants.MIN_TRUST) {
            return BrokerConstants.MIN_TRUST;
        }
        if (val > BrokerConstants.MAX_TRUST) {
            return BrokerConstants.MAX_TRUST;
        }

        return (float) val;
    }
}

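For intuition: rescale maps the dedup similarity score linearly from the interval [threshold, MAX_TRUST] onto [MIN_TRUST, MAX_TRUST], clamping at both ends. A worked sketch with the constants from BrokerConstants (MIN_TRUST = 0.25, MAX_TRUST = 1.0); the 0.8 threshold is illustrative, not taken from this diff:

    static void demo() {
        System.out.println(TrustUtils.rescale(0.80, 0.8)); // 0.25  : at the threshold, trust bottoms out at MIN_TRUST
        System.out.println(TrustUtils.rescale(0.90, 0.8)); // 0.375 : (0.9 - 0.8) * 0.75 / 0.2
        System.out.println(TrustUtils.rescale(1.00, 0.8)); // 1.0   : score >= MAX_TRUST short-circuits
        System.out.println(TrustUtils.rescale(0.50, 0.8)); // 0.25  : a negative rescaled value clamps to MIN_TRUST
    }
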
@@ -1,139 +1,36 @@
 
 package eu.dnetlib.dhp.broker.oa.util;
 
-import java.util.List;
-import java.util.function.BiConsumer;
-import java.util.function.Function;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-
 import eu.dnetlib.broker.objects.OpenAireEventPayload;
-import eu.dnetlib.broker.objects.Provenance;
-import eu.dnetlib.broker.objects.Publication;
-import eu.dnetlib.dhp.broker.model.Topic;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
-import eu.dnetlib.dhp.schema.oaf.Instance;
-import eu.dnetlib.dhp.schema.oaf.KeyValue;
-import eu.dnetlib.dhp.schema.oaf.Result;
-import eu.dnetlib.pace.config.DedupConfig;
-import eu.dnetlib.pace.model.MapDocument;
-import eu.dnetlib.pace.tree.support.TreeProcessor;
-import eu.dnetlib.pace.util.MapDocumentUtil;
 
-public final class UpdateInfo<T> {
+public abstract class UpdateInfo<T> {
 
-	private final Topic topic;
+	private final String topic;
 
 	private final T highlightValue;
 
-	private final ResultWithRelations source;
-
-	private final ResultWithRelations target;
-
-	private final BiConsumer<Publication, T> compileHighlight;
-
-	private final Function<T, String> highlightToString;
-
 	private final float trust;
 
-	private static final Logger log = LoggerFactory.getLogger(UpdateInfo.class);
-
-	public UpdateInfo(final Topic topic, final T highlightValue, final ResultWithRelations source,
-		final ResultWithRelations target,
-		final BiConsumer<Publication, T> compileHighlight,
-		final Function<T, String> highlightToString,
-		final DedupConfig dedupConfig) {
+	protected UpdateInfo(final String topic, final T highlightValue, final float trust) {
 		this.topic = topic;
 		this.highlightValue = highlightValue;
-		this.source = source;
-		this.target = target;
-		this.compileHighlight = compileHighlight;
-		this.highlightToString = highlightToString;
-		this.trust = calculateTrust(dedupConfig, source.getResult(), target.getResult());
+		this.trust = trust;
 	}
 
 	public T getHighlightValue() {
 		return highlightValue;
 	}
 
-	public ResultWithRelations getSource() {
-		return source;
-	}
-
-	public ResultWithRelations getTarget() {
-		return target;
-	}
-
-	private float calculateTrust(final DedupConfig dedupConfig, final Result r1, final Result r2) {
-		try {
-			final ObjectMapper objectMapper = new ObjectMapper();
-			final MapDocument doc1 = MapDocumentUtil
-				.asMapDocumentWithJPath(dedupConfig, objectMapper.writeValueAsString(r1));
-			final MapDocument doc2 = MapDocumentUtil
-				.asMapDocumentWithJPath(dedupConfig, objectMapper.writeValueAsString(r2));
-
-			final double score = new TreeProcessor(dedupConfig).computeScore(doc1, doc2);
-			final double threshold = dedupConfig.getWf().getThreshold();
-
-			return TrustUtils.rescale(score, threshold);
-		} catch (final Exception e) {
-			log.error("Error computing score between results", e);
-			return BrokerConstants.MIN_TRUST;
-		}
-	}
-
-	protected Topic getTopic() {
-		return topic;
-	}
-
-	public String getTopicPath() {
-		return topic.getPath();
-	}
-
 	public float getTrust() {
 		return trust;
 	}
 
-	public String getHighlightValueAsString() {
-		return highlightToString.apply(getHighlightValue());
+	public String getTopic() {
+		return topic;
 	}
 
-	public OpenAireEventPayload asBrokerPayload() {
+	abstract public void compileHighlight(OpenAireEventPayload payload);
 
-		final Publication p = ConversionUtils.oafResultToBrokerPublication(getSource().getResult());
-		compileHighlight.accept(p, getHighlightValue());
-
-		final Publication hl = new Publication();
-		compileHighlight.accept(hl, getHighlightValue());
-
-		final String provId = getSource().getResult().getOriginalId().stream().findFirst().orElse(null);
-		final String provRepo = getSource()
-			.getResult()
-			.getCollectedfrom()
-			.stream()
-			.map(KeyValue::getValue)
-			.findFirst()
-			.orElse(null);
-		final String provUrl = getSource()
-			.getResult()
-			.getInstance()
-			.stream()
-			.map(Instance::getUrl)
-			.flatMap(List::stream)
-			.findFirst()
-			.orElse(null);
-		;
-
-		final Provenance provenance = new Provenance().setId(provId).setRepositoryName(provRepo).setUrl(provUrl);
-
-		return new OpenAireEventPayload()
-			.setPublication(p)
-			.setHighlight(hl)
-			.setTrust(trust)
-			.setProvenance(provenance);
-	}
+	abstract public String getHighlightValueAsString();
 
 }

@@ -1,51 +0,0 @@

package eu.dnetlib.dhp.broker.oa.util.aggregators.simple;

import org.apache.spark.sql.Encoder;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.expressions.Aggregator;

import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
import eu.dnetlib.dhp.schema.oaf.Relation;
import scala.Tuple2;

public class ResultAggregator extends Aggregator<Tuple2<ResultWithRelations, Relation>, ResultGroup, ResultGroup> {

    /**
     *
     */
    private static final long serialVersionUID = -1492327874705585538L;

    @Override
    public ResultGroup zero() {
        return new ResultGroup();
    }

    @Override
    public ResultGroup reduce(final ResultGroup group, final Tuple2<ResultWithRelations, Relation> t) {
        return group.addElement(t._1);
    }

    @Override
    public ResultGroup merge(final ResultGroup g1, final ResultGroup g2) {
        return g1.addGroup(g2);
    }

    @Override
    public ResultGroup finish(final ResultGroup group) {
        return group;
    }

    @Override
    public Encoder<ResultGroup> bufferEncoder() {
        return Encoders.kryo(ResultGroup.class);
    }

    @Override
    public Encoder<ResultGroup> outputEncoder() {
        return Encoders.kryo(ResultGroup.class);
    }

}

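How this aggregator is wired into the Spark job is not shown in this diff; the sketch below is a hedged reconstruction. The assumption is that (result, relation) pairs, e.g. produced by a join over the isMergedIn relations, are grouped by the relation target so that all duplicates pointing at the same dedup root collapse into one ResultGroup:

    import org.apache.spark.api.java.function.FilterFunction;
    import org.apache.spark.api.java.function.MapFunction;
    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Encoders;
    import org.apache.spark.sql.TypedColumn;

    import eu.dnetlib.dhp.broker.oa.util.aggregators.simple.ResultAggregator;
    import eu.dnetlib.dhp.broker.oa.util.aggregators.simple.ResultGroup;
    import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
    import eu.dnetlib.dhp.schema.oaf.Relation;
    import scala.Tuple2;

    public class GroupingSketch {

        // Sketch only: the join producing `pairs` and the grouping key are
        // assumptions, not part of this diff.
        public static Dataset<ResultGroup> groupByDedupRoot(
            final Dataset<Tuple2<ResultWithRelations, Relation>> pairs) {
            final TypedColumn<Tuple2<ResultWithRelations, Relation>, ResultGroup> aggr =
                new ResultAggregator().toColumn();
            return pairs
                .groupByKey(
                    (MapFunction<Tuple2<ResultWithRelations, Relation>, String>) t -> t._2.getTarget(),
                    Encoders.STRING())
                .agg(aggr)
                .map(
                    (MapFunction<Tuple2<String, ResultGroup>, ResultGroup>) t -> t._2,
                    Encoders.kryo(ResultGroup.class))
                .filter((FilterFunction<ResultGroup>) g -> g.isValid()); // keep only groups with more than one result
        }
    }
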
@@ -1,36 +0,0 @@

package eu.dnetlib.dhp.broker.oa.util.aggregators.simple;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;

public class ResultGroup implements Serializable {

    /**
     *
     */
    private static final long serialVersionUID = -3360828477088669296L;

    private final List<ResultWithRelations> data = new ArrayList<>();

    public List<ResultWithRelations> getData() {
        return data;
    }

    public ResultGroup addElement(final ResultWithRelations elem) {
        data.add(elem);
        return this;
    }

    public ResultGroup addGroup(final ResultGroup group) {
        data.addAll(group.getData());
        return this;
    }

    public boolean isValid() {
        return data.size() > 1;
    }
}

@@ -1,30 +0,0 @@

package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;

import eu.dnetlib.dhp.schema.oaf.Dataset;

public class RelatedDataset {

    private final String source;
    private final String relType;
    private final Dataset relDataset;

    public RelatedDataset(final String source, final String relType, final Dataset relDataset) {
        this.source = source;
        this.relType = relType;
        this.relDataset = relDataset;
    }

    public String getSource() {
        return source;
    }

    public String getRelType() {
        return relType;
    }

    public Dataset getRelDataset() {
        return relDataset;
    }

}

@@ -1,28 +0,0 @@

package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;

import eu.dnetlib.dhp.schema.oaf.Dataset;
import eu.dnetlib.dhp.schema.oaf.Project;
import eu.dnetlib.dhp.schema.oaf.Publication;
import eu.dnetlib.dhp.schema.oaf.Software;

public class RelatedEntityFactory {

    @SuppressWarnings("unchecked")
    public static <RT, T> RT newRelatedEntity(final String sourceId, final String relType, final T target,
        final Class<RT> clazz) {
        if (clazz == RelatedProject.class) {
            return (RT) new RelatedProject(sourceId, relType, (Project) target);
        }
        if (clazz == RelatedSoftware.class) {
            return (RT) new RelatedSoftware(sourceId, relType, (Software) target);
        }
        if (clazz == RelatedDataset.class) {
            return (RT) new RelatedDataset(sourceId, relType, (Dataset) target);
        }
        if (clazz == RelatedPublication.class) {
            return (RT) new RelatedPublication(sourceId, relType, (Publication) target);
        }
        return null;
    }
}

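A small usage sketch for the factory; the relation type string is an illustrative placeholder, not a value from this diff:

    // Wrap a Project reached from a result via a relation, choosing the wrapper
    // class at the call site; the factory returns null for an unsupported class.
    static RelatedProject wrapProject(final String sourceId, final Project project) {
        return RelatedEntityFactory.newRelatedEntity(sourceId, "isProducedBy", project, RelatedProject.class);
    }
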
@@ -1,30 +0,0 @@

package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;

import eu.dnetlib.dhp.schema.oaf.Project;

public class RelatedProject {

    private final String source;
    private final String relType;
    private final Project relProject;

    public RelatedProject(final String source, final String relType, final Project relProject) {
        this.source = source;
        this.relType = relType;
        this.relProject = relProject;
    }

    public String getSource() {
        return source;
    }

    public String getRelType() {
        return relType;
    }

    public Project getRelProject() {
        return relProject;
    }

}

@@ -1,30 +0,0 @@

package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;

import eu.dnetlib.dhp.schema.oaf.Publication;

public class RelatedPublication {

    private final String source;
    private final String relType;
    private final Publication relPublication;

    public RelatedPublication(final String source, final String relType, final Publication relPublication) {
        this.source = source;
        this.relType = relType;
        this.relPublication = relPublication;
    }

    public String getSource() {
        return source;
    }

    public String getRelType() {
        return relType;
    }

    public Publication getRelPublication() {
        return relPublication;
    }

}

@@ -1,30 +0,0 @@

package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;

import eu.dnetlib.dhp.schema.oaf.Software;

public class RelatedSoftware {

    private final String source;
    private final String relType;
    private final Software relSoftware;

    public RelatedSoftware(final String source, final String relType, final Software relSoftware) {
        this.source = source;
        this.relType = relType;
        this.relSoftware = relSoftware;
    }

    public String getSource() {
        return source;
    }

    public String getRelType() {
        return relType;
    }

    public Software getRelSoftware() {
        return relSoftware;
    }

}

@@ -1,55 +0,0 @@

package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

import eu.dnetlib.dhp.schema.oaf.Result;

public class ResultWithRelations implements Serializable {

    /**
     *
     */
    private static final long serialVersionUID = -1368401915974311571L;

    private Result result;

    private final List<RelatedDataset> datasets = new ArrayList<>();
    private final List<RelatedPublication> publications = new ArrayList<>();
    private final List<RelatedSoftware> softwares = new ArrayList<>();
    private final List<RelatedProject> projects = new ArrayList<>();

    public ResultWithRelations() {
    }

    public ResultWithRelations(final Result result) {
        this.result = result;
    }

    public Result getResult() {
        return result;
    }

    public List<RelatedDataset> getDatasets() {
        return datasets;
    }

    public List<RelatedPublication> getPublications() {
        return publications;
    }

    public List<RelatedSoftware> getSoftwares() {
        return softwares;
    }

    public List<RelatedProject> getProjects() {
        return projects;
    }

    public void setResult(final Result result) {
        this.result = result;
    }

}

@@ -1,68 +0,0 @@

package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;

import org.apache.spark.sql.Encoder;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.expressions.Aggregator;

import scala.Tuple2;

public class ResultWithRelationsAggregator<T>
    extends Aggregator<Tuple2<ResultWithRelations, T>, ResultWithRelations, ResultWithRelations> {

    /**
     *
     */
    private static final long serialVersionUID = -3687878788861013488L;

    @Override
    public ResultWithRelations zero() {
        return new ResultWithRelations();
    }

    @Override
    public ResultWithRelations finish(final ResultWithRelations g) {
        return g;
    }

    @Override
    public ResultWithRelations reduce(final ResultWithRelations g, final Tuple2<ResultWithRelations, T> t) {
        if (g.getResult() == null) {
            return t._1;
        } else if (t._2 instanceof RelatedSoftware) {
            g.getSoftwares().add((RelatedSoftware) t._2);
        } else if (t._2 instanceof RelatedDataset) {
            g.getDatasets().add((RelatedDataset) t._2);
        } else if (t._2 instanceof RelatedPublication) {
            g.getPublications().add((RelatedPublication) t._2);
        } else if (t._2 instanceof RelatedProject) {
            g.getProjects().add((RelatedProject) t._2);
        }
        return g;
    }

    @Override
    public ResultWithRelations merge(final ResultWithRelations g1, final ResultWithRelations g2) {
        if (g1.getResult() != null) {
            g1.getSoftwares().addAll(g2.getSoftwares());
            g1.getDatasets().addAll(g2.getDatasets());
            g1.getPublications().addAll(g2.getPublications());
            g1.getProjects().addAll(g2.getProjects());
            return g1;
        } else {
            return g2;
        }
    }

    @Override
    public Encoder<ResultWithRelations> bufferEncoder() {
        return Encoders.kryo(ResultWithRelations.class);
    }

    @Override
    public Encoder<ResultWithRelations> outputEncoder() {
        return Encoders.kryo(ResultWithRelations.class);
    }

}

@@ -1,18 +0,0 @@
<configuration>
    <property>
        <name>jobTracker</name>
        <value>yarnRM</value>
    </property>
    <property>
        <name>nameNode</name>
        <value>hdfs://nameservice1</value>
    </property>
    <property>
        <name>oozie.use.system.libpath</name>
        <value>true</value>
    </property>
    <property>
        <name>oozie.action.sharelib.for.spark</name>
        <value>spark2</value>
    </property>
</configuration>

@@ -1,99 +0,0 @@
<workflow-app name="create broker events" xmlns="uri:oozie:workflow:0.5">

    <parameters>
        <property>
            <name>graphInputPath</name>
            <description>the path where the graph is stored</description>
        </property>
        <property>
            <name>eventsOutputPath</name>
            <description>the path where the events will be stored</description>
        </property>
        <property>
            <name>isLookupUrl</name>
            <description>the address of the lookUp service</description>
        </property>
        <property>
            <name>dedupConfProfId</name>
            <description>the id of a valid Dedup Configuration Profile</description>
        </property>

        <property>
            <name>sparkDriverMemory</name>
            <description>memory for driver process</description>
        </property>
        <property>
            <name>sparkExecutorMemory</name>
            <description>memory for individual executor</description>
        </property>
        <property>
            <name>sparkExecutorCores</name>
            <description>number of cores used by single executor</description>
        </property>
        <property>
            <name>oozieActionShareLibForSpark2</name>
            <description>oozie action sharelib for spark 2.*</description>
        </property>
        <property>
            <name>spark2ExtraListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
            <description>spark 2.* extra listeners classname</description>
        </property>
        <property>
            <name>spark2SqlQueryExecutionListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
            <description>spark 2.* sql query execution listeners classname</description>
        </property>
        <property>
            <name>spark2YarnHistoryServerAddress</name>
            <description>spark 2.* yarn history server address</description>
        </property>
        <property>
            <name>spark2EventLogDir</name>
            <description>spark 2.* event log dir location</description>
        </property>
    </parameters>

    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>mapreduce.job.queuename</name>
                <value>${queueName}</value>
            </property>
            <property>
                <name>oozie.launcher.mapred.job.queue.name</name>
                <value>${oozieLauncherQueueName}</value>
            </property>
            <property>
                <name>oozie.action.sharelib.for.spark</name>
                <value>${oozieActionShareLibForSpark2}</value>
            </property>
        </configuration>
    </global>

    <start to="generate_events"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <action name="generate_events">
        <java>
            <prepare>
                <delete path="${eventsOutputPath}"/>
            </prepare>
            <main-class>eu.dnetlib.dhp.broker.oa.GenerateEventsApplication</main-class>
            <arg>--graphPath</arg><arg>${graphInputPath}</arg>
            <arg>--eventsPath</arg><arg>${eventsOutputPath}</arg>
            <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
            <arg>--dedupConfProfile</arg><arg>${dedupConfProfId}</arg>
        </java>
        <ok to="End"/>
        <error to="Kill"/>
    </action>

    <end name="End"/>

</workflow-app>

Some files were not shown because too many files have changed in this diff.