forked from D-Net/dnet-hadoop
integrated changes from master
commit 637653cba3
@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>eu.dnetlib.dhp</groupId>
 		<artifactId>dhp-build</artifactId>
-		<version>1.1.7-SNAPSHOT</version>
+		<version>1.2.1-SNAPSHOT</version>
 	</parent>
 
 	<artifactId>dhp-build-assembly-resources</artifactId>
@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>eu.dnetlib.dhp</groupId>
 		<artifactId>dhp-build</artifactId>
-		<version>1.1.7-SNAPSHOT</version>
+		<version>1.2.1-SNAPSHOT</version>
 	</parent>
 
 	<artifactId>dhp-build-properties-maven-plugin</artifactId>
@@ -1,13 +1,11 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 
 	<modelVersion>4.0.0</modelVersion>
 
 	<groupId>eu.dnetlib.dhp</groupId>
 	<artifactId>dhp-code-style</artifactId>
-	<version>1.1.7-SNAPSHOT</version>
+	<version>1.2.1-SNAPSHOT</version>
 
 	<packaging>jar</packaging>
 
@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>eu.dnetlib.dhp</groupId>
 		<artifactId>dhp</artifactId>
-		<version>1.1.7-SNAPSHOT</version>
+		<version>1.2.1-SNAPSHOT</version>
 	</parent>
 	<artifactId>dhp-build</artifactId>
 	<packaging>pom</packaging>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>eu.dnetlib.dhp</groupId>
 		<artifactId>dhp</artifactId>
-		<version>1.1.7-SNAPSHOT</version>
+		<version>1.2.1-SNAPSHOT</version>
 		<relativePath>../</relativePath>
 	</parent>
 
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>eu.dnetlib.dhp</groupId>
 		<artifactId>dhp</artifactId>
-		<version>1.1.7-SNAPSHOT</version>
+		<version>1.2.1-SNAPSHOT</version>
 		<relativePath>../</relativePath>
 	</parent>
 
@@ -6,36 +6,86 @@ import eu.dnetlib.dhp.schema.oaf.Qualifier;
 public class ModelConstants {
 
 	public static final String DNET_RESULT_TYPOLOGIES = "dnet:result_typologies";
+	public static final String DNET_PUBLICATION_RESOURCE = "dnet:publication_resource";
+	public static final String DNET_ACCESS_MODES = "dnet:access_modes";
+	public static final String DNET_LANGUAGES = "dnet:languages";
+	public static final String DNET_PID_TYPES = "dnet:pid_types";
+	public static final String DNET_DATA_CITE_DATE = "dnet:dataCite_date";
+	public static final String DNET_DATA_CITE_RESOURCE = "dnet:dataCite_resource";
+	public static final String DNET_PROVENANCE_ACTIONS = "dnet:provenanceActions";
+
+	public static final String SYSIMPORT_CROSSWALK_REPOSITORY = "sysimport:crosswalk:repository";
+	public static final String SYSIMPORT_CROSSWALK_ENTITYREGISTRY = "sysimport:crosswalk:entityregistry";
+	public static final String USER_CLAIM = "user:claim";
 
 	public static final String DATASET_RESULTTYPE_CLASSID = "dataset";
 	public static final String PUBLICATION_RESULTTYPE_CLASSID = "publication";
 	public static final String SOFTWARE_RESULTTYPE_CLASSID = "software";
 	public static final String ORP_RESULTTYPE_CLASSID = "other";
 
-	public static Qualifier PUBLICATION_DEFAULT_RESULTTYPE = new Qualifier();
-	public static Qualifier DATASET_DEFAULT_RESULTTYPE = new Qualifier();
-	public static Qualifier SOFTWARE_DEFAULT_RESULTTYPE = new Qualifier();
-	public static Qualifier ORP_DEFAULT_RESULTTYPE = new Qualifier();
-
-	static {
-		PUBLICATION_DEFAULT_RESULTTYPE.setClassid(PUBLICATION_RESULTTYPE_CLASSID);
-		PUBLICATION_DEFAULT_RESULTTYPE.setClassname(PUBLICATION_RESULTTYPE_CLASSID);
-		PUBLICATION_DEFAULT_RESULTTYPE.setSchemeid(DNET_RESULT_TYPOLOGIES);
-		PUBLICATION_DEFAULT_RESULTTYPE.setSchemename(DNET_RESULT_TYPOLOGIES);
-
-		DATASET_DEFAULT_RESULTTYPE.setClassid(DATASET_RESULTTYPE_CLASSID);
-		DATASET_DEFAULT_RESULTTYPE.setClassname(DATASET_RESULTTYPE_CLASSID);
-		DATASET_DEFAULT_RESULTTYPE.setSchemeid(DNET_RESULT_TYPOLOGIES);
-		DATASET_DEFAULT_RESULTTYPE.setSchemename(DNET_RESULT_TYPOLOGIES);
-
-		SOFTWARE_DEFAULT_RESULTTYPE.setClassid(SOFTWARE_RESULTTYPE_CLASSID);
-		SOFTWARE_DEFAULT_RESULTTYPE.setClassname(SOFTWARE_RESULTTYPE_CLASSID);
-		SOFTWARE_DEFAULT_RESULTTYPE.setSchemeid(DNET_RESULT_TYPOLOGIES);
-		SOFTWARE_DEFAULT_RESULTTYPE.setSchemename(DNET_RESULT_TYPOLOGIES);
-
-		ORP_DEFAULT_RESULTTYPE.setClassid(ORP_RESULTTYPE_CLASSID);
-		ORP_DEFAULT_RESULTTYPE.setClassname(ORP_RESULTTYPE_CLASSID);
-		ORP_DEFAULT_RESULTTYPE.setSchemeid(DNET_RESULT_TYPOLOGIES);
-		ORP_DEFAULT_RESULTTYPE.setSchemename(DNET_RESULT_TYPOLOGIES);
-	}
+	public static final String RESULT_RESULT = "resultResult";
+	public static final String PUBLICATION_DATASET = "publicationDataset";
+	public static final String IS_RELATED_TO = "isRelatedTo";
+	public static final String SUPPLEMENT = "supplement";
+	public static final String IS_SUPPLEMENT_TO = "isSupplementTo";
+	public static final String IS_SUPPLEMENTED_BY = "isSupplementedBy";
+	public static final String PART = "part";
+	public static final String IS_PART_OF = "IsPartOf";
+	public static final String HAS_PARTS = "HasParts";
+	public static final String RELATIONSHIP = "relationship";
+
+	public static final String RESULT_PROJECT = "resultProject";
+	public static final String OUTCOME = "outcome";
+	public static final String IS_PRODUCED_BY = "isProducedBy";
+	public static final String PRODUCES = "produces";
+
+	public static final String DATASOURCE_ORGANIZATION = "datasourceOrganization";
+	public static final String PROVISION = "provision";
+	public static final String IS_PROVIDED_BY = "isProvidedBy";
+	public static final String PROVIDES = "provides";
+
+	public static final String PROJECT_ORGANIZATION = "projectOrganization";
+	public static final String PARTICIPATION = "participation";
+	public static final String HAS_PARTICIPANT = "hasParticipant";
+	public static final String IS_PARTICIPANT = "isParticipant";
+
+	public static final String UNKNOWN = "UNKNOWN";
+	public static final String NOT_AVAILABLE = "not available";
+
+	public static final Qualifier PUBLICATION_DEFAULT_RESULTTYPE = qualifier(
+		PUBLICATION_RESULTTYPE_CLASSID, PUBLICATION_RESULTTYPE_CLASSID,
+		DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
+
+	public static final Qualifier DATASET_DEFAULT_RESULTTYPE = qualifier(
+		DATASET_RESULTTYPE_CLASSID, DATASET_RESULTTYPE_CLASSID,
+		DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
+
+	public static final Qualifier SOFTWARE_DEFAULT_RESULTTYPE = qualifier(
+		SOFTWARE_RESULTTYPE_CLASSID, SOFTWARE_RESULTTYPE_CLASSID,
+		DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
+
+	public static final Qualifier ORP_DEFAULT_RESULTTYPE = qualifier(
+		ORP_RESULTTYPE_CLASSID, ORP_RESULTTYPE_CLASSID,
+		DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
+
+	public static final Qualifier REPOSITORY_PROVENANCE_ACTIONS = qualifier(
+		SYSIMPORT_CROSSWALK_REPOSITORY, SYSIMPORT_CROSSWALK_REPOSITORY,
+		DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS);
+
+	public static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION = qualifier(
+		SYSIMPORT_CROSSWALK_ENTITYREGISTRY, SYSIMPORT_CROSSWALK_ENTITYREGISTRY,
+		DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS);
+
+	private static Qualifier qualifier(
+		final String classid,
+		final String classname,
+		final String schemeid,
+		final String schemename) {
+		final Qualifier q = new Qualifier();
+		q.setClassid(classid);
+		q.setClassname(classname);
+		q.setSchemeid(schemeid);
+		q.setSchemename(schemename);
+		return q;
+	}
 }
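Editor's note: the hunk above replaces the mutable Qualifier fields and the static initializer with immutable constants built through a private qualifier(...) factory. A minimal sketch of how downstream code can rely on the new constants follows; the Publication class and its setResulttype setter are assumptions based on the dhp-schemas module and are not part of this commit.

```java
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.Publication;

public class ModelConstantsUsageSketch {

	public static void main(final String[] args) {
		// hypothetical usage: tag a new Publication with the shared default result-type qualifier
		final Publication p = new Publication();
		p.setResulttype(ModelConstants.PUBLICATION_DEFAULT_RESULTTYPE);

		// the qualifier is pre-populated by the qualifier(...) factory shown in the diff
		System.out.println(ModelConstants.PUBLICATION_DEFAULT_RESULTTYPE.getClassid()); // "publication"
	}
}
```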
@@ -1,11 +1,10 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 	<modelVersion>4.0.0</modelVersion>
 	<parent>
 		<groupId>eu.dnetlib.dhp</groupId>
 		<artifactId>dhp-workflows</artifactId>
-		<version>1.1.7-SNAPSHOT</version>
+		<version>1.2.1-SNAPSHOT</version>
 	</parent>
 	<artifactId>dhp-actionmanager</artifactId>
 
@@ -84,8 +84,11 @@ public class MigrateActionSet {
 		final List<Path> sourcePaths = getSourcePaths(sourceNN, isLookUp);
 		log
 			.info(
-				"paths to process:\n{}",
-				sourcePaths.stream().map(p -> p.toString()).collect(Collectors.joining("\n")));
+				"paths to process:\n{}", sourcePaths
+					.stream()
+					.map(p -> p.toString())
+					.collect(Collectors.joining("\n")));
 
 		for (Path source : sourcePaths) {
 
 			if (!sourceFS.exists(source)) {
@@ -119,9 +122,8 @@
 			}
 		}
 
-		props
-			.setProperty(
-				TARGET_PATHS, targetPaths.stream().map(p -> p.toString()).collect(Collectors.joining(",")));
+		final String targetPathsCsv = targetPaths.stream().map(p -> p.toString()).collect(Collectors.joining(","));
+		props.setProperty(TARGET_PATHS, targetPathsCsv);
 		File file = new File(System.getProperty("oozie.action.output.properties"));
 
 		try (OutputStream os = new FileOutputStream(file)) {
@@ -1,12 +1,10 @@
 
 package eu.dnetlib.dhp.actionmanager.migration;
 
-import static eu.dnetlib.data.proto.KindProtos.Kind.entity;
-import static eu.dnetlib.data.proto.KindProtos.Kind.relation;
-import static eu.dnetlib.data.proto.TypeProtos.*;
-import static eu.dnetlib.data.proto.TypeProtos.Type.*;
+import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
 
 import java.io.Serializable;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Optional;
 import java.util.stream.Collectors;
@@ -21,10 +19,6 @@ import eu.dnetlib.dhp.schema.oaf.*;
 
 public class ProtoConverter implements Serializable {
 
-	public static final String UNKNOWN = "UNKNOWN";
-	public static final String NOT_AVAILABLE = "not available";
-	public static final String DNET_ACCESS_MODES = "dnet:access_modes";
-
 	public static Oaf convert(OafProtos.Oaf oaf) {
 		try {
 			switch (oaf.getKind()) {
@@ -64,6 +58,7 @@ public class ProtoConverter implements Serializable {
 			case result:
 				final Result r = convertResult(oaf);
 				r.setInstance(convertInstances(oaf));
+				r.setExternalReference(convertExternalRefs(oaf));
 				return r;
 			case project:
 				return convertProject(oaf);
@@ -94,13 +89,44 @@
 		i.setHostedby(mapKV(ri.getHostedby()));
 		i.setInstancetype(mapQualifier(ri.getInstancetype()));
 		i.setLicense(mapStringField(ri.getLicense()));
-		i.setUrl(ri.getUrlList());
+		i
+			.setUrl(
+				ri.getUrlList() != null ? ri
+					.getUrlList()
+					.stream()
+					.distinct()
+					.collect(Collectors.toCollection(ArrayList::new)) : null);
 		i.setRefereed(mapStringField(ri.getRefereed()));
 		i.setProcessingchargeamount(mapStringField(ri.getProcessingchargeamount()));
 		i.setProcessingchargecurrency(mapStringField(ri.getProcessingchargecurrency()));
 		return i;
 	}
 
+	private static List<ExternalReference> convertExternalRefs(OafProtos.Oaf oaf) {
+		ResultProtos.Result r = oaf.getEntity().getResult();
+		if (r.getExternalReferenceCount() > 0) {
+			return r
+				.getExternalReferenceList()
+				.stream()
+				.map(e -> convertExtRef(e))
+				.collect(Collectors.toList());
+		}
+		return Lists.newArrayList();
+	}
+
+	private static ExternalReference convertExtRef(ResultProtos.Result.ExternalReference e) {
+		ExternalReference ex = new ExternalReference();
+		ex.setUrl(e.getUrl());
+		ex.setSitename(e.getSitename());
+		ex.setRefidentifier(e.getRefidentifier());
+		ex.setQuery(e.getQuery());
+		ex.setQualifier(mapQualifier(e.getQualifier()));
+		ex.setLabel(e.getLabel());
+		ex.setDescription(e.getDescription());
+		ex.setDataInfo(ex.getDataInfo());
+		return ex;
+	}
+
 	private static Organization convertOrganization(OafProtos.Oaf oaf) {
 		final OrganizationProtos.Organization.Metadata m = oaf.getEntity().getOrganization().getMetadata();
 		final Organization org = setOaf(new Organization(), oaf);
@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>eu.dnetlib.dhp</groupId>
 		<artifactId>dhp-workflows</artifactId>
-		<version>1.1.7-SNAPSHOT</version>
+		<version>1.2.1-SNAPSHOT</version>
 	</parent>
 	<artifactId>dhp-aggregation</artifactId>
 
@@ -1,17 +1,21 @@
 
 package eu.dnetlib.dhp.collection;
 
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+
 import java.io.ByteArrayInputStream;
 import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Objects;
+import java.util.Optional;
 
 import org.apache.commons.cli.*;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
@@ -23,6 +27,8 @@ import org.apache.spark.util.LongAccumulator;
 import org.dom4j.Document;
 import org.dom4j.Node;
 import org.dom4j.io.SAXReader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 
@@ -35,6 +41,8 @@ import eu.dnetlib.message.MessageType;
 
 public class GenerateNativeStoreSparkJob {
 
+	private static final Logger log = LoggerFactory.getLogger(GenerateNativeStoreSparkJob.class);
+
 	public static MetadataRecord parseRecord(
 		final String input,
 		final String xpath,
@@ -78,24 +86,28 @@
 		final Provenance provenance = jsonMapper.readValue(parser.get("provenance"), Provenance.class);
 		final long dateOfCollection = new Long(parser.get("dateOfCollection"));
 
-		final SparkSession spark = SparkSession
-			.builder()
-			.appName("GenerateNativeStoreSparkJob")
-			.master(parser.get("master"))
-			.getOrCreate();
+		Boolean isSparkSessionManaged = Optional
+			.ofNullable(parser.get("isSparkSessionManaged"))
+			.map(Boolean::valueOf)
+			.orElse(Boolean.TRUE);
+		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
 
 		final Map<String, String> ongoingMap = new HashMap<>();
 		final Map<String, String> reportMap = new HashMap<>();
 
 		final boolean test = parser.get("isTest") == null ? false : Boolean.valueOf(parser.get("isTest"));
 
-		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
+		SparkConf conf = new SparkConf();
+		runWithSparkSession(
+			conf,
+			isSparkSessionManaged,
+			spark -> {
+				final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 
 				final JavaPairRDD<IntWritable, Text> inputRDD = sc
 					.sequenceFile(parser.get("input"), IntWritable.class, Text.class);
 
 				final LongAccumulator totalItems = sc.sc().longAccumulator("TotalItems");
 
 				final LongAccumulator invalidRecords = sc.sc().longAccumulator("InvalidRecords");
 
 				final MessageManager manager = new MessageManager(
@@ -157,5 +169,7 @@
 						false);
 					manager.close();
 				}
+			});
+
 	}
 }
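Editor's note: both Spark jobs touched by this commit stop building their own SparkSession and delegate to the shared SparkSessionSupport.runWithSparkSession helper, driven by the optional isSparkSessionManaged argument. A rough sketch of the pattern follows, assuming only what the diff shows (a SparkConf, a Boolean flag, and a consumer of the SparkSession); the job body is illustrative.

```java
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.util.Optional;

import org.apache.spark.SparkConf;

public class RunWithSparkSessionSketch {

	public static void main(final String[] args) {
		// when the argument is absent the job defaults to managing (and stopping) the session itself
		final Boolean isSparkSessionManaged = Optional
			.ofNullable(args.length > 0 ? args[0] : null)
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);

		final SparkConf conf = new SparkConf();
		runWithSparkSession(conf, isSparkSessionManaged, spark -> {
			// job logic goes here; the helper takes care of the session lifecycle when managed
			spark.sparkContext().setLogLevel("WARN");
		});
	}
}
```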
@@ -1,13 +1,17 @@
 
 package eu.dnetlib.dhp.transformation;
 
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+
 import java.io.ByteArrayInputStream;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Objects;
+import java.util.Optional;
 
 import org.apache.commons.cli.*;
 import org.apache.commons.io.IOUtils;
+import org.apache.spark.SparkConf;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoder;
 import org.apache.spark.sql.Encoders;
@@ -17,8 +21,11 @@ import org.dom4j.Document;
 import org.dom4j.DocumentException;
 import org.dom4j.Node;
 import org.dom4j.io.SAXReader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.collection.GenerateNativeStoreSparkJob;
 import eu.dnetlib.dhp.model.mdstore.MetadataRecord;
 import eu.dnetlib.dhp.transformation.vocabulary.Vocabulary;
 import eu.dnetlib.dhp.transformation.vocabulary.VocabularyHelper;
@@ -29,6 +36,8 @@ import eu.dnetlib.message.MessageType;
 
 public class TransformSparkJobNode {
 
+	private static final Logger log = LoggerFactory.getLogger(TransformSparkJobNode.class);
+
 	public static void main(String[] args) throws Exception {
 
 		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
@@ -40,12 +49,18 @@
 
 		parser.parseArgument(args);
 
+		Boolean isSparkSessionManaged = Optional
+			.ofNullable(parser.get("isSparkSessionManaged"))
+			.map(Boolean::valueOf)
+			.orElse(Boolean.TRUE);
+		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
+
 		final String inputPath = parser.get("input");
 		final String outputPath = parser.get("output");
 		final String workflowId = parser.get("workflowId");
 		final String trasformationRule = extractXSLTFromTR(
 			Objects.requireNonNull(DHPUtils.decompressString(parser.get("transformationRule"))));
-		final String master = parser.get("master");
 		final String rabbitUser = parser.get("rabbitUser");
 		final String rabbitPassword = parser.get("rabbitPassword");
 		final String rabbitHost = parser.get("rabbitHost");
@@ -53,12 +68,11 @@
 		final long dateOfCollection = new Long(parser.get("dateOfCollection"));
 		final boolean test = parser.get("isTest") == null ? false : Boolean.valueOf(parser.get("isTest"));
 
-		final SparkSession spark = SparkSession
-			.builder()
-			.appName("TransformStoreSparkJob")
-			.master(master)
-			.getOrCreate();
+		SparkConf conf = new SparkConf();
+		runWithSparkSession(
+			conf,
+			isSparkSessionManaged,
+			spark -> {
 
 				final Encoder<MetadataRecord> encoder = Encoders.bean(MetadataRecord.class);
 				final Dataset<MetadataRecord> mdstoreInput = spark.read().format("parquet").load(inputPath).as(encoder);
 				final LongAccumulator totalItems = spark.sparkContext().longAccumulator("TotalItems");
@@ -82,7 +96,8 @@
 				reportMap.put("mdStoreSize", "" + transformedItems.value());
 				System.out.println(new Message(workflowId, "Transform", MessageType.REPORT, reportMap));
 				if (!test) {
-					final MessageManager manager = new MessageManager(rabbitHost, rabbitUser, rabbitPassword, false, false,
+					final MessageManager manager = new MessageManager(rabbitHost, rabbitUser, rabbitPassword, false,
+						false,
 						null);
 					manager
 						.sendMessage(
@@ -93,6 +108,8 @@
 					manager.close();
 				}
 			}
+			});
+
 	}
 
 	private static String extractXSLTFromTR(final String tr) throws DocumentException {
@@ -1,16 +1,86 @@
 [
-{"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true},
-{"paramName":"e", "paramLongName":"encoding", "paramDescription": "the encoding of the input record should be JSON or XML", "paramRequired": true},
-{"paramName":"d", "paramLongName":"dateOfCollection", "paramDescription": "the date when the record has been stored", "paramRequired": true},
-{"paramName":"p", "paramLongName":"provenance", "paramDescription": "the infos about the provenance of the collected records", "paramRequired": true},
-{"paramName":"x", "paramLongName":"xpath", "paramDescription": "the xpath to identify the record ifentifier", "paramRequired": true},
-{"paramName":"i", "paramLongName":"input", "paramDescription": "the path of the sequencial file to read", "paramRequired": true},
-{"paramName":"o", "paramLongName":"output", "paramDescription": "the path of the result DataFrame on HDFS", "paramRequired": true},
-{"paramName":"ru", "paramLongName":"rabbitUser", "paramDescription": "the user to connect with RabbitMq for messaging", "paramRequired": true},
-{"paramName":"rp", "paramLongName":"rabbitPassword", "paramDescription": "the password to connect with RabbitMq for messaging", "paramRequired": true},
-{"paramName":"rh", "paramLongName":"rabbitHost", "paramDescription": "the host of the RabbitMq server", "paramRequired": true},
-{"paramName":"ro", "paramLongName":"rabbitOngoingQueue", "paramDescription": "the name of the ongoing queue", "paramRequired": true},
-{"paramName":"rr", "paramLongName":"rabbitReportQueue", "paramDescription": "the name of the report queue", "paramRequired": true},
-{"paramName":"w", "paramLongName":"workflowId", "paramDescription": "the identifier of the dnet Workflow", "paramRequired": true},
-{"paramName":"t", "paramLongName":"isTest", "paramDescription": "the name of the report queue", "paramRequired": false}
+  {
+    "paramName": "issm",
+    "paramLongName": "isSparkSessionManaged",
+    "paramDescription": "when true will stop SparkSession after job execution",
+    "paramRequired": false
+  },
+  {
+    "paramName": "e",
+    "paramLongName": "encoding",
+    "paramDescription": "the encoding of the input record should be JSON or XML",
+    "paramRequired": true
+  },
+  {
+    "paramName": "d",
+    "paramLongName": "dateOfCollection",
+    "paramDescription": "the date when the record has been stored",
+    "paramRequired": true
+  },
+  {
+    "paramName": "p",
+    "paramLongName": "provenance",
+    "paramDescription": "the infos about the provenance of the collected records",
+    "paramRequired": true
+  },
+  {
+    "paramName": "x",
+    "paramLongName": "xpath",
+    "paramDescription": "the xpath to identify the record identifier",
+    "paramRequired": true
+  },
+  {
+    "paramName": "i",
+    "paramLongName": "input",
+    "paramDescription": "the path of the sequencial file to read",
+    "paramRequired": true
+  },
+  {
+    "paramName": "o",
+    "paramLongName": "output",
+    "paramDescription": "the path of the result DataFrame on HDFS",
+    "paramRequired": true
+  },
+  {
+    "paramName": "ru",
+    "paramLongName": "rabbitUser",
+    "paramDescription": "the user to connect with RabbitMq for messaging",
+    "paramRequired": true
+  },
+  {
+    "paramName": "rp",
+    "paramLongName": "rabbitPassword",
+    "paramDescription": "the password to connect with RabbitMq for messaging",
+    "paramRequired": true
+  },
+  {
+    "paramName": "rh",
+    "paramLongName": "rabbitHost",
+    "paramDescription": "the host of the RabbitMq server",
+    "paramRequired": true
+  },
+  {
+    "paramName": "ro",
+    "paramLongName": "rabbitOngoingQueue",
+    "paramDescription": "the name of the ongoing queue",
+    "paramRequired": true
+  },
+  {
+    "paramName": "rr",
+    "paramLongName": "rabbitReportQueue",
+    "paramDescription": "the name of the report queue",
+    "paramRequired": true
+  },
+  {
+    "paramName": "w",
+    "paramLongName": "workflowId",
+    "paramDescription": "the identifier of the dnet Workflow",
+    "paramRequired": true
+  },
+  {
+    "paramName": "t",
+    "paramLongName": "isTest",
+    "paramDescription": "the name of the report queue",
+    "paramRequired": false
+  }
 ]
@@ -1,16 +1,74 @@
 [
-{"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true},
-{"paramName":"d", "paramLongName":"dateOfCollection", "paramDescription": "the date when the record has been stored", "paramRequired": true},
-{"paramName":"i", "paramLongName":"input", "paramDescription": "the path of the sequencial file to read", "paramRequired": true},
-{"paramName":"o", "paramLongName":"output", "paramDescription": "the path of the result DataFrame on HDFS", "paramRequired": true},
-{"paramName":"w", "paramLongName":"workflowId", "paramDescription": "the identifier of the dnet Workflow", "paramRequired": true},
-{"paramName":"tr", "paramLongName":"transformationRule","paramDescription": "the transformation Rule to apply to the input MDStore", "paramRequired": true},
-{"paramName":"ru", "paramLongName":"rabbitUser", "paramDescription": "the user to connect with RabbitMq for messaging", "paramRequired": true},
-{"paramName":"rp", "paramLongName":"rabbitPassword", "paramDescription": "the password to connect with RabbitMq for messaging", "paramRequired": true},
-{"paramName":"rh", "paramLongName":"rabbitHost", "paramDescription": "the host of the RabbitMq server", "paramRequired": true},
-{"paramName":"ro", "paramLongName":"rabbitOngoingQueue", "paramDescription": "the name of the ongoing queue", "paramRequired": true},
-{"paramName":"rr", "paramLongName":"rabbitReportQueue", "paramDescription": "the name of the report queue", "paramRequired": true},
-{"paramName":"t", "paramLongName":"isTest", "paramDescription": "the name of the report queue", "paramRequired": false}
+  {
+    "paramName": "issm",
+    "paramLongName": "isSparkSessionManaged",
+    "paramDescription": "when true will stop SparkSession after job execution",
+    "paramRequired": false
+  },
+  {
+    "paramName": "d",
+    "paramLongName": "dateOfCollection",
+    "paramDescription": "the date when the record has been stored",
+    "paramRequired": true
+  },
+  {
+    "paramName": "i",
+    "paramLongName": "input",
+    "paramDescription": "the path of the sequencial file to read",
+    "paramRequired": true
+  },
+  {
+    "paramName": "o",
+    "paramLongName": "output",
+    "paramDescription": "the path of the result DataFrame on HDFS",
+    "paramRequired": true
+  },
+  {
+    "paramName": "w",
+    "paramLongName": "workflowId",
+    "paramDescription": "the identifier of the dnet Workflow",
+    "paramRequired": true
+  },
+  {
+    "paramName": "tr",
+    "paramLongName": "transformationRule",
+    "paramDescription": "the transformation Rule to apply to the input MDStore",
+    "paramRequired": true
+  },
+  {
+    "paramName": "ru",
+    "paramLongName": "rabbitUser",
+    "paramDescription": "the user to connect with RabbitMq for messaging",
+    "paramRequired": true
+  },
+  {
+    "paramName": "rp",
+    "paramLongName": "rabbitPassword",
+    "paramDescription": "the password to connect with RabbitMq for messaging",
+    "paramRequired": true
+  },
+  {
+    "paramName": "rh",
+    "paramLongName": "rabbitHost",
+    "paramDescription": "the host of the RabbitMq server",
+    "paramRequired": true
+  },
+  {
+    "paramName": "ro",
+    "paramLongName": "rabbitOngoingQueue",
+    "paramDescription": "the name of the ongoing queue",
+    "paramRequired": true
+  },
+  {
+    "paramName": "rr",
+    "paramLongName": "rabbitReportQueue",
+    "paramDescription": "the name of the report queue",
+    "paramRequired": true
+  },
+  {
+    "paramName": "t",
+    "paramLongName": "isTest",
+    "paramDescription": "the name of the report queue",
+    "paramRequired": false
+  }
 ]
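Editor's note: both parameter files above describe the arguments consumed through ArgumentApplicationParser, following the pattern visible in the Spark jobs earlier in this commit. A small sketch of how such a definition is typically loaded and queried; the resource path used here is a placeholder, not a path introduced by this commit.

```java
import org.apache.commons.io.IOUtils;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;

public class ParameterParsingSketch {

	public static void main(final String[] args) throws Exception {
		// load the JSON parameter definition from the classpath (placeholder path)
		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
			IOUtils
				.toString(
					ParameterParsingSketch.class
						.getResourceAsStream("/eu/dnetlib/dhp/collection/collection_input_parameters.json")));
		parser.parseArgument(args);

		// optional parameters such as isSparkSessionManaged may be null when not provided
		System.out.println("encoding: " + parser.get("encoding"));
	}
}
```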
@@ -9,65 +9,60 @@ import java.nio.file.Path;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
-import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
+import org.apache.spark.SparkConf;
+import org.apache.spark.sql.SparkSession;
+import org.junit.jupiter.api.*;
+import org.junit.jupiter.api.io.TempDir;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 
 import eu.dnetlib.dhp.model.mdstore.MetadataRecord;
 import eu.dnetlib.dhp.model.mdstore.Provenance;
+import eu.dnetlib.dhp.schema.common.ModelSupport;
 
 public class CollectionJobTest {
 
-	private Path testDir;
+	private static SparkSession spark;
 
-	@BeforeEach
-	public void setup() throws IOException {
-		testDir = Files.createTempDirectory("dhp-collection");
+	@BeforeAll
+	public static void beforeAll() {
+		SparkConf conf = new SparkConf();
+		conf.setAppName(CollectionJobTest.class.getSimpleName());
+		conf.setMaster("local");
+		spark = SparkSession.builder().config(conf).getOrCreate();
 	}
 
-	@AfterEach
-	public void teadDown() throws IOException {
-		FileUtils.deleteDirectory(testDir.toFile());
+	@AfterAll
+	public static void afterAll() {
+		spark.stop();
 	}
 
 	@Test
-	public void tesCollection() throws Exception {
+	public void tesCollection(@TempDir Path testDir) throws Exception {
 		final Provenance provenance = new Provenance("pippo", "puppa", "ns_prefix");
+		Assertions.assertNotNull(new ObjectMapper().writeValueAsString(provenance));
 
 		GenerateNativeStoreSparkJob
 			.main(
 				new String[] {
-					"-mt",
-					"local",
-					"-w",
-					"wid",
-					"-e",
-					"XML",
-					"-d",
-					"" + System.currentTimeMillis(),
-					"-p",
-					new ObjectMapper().writeValueAsString(provenance),
-					"-x",
-					"./*[local-name()='record']/*[local-name()='header']/*[local-name()='identifier']",
-					"-i",
-					this.getClass().getResource("/eu/dnetlib/dhp/collection/native.seq").toString(),
-					"-o",
-					testDir.toString() + "/store",
-					"-t",
-					"true",
-					"-ru",
-					"",
-					"-rp",
-					"",
-					"-rh",
-					"",
-					"-ro",
-					"",
-					"-rr",
-					""
+					"issm", "true",
+					"-w", "wid",
+					"-e", "XML",
+					"-d", "" + System.currentTimeMillis(),
+					"-p", new ObjectMapper().writeValueAsString(provenance),
+					"-x", "./*[local-name()='record']/*[local-name()='header']/*[local-name()='identifier']",
+					"-i", this.getClass().getResource("/eu/dnetlib/dhp/collection/native.seq").toString(),
+					"-o", testDir.toString() + "/store",
+					"-t", "true",
+					"-ru", "",
+					"-rp", "",
+					"-rh", "",
+					"-ro", "",
+					"-rr", ""
 				});
-		System.out.println(new ObjectMapper().writeValueAsString(provenance));
+		// TODO introduce useful assertions
 
 	}
 
 	@Test
@@ -85,9 +80,8 @@ public class CollectionJobTest {
 				null,
 				null);
 
-		assert record != null;
-		System.out.println(record.getId());
-		System.out.println(record.getOriginalId());
+		assertNotNull(record.getId());
+		assertNotNull(record.getOriginalId());
 	}
 
 	@Test
@@ -112,10 +106,12 @@ public class CollectionJobTest {
 				System.currentTimeMillis(),
 				null,
 				null);
-		assert record != null;
 		record.setBody("ciao");
-		assert record1 != null;
 		record1.setBody("mondo");
+
+		assertNotNull(record);
+		assertNotNull(record1);
 		assertEquals(record, record1);
 	}
 }
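Editor's note: the test changes above move to a single SparkSession per class (@BeforeAll/@AfterAll) and let JUnit 5 inject the working directory with @TempDir instead of managing it by hand. A minimal, self-contained sketch of that lifecycle, independent of the collection job itself; the parquet write is only an illustrative use of the shared session.

```java
import java.nio.file.Path;

import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

public class SharedSparkSessionTestSketch {

	private static SparkSession spark;

	@BeforeAll
	public static void beforeAll() {
		// one local session shared by every test method in the class
		SparkConf conf = new SparkConf();
		conf.setAppName(SharedSparkSessionTestSketch.class.getSimpleName());
		conf.setMaster("local");
		spark = SparkSession.builder().config(conf).getOrCreate();
	}

	@AfterAll
	public static void afterAll() {
		spark.stop();
	}

	@Test
	public void usesTempDir(@TempDir Path testDir) {
		// JUnit creates and cleans up the directory automatically
		spark.range(10).write().parquet(testDir.resolve("sample").toString());
	}
}
```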
@@ -12,10 +12,14 @@ import java.util.Map;
 import javax.xml.transform.stream.StreamSource;
 
 import org.apache.commons.io.IOUtils;
+import org.apache.spark.SparkConf;
+import org.apache.spark.sql.SparkSession;
 import org.apache.spark.util.LongAccumulator;
 import org.dom4j.Document;
 import org.dom4j.Node;
 import org.dom4j.io.SAXReader;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.DisplayName;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
@@ -23,6 +27,7 @@ import org.junit.jupiter.api.io.TempDir;
 import org.mockito.Mock;
 import org.mockito.junit.jupiter.MockitoExtension;
 
+import eu.dnetlib.dhp.collection.CollectionJobTest;
 import eu.dnetlib.dhp.model.mdstore.MetadataRecord;
 import eu.dnetlib.dhp.transformation.functions.Cleaner;
 import eu.dnetlib.dhp.transformation.vocabulary.Vocabulary;
@@ -33,6 +38,21 @@ import net.sf.saxon.s9api.*;
 @ExtendWith(MockitoExtension.class)
 public class TransformationJobTest {
 
+	private static SparkSession spark;
+
+	@BeforeAll
+	public static void beforeAll() {
+		SparkConf conf = new SparkConf();
+		conf.setAppName(CollectionJobTest.class.getSimpleName());
+		conf.setMaster("local");
+		spark = SparkSession.builder().config(conf).getOrCreate();
+	}
+
+	@AfterAll
+	public static void afterAll() {
+		spark.stop();
+	}
+
 	@Mock
 	private LongAccumulator accumulator;
 
@@ -78,31 +98,21 @@
 		TransformSparkJobNode
 			.main(
 				new String[] {
-					"-mt",
-					"local",
-					"-i",
-					mdstore_input,
-					"-o",
-					mdstore_output,
-					"-d",
-					"1",
-					"-w",
-					"1",
-					"-tr",
-					xslt,
-					"-t",
-					"true",
-					"-ru",
-					"",
-					"-rp",
-					"",
-					"-rh",
-					"",
-					"-ro",
-					"",
-					"-rr",
-					""
+					"-issm", "true",
+					"-i", mdstore_input,
+					"-o", mdstore_output,
+					"-d", "1",
+					"-w", "1",
+					"-tr", xslt,
+					"-t", "true",
+					"-ru", "",
+					"-rp", "",
+					"-rh", "",
+					"-ro", "",
+					"-rr", ""
 				});
 
+		// TODO introduce useful assertions
 	}
 
 	@Test
@@ -0,0 +1,3 @@
+# dhp-broker-events
+dhp-broker-events is a DNET module responsible
+of the production of events for the OpenAIRE Broker Service.
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<parent>
+		<artifactId>dhp-workflows</artifactId>
+		<groupId>eu.dnetlib.dhp</groupId>
+		<version>1.2.1-SNAPSHOT</version>
+	</parent>
+	<modelVersion>4.0.0</modelVersion>
+
+	<artifactId>dhp-broker-events</artifactId>
+
+	<dependencies>
+
+		<dependency>
+			<groupId>commons-io</groupId>
+			<artifactId>commons-io</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>org.apache.spark</groupId>
+			<artifactId>spark-core_2.11</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.spark</groupId>
+			<artifactId>spark-sql_2.11</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.spark</groupId>
+			<artifactId>spark-hive_2.11</artifactId>
+			<scope>test</scope>
+		</dependency>
+
+		<dependency>
+			<groupId>eu.dnetlib.dhp</groupId>
+			<artifactId>dhp-common</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>eu.dnetlib.dhp</groupId>
+			<artifactId>dhp-schemas</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+
+		<dependency>
+			<groupId>com.jayway.jsonpath</groupId>
+			<artifactId>json-path</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>dom4j</groupId>
+			<artifactId>dom4j</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>jaxen</groupId>
+			<artifactId>jaxen</artifactId>
+		</dependency>
+
+		<dependency>
+			<groupId>eu.dnetlib</groupId>
+			<artifactId>dnet-openaire-broker-common</artifactId>
+			<version>[1.0.0,2.0.0)</version>
+		</dependency>
+
+	</dependencies>
+
+
+</project>
@@ -0,0 +1,104 @@
+
+package eu.dnetlib.dhp.broker.model;
+
+import java.util.Map;
+
+public class Event {
+
+	private String eventId;
+
+	private String producerId;
+
+	private String topic;
+
+	private String payload;
+
+	private Long creationDate;
+
+	private Long expiryDate;
+
+	private boolean instantMessage;
+
+	private Map<String, Object> map;
+
+	public Event() {
+	}
+
+	public Event(final String producerId, final String eventId, final String topic, final String payload,
+		final Long creationDate, final Long expiryDate,
+		final boolean instantMessage,
+		final Map<String, Object> map) {
+		this.producerId = producerId;
+		this.eventId = eventId;
+		this.topic = topic;
+		this.payload = payload;
+		this.creationDate = creationDate;
+		this.expiryDate = expiryDate;
+		this.instantMessage = instantMessage;
+		this.map = map;
+	}
+
+	public String getProducerId() {
+		return this.producerId;
+	}
+
+	public void setProducerId(final String producerId) {
+		this.producerId = producerId;
+	}
+
+	public String getEventId() {
+		return this.eventId;
+	}
+
+	public void setEventId(final String eventId) {
+		this.eventId = eventId;
+	}
+
+	public String getTopic() {
+		return this.topic;
+	}
+
+	public void setTopic(final String topic) {
+		this.topic = topic;
+	}
+
+	public String getPayload() {
+		return this.payload;
+	}
+
+	public void setPayload(final String payload) {
+		this.payload = payload;
+	}
+
+	public Long getCreationDate() {
+		return this.creationDate;
+	}
+
+	public void setCreationDate(final Long creationDate) {
+		this.creationDate = creationDate;
+	}
+
+	public Long getExpiryDate() {
+		return this.expiryDate;
+	}
+
+	public void setExpiryDate(final Long expiryDate) {
+		this.expiryDate = expiryDate;
+	}
+
+	public boolean isInstantMessage() {
+		return this.instantMessage;
+	}
+
+	public void setInstantMessage(final boolean instantMessage) {
+		this.instantMessage = instantMessage;
+	}
+
+	public Map<String, Object> getMap() {
+		return this.map;
+	}
+
+	public void setMap(final Map<String, Object> map) {
+		this.map = map;
+	}
+}
@ -0,0 +1,140 @@
|
||||||
|
|
||||||
|
package eu.dnetlib.dhp.broker.model;
|
||||||
|
|
||||||
|
import java.text.ParseException;
|
||||||
|
import java.util.Date;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
import org.apache.commons.codec.digest.DigestUtils;
|
||||||
|
import org.apache.commons.lang3.StringUtils;
|
||||||
|
import org.apache.commons.lang3.time.DateUtils;
|
||||||
|
|
||||||
|
import eu.dnetlib.broker.objects.OpenAireEventPayload;
|
||||||
|
import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
|
||||||
|
import eu.dnetlib.dhp.schema.oaf.Author;
|
||||||
|
import eu.dnetlib.dhp.schema.oaf.KeyValue;
|
||||||
|
import eu.dnetlib.dhp.schema.oaf.Result;
|
||||||
|
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
|
||||||
|
|
||||||
|
public class EventFactory {
|
||||||
|
|
||||||
|
private final static String PRODUCER_ID = "OpenAIRE";
|
||||||
|
|
||||||
|
private static final int TTH_DAYS = 365;
|
||||||
|
|
||||||
|
private final static String[] DATE_PATTERNS = {
|
||||||
|
"yyyy-MM-dd"
|
||||||
|
};
|
||||||
|
|
||||||
|
public static Event newBrokerEvent(final Result source, final Result target, final UpdateInfo<?> updateInfo) {
|
||||||
|
|
||||||
|
final long now = new Date().getTime();
|
||||||
|
|
||||||
|
final Event res = new Event();
|
||||||
|
|
||||||
|
final Map<String, Object> map = createMapFromResult(target, source, updateInfo);
|
||||||
|
|
||||||
|
final String payload = createPayload(target, updateInfo);
|
||||||
|
|
||||||
|
final String eventId = calculateEventId(
|
||||||
|
updateInfo.getTopic(), target.getOriginalId().get(0), updateInfo.getHighlightValueAsString());
|
||||||
|
|
||||||
|
res.setEventId(eventId);
|
||||||
|
		res.setProducerId(PRODUCER_ID);
		res.setPayload(payload);
		res.setMap(map);
		res.setTopic(updateInfo.getTopic());
		res.setCreationDate(now);
		res.setExpiryDate(calculateExpiryDate(now));
		res.setInstantMessage(false);
		return res;
	}

	private static String createPayload(final Result result, final UpdateInfo<?> updateInfo) {
		final OpenAireEventPayload payload = new OpenAireEventPayload();

		// TODO

		updateInfo.compileHighlight(payload);

		return payload.toJSON();
	}

	private static Map<String, Object> createMapFromResult(final Result oaf, final Result source,
		final UpdateInfo<?> updateInfo) {
		final Map<String, Object> map = new HashMap<>();

		final List<KeyValue> collectedFrom = oaf.getCollectedfrom();
		if (collectedFrom.size() == 1) {
			map.put("target_datasource_id", collectedFrom.get(0).getKey());
			map.put("target_datasource_name", collectedFrom.get(0).getValue());
		}

		final List<String> ids = oaf.getOriginalId();
		if (ids.size() > 0) {
			map.put("target_publication_id", ids.get(0));
		}

		final List<StructuredProperty> titles = oaf.getTitle();
		if (titles.size() > 0) {
			map.put("target_publication_title", titles.get(0));
		}

		final long date = parseDateTolong(oaf.getDateofacceptance().getValue());
		if (date > 0) {
			map.put("target_dateofacceptance", date);
		}

		final List<StructuredProperty> subjects = oaf.getSubject();
		if (subjects.size() > 0) {
			map
				.put(
					"target_publication_subject_list",
					subjects.stream().map(StructuredProperty::getValue).collect(Collectors.toList()));
		}

		final List<Author> authors = oaf.getAuthor();
		if (authors.size() > 0) {
			map
				.put(
					"target_publication_author_list",
					authors.stream().map(Author::getFullname).collect(Collectors.toList()));
		}

		// PROVENANCE INFO
		map.put("trust", updateInfo.getTrust());
		final List<KeyValue> sourceCollectedFrom = source.getCollectedfrom();
		if (sourceCollectedFrom.size() == 1) {
			map.put("provenance_datasource_id", sourceCollectedFrom.get(0).getKey());
			map.put("provenance_datasource_name", sourceCollectedFrom.get(0).getValue());
		}
		map.put("provenance_publication_id_list", source.getOriginalId());

		return map;
	}

	private static String calculateEventId(final String topic, final String publicationId, final String value) {
		return "event-"
			+ DigestUtils.md5Hex(topic).substring(0, 6) + "-"
			+ DigestUtils.md5Hex(publicationId).substring(0, 8) + "-"
			+ DigestUtils.md5Hex(value).substring(0, 8);
	}

	private static long calculateExpiryDate(final long now) {
		return now + TTH_DAYS * 24 * 60 * 60 * 1000;
	}

	private static long parseDateTolong(final String date) {
		if (StringUtils.isBlank(date)) {
			return -1;
		}
		try {
			return DateUtils.parseDate(date, DATE_PATTERNS).getTime();
		} catch (final ParseException e) {
			return -1;
		}
	}

}
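Note on calculateExpiryDate above: if the TTH_DAYS constant (declared elsewhere in EventFactory, not visible in this hunk) is an int, the product TTH_DAYS * 24 * 60 * 60 * 1000 is evaluated in int arithmetic and can overflow before it is widened to long. A minimal overflow-safe sketch, assuming TTH_DAYS simply holds the time-to-live in days:

	// Hypothetical variant of calculateExpiryDate: TimeUnit forces long arithmetic.
	// Assumes java.util.concurrent.TimeUnit is imported and TTH_DAYS is a day count.
	private static long calculateExpiryDate(final long now) {
		return now + TimeUnit.DAYS.toMillis(TTH_DAYS);
	}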
@ -0,0 +1,112 @@

package eu.dnetlib.dhp.broker.oa;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.broker.model.Event;
import eu.dnetlib.dhp.broker.model.EventFactory;
import eu.dnetlib.dhp.broker.oa.util.EnrichMissingAbstract;
import eu.dnetlib.dhp.broker.oa.util.EnrichMissingAuthorOrcid;
import eu.dnetlib.dhp.broker.oa.util.EnrichMissingOpenAccess;
import eu.dnetlib.dhp.broker.oa.util.EnrichMissingPid;
import eu.dnetlib.dhp.broker.oa.util.EnrichMissingProject;
import eu.dnetlib.dhp.broker.oa.util.EnrichMissingPublicationDate;
import eu.dnetlib.dhp.broker.oa.util.EnrichMissingSubject;
import eu.dnetlib.dhp.broker.oa.util.EnrichMoreOpenAccess;
import eu.dnetlib.dhp.broker.oa.util.EnrichMorePid;
import eu.dnetlib.dhp.broker.oa.util.EnrichMoreSubject;
import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.schema.oaf.Result;

public class GenerateEventsApplication {

	private static final Logger log = LoggerFactory.getLogger(GenerateEventsApplication.class);

	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	public static void main(final String[] args) throws Exception {
		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
			IOUtils
				.toString(
					GenerateEventsApplication.class
						.getResourceAsStream("/eu/dnetlib/dhp/oa/graph/merge_claims_parameters.json")));
		parser.parseArgument(args);

		final Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String graphPath = parser.get("graphPath");
		log.info("graphPath: {}", graphPath);

		final String eventsPath = parser.get("eventsPath");
		log.info("eventsPath: {}", eventsPath);

		final SparkConf conf = new SparkConf();
		runWithSparkSession(conf, isSparkSessionManaged, spark -> {
			removeOutputDir(spark, eventsPath);
			generateEvents(spark, graphPath, eventsPath);
		});

	}

	private static void removeOutputDir(final SparkSession spark, final String path) {
		HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration());
	}

	private static void generateEvents(final SparkSession spark, final String graphPath, final String eventsPath) {
		// TODO
	}

	private List<Event> generateEvents(final Result... children) {
		final List<Event> list = new ArrayList<>();

		for (final Result source : children) {
			for (final Result target : children) {
				if (source != target) {
					list
						.addAll(
							findUpdates(source, target)
								.stream()
								.map(info -> EventFactory.newBrokerEvent(source, target, info))
								.collect(Collectors.toList()));
				}
			}
		}

		return list;
	}

	private List<UpdateInfo<?>> findUpdates(final Result source, final Result target) {
		final List<UpdateInfo<?>> list = new ArrayList<>();
		list.addAll(EnrichMissingAbstract.findUpdates(source, target));
		list.addAll(EnrichMissingAuthorOrcid.findUpdates(source, target));
		list.addAll(EnrichMissingOpenAccess.findUpdates(source, target));
		list.addAll(EnrichMissingPid.findUpdates(source, target));
		list.addAll(EnrichMissingProject.findUpdates(source, target));
		list.addAll(EnrichMissingPublicationDate.findUpdates(source, target));
		list.addAll(EnrichMissingSubject.findUpdates(source, target));
		list.addAll(EnrichMoreOpenAccess.findUpdates(source, target));
		list.addAll(EnrichMorePid.findUpdates(source, target));
		list.addAll(EnrichMoreSubject.findUpdates(source, target));
		return list;
	}

}
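The Spark-side generateEvents(spark, graphPath, eventsPath) above is still a TODO, while the varargs generateEvents(Result...) helper already produces the cross-product of updates for a group of equivalent results. A minimal sketch of how that helper could be driven once such groups are available; the processGroup name and the assumption that duplicates are already collected into a List are illustrative only:

	// Hypothetical driver for one group of results that dedup considers equivalent.
	private List<Event> processGroup(final List<Result> duplicates) {
		return generateEvents(duplicates.toArray(new Result[0]));
	}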
@ -0,0 +1,31 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMissingAbstract extends UpdateInfo<String> {

	public static List<EnrichMissingAbstract> findUpdates(final Result source, final Result target) {
		// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
		return Arrays.asList();
	}

	private EnrichMissingAbstract(final String highlightValue, final float trust) {
		super("ENRICH/MISSING/ABSTRACT", highlightValue, trust);
	}

	@Override
	public void compileHighlight(final OpenAireEventPayload payload) {
		payload.getHighlight().getAbstracts().add(getHighlightValue());
	}

	@Override
	public String getHighlightValueAsString() {
		return getHighlightValue();
	}

}
@ -0,0 +1,31 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMissingAuthorOrcid extends UpdateInfo<String> {

	public static List<EnrichMissingAuthorOrcid> findUpdates(final Result source, final Result target) {
		// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
		return Arrays.asList();
	}

	private EnrichMissingAuthorOrcid(final String highlightValue, final float trust) {
		super("ENRICH/MISSING/AUTHOR/ORCID", highlightValue, trust);
	}

	@Override
	public void compileHighlight(final OpenAireEventPayload payload) {
		// TODO
	}

	@Override
	public String getHighlightValueAsString() {
		return getHighlightValue();
	}

}
@ -0,0 +1,32 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.Instance;
import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMissingOpenAccess extends UpdateInfo<Instance> {

	public static List<EnrichMissingOpenAccess> findUpdates(final Result source, final Result target) {
		// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
		return Arrays.asList();
	}

	private EnrichMissingOpenAccess(final Instance highlightValue, final float trust) {
		super("ENRICH/MISSING/OPENACCESS_VERSION", highlightValue, trust);
	}

	@Override
	public void compileHighlight(final OpenAireEventPayload payload) {
		payload.getHighlight().getInstances().add(getHighlightValue());
	}

	@Override
	public String getHighlightValueAsString() {
		return getHighlightValue().getUrl();
	}

}
@ -0,0 +1,32 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.broker.objects.Pid;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMissingPid extends UpdateInfo<Pid> {

	public static List<EnrichMissingPid> findUpdates(final Result source, final Result target) {
		// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
		return Arrays.asList();
	}

	private EnrichMissingPid(final Pid highlightValue, final float trust) {
		super("ENRICH/MISSING/PID", highlightValue, trust);
	}

	@Override
	public void compileHighlight(final OpenAireEventPayload payload) {
		payload.getHighlight().getPids().add(getHighlightValue());
	}

	@Override
	public String getHighlightValueAsString() {
		return getHighlightValue().getType() + "::" + getHighlightValue().getValue();
	}

}
@ -0,0 +1,33 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.broker.objects.Project;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMissingProject extends UpdateInfo<Project> {

	public static List<EnrichMissingProject> findUpdates(final Result source, final Result target) {
		// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
		return Arrays.asList();
	}

	private EnrichMissingProject(final Project highlightValue, final float trust) {
		super("ENRICH/MISSING/PROJECT", highlightValue, trust);
	}

	@Override
	public void compileHighlight(final OpenAireEventPayload payload) {
		payload.getHighlight().getProjects().add(getHighlightValue());
	}

	@Override
	public String getHighlightValueAsString() {
		return getHighlightValue().getFunder() + "::" + getHighlightValue().getFundingProgram()
			+ getHighlightValue().getCode();
	}

}
@ -0,0 +1,31 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMissingPublicationDate extends UpdateInfo<String> {

	public static List<EnrichMissingPublicationDate> findUpdates(final Result source, final Result target) {
		// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
		return Arrays.asList();
	}

	private EnrichMissingPublicationDate(final String highlightValue, final float trust) {
		super("ENRICH/MISSING/PUBLICATION_DATE", highlightValue, trust);
	}

	@Override
	public void compileHighlight(final OpenAireEventPayload payload) {
		payload.getHighlight().setPublicationdate(getHighlightValue());
	}

	@Override
	public String getHighlightValueAsString() {
		return getHighlightValue();
	}

}
@ -0,0 +1,36 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMissingSubject extends UpdateInfo<String> {

	public static List<EnrichMissingSubject> findUpdates(final Result source, final Result target) {
		// MESHEUROPMC
		// ARXIV
		// JEL
		// DDC
		// ACM

		return Arrays.asList();
	}

	private EnrichMissingSubject(final String subjectClassification, final String highlightValue, final float trust) {
		super("ENRICH/MISSING/SUBJECT/" + subjectClassification, highlightValue, trust);
	}

	@Override
	public void compileHighlight(final OpenAireEventPayload payload) {
		payload.getHighlight().getSubjects().add(getHighlightValue());
	}

	@Override
	public String getHighlightValueAsString() {
		return getHighlightValue();
	}

}
@ -0,0 +1,32 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.Instance;
import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMoreOpenAccess extends UpdateInfo<Instance> {

	public static List<EnrichMoreOpenAccess> findUpdates(final Result source, final Result target) {
		// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
		return Arrays.asList();
	}

	private EnrichMoreOpenAccess(final Instance highlightValue, final float trust) {
		super("ENRICH/MORE/OPENACCESS_VERSION", highlightValue, trust);
	}

	@Override
	public void compileHighlight(final OpenAireEventPayload payload) {
		payload.getHighlight().getInstances().add(getHighlightValue());
	}

	@Override
	public String getHighlightValueAsString() {
		return getHighlightValue().getUrl();
	}

}
@ -0,0 +1,32 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.broker.objects.Pid;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMorePid extends UpdateInfo<Pid> {

	public static List<EnrichMorePid> findUpdates(final Result source, final Result target) {
		// return Arrays.asList(new EnrichMissingAbstract("xxxxxxx", 0.9f));
		return Arrays.asList();
	}

	private EnrichMorePid(final Pid highlightValue, final float trust) {
		super("ENRICH/MORE/PID", highlightValue, trust);
	}

	@Override
	public void compileHighlight(final OpenAireEventPayload payload) {
		payload.getHighlight().getPids().add(getHighlightValue());
	}

	@Override
	public String getHighlightValueAsString() {
		return getHighlightValue().getType() + "::" + getHighlightValue().getValue();
	}

}
@ -0,0 +1,36 @@

package eu.dnetlib.dhp.broker.oa.util;

import java.util.Arrays;
import java.util.List;

import eu.dnetlib.broker.objects.OpenAireEventPayload;
import eu.dnetlib.dhp.schema.oaf.Result;

public class EnrichMoreSubject extends UpdateInfo<String> {

	public static List<EnrichMoreSubject> findUpdates(final Result source, final Result target) {
		// MESHEUROPMC
		// ARXIV
		// JEL
		// DDC
		// ACM

		return Arrays.asList();
	}

	private EnrichMoreSubject(final String subjectClassification, final String highlightValue, final float trust) {
		super("ENRICH/MORE/SUBJECT/" + subjectClassification, highlightValue, trust);
	}

	@Override
	public void compileHighlight(final OpenAireEventPayload payload) {
		payload.getHighlight().getSubjects().add(getHighlightValue());
	}

	@Override
	public String getHighlightValueAsString() {
		return getHighlightValue();
	}

}
@ -0,0 +1,36 @@

package eu.dnetlib.dhp.broker.oa.util;

import eu.dnetlib.broker.objects.OpenAireEventPayload;

public abstract class UpdateInfo<T> {

	private final String topic;

	private final T highlightValue;

	private final float trust;

	protected UpdateInfo(final String topic, final T highlightValue, final float trust) {
		this.topic = topic;
		this.highlightValue = highlightValue;
		this.trust = trust;
	}

	public T getHighlightValue() {
		return highlightValue;
	}

	public float getTrust() {
		return trust;
	}

	public String getTopic() {
		return topic;
	}

	abstract public void compileHighlight(OpenAireEventPayload payload);

	abstract public String getHighlightValueAsString();

}
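Every Enrich* subclass above currently returns an empty list from findUpdates. As a sketch of the intended shape, the commented-out constructor call in EnrichMissingAbstract could be wired up roughly as below; the description-based check and the 0.9f trust value are assumptions, and Field and StringUtils are taken from the dhp schema and commons-lang3 respectively:

	// Hypothetical findUpdates body for EnrichMissingAbstract, for illustration only.
	public static List<EnrichMissingAbstract> findUpdates(final Result source, final Result target) {
		final List<Field<String>> sourceAbs = source.getDescription();
		final List<Field<String>> targetAbs = target.getDescription();
		if ((targetAbs == null || targetAbs.isEmpty()) && sourceAbs != null && !sourceAbs.isEmpty()) {
			return Arrays.asList(new EnrichMissingAbstract(sourceAbs.get(0).getValue(), 0.9f));
		}
		return Arrays.asList();
	}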
@ -3,7 +3,7 @@
 	<parent>
 		<artifactId>dhp-workflows</artifactId>
 		<groupId>eu.dnetlib.dhp</groupId>
-		<version>1.1.7-SNAPSHOT</version>
+		<version>1.2.1-SNAPSHOT</version>
 	</parent>
 	<modelVersion>4.0.0</modelVersion>
 	<artifactId>dhp-dedup-openaire</artifactId>
@ -137,10 +137,14 @@ public class SparkCreateMergeRels extends AbstractSparkAction {
 	}

 	private Relation rel(String source, String target, String relClass, DedupConfig dedupConf) {
+
+		String entityType = dedupConf.getWf().getEntityType();
+
 		Relation r = new Relation();
 		r.setSource(source);
 		r.setTarget(target);
 		r.setRelClass(relClass);
+		r.setRelType(entityType + entityType.substring(0, 1).toUpperCase() + entityType.substring(1));
 		r.setSubRelType("dedup");

 		DataInfo info = new DataInfo();
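For reference, the relType assembled above is the entity type followed by its capitalised form, so "result" yields "resultResult" and "organization" yields "organizationOrganization". Assuming org.apache.commons.lang3.StringUtils is available on the classpath, the same expression can be written as:

	// Equivalent to entityType + entityType.substring(0, 1).toUpperCase() + entityType.substring(1)
	r.setRelType(entityType + StringUtils.capitalize(entityType));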
@ -86,7 +86,8 @@ public class SparkPropagateRelation extends AbstractSparkAction {
 				mergedIds,
 				FieldType.TARGET,
 				getFixRelFn(FieldType.TARGET))
-			.filter(SparkPropagateRelation::containsDedup);
+			.filter(SparkPropagateRelation::containsDedup)
+			.distinct();

 		Dataset<Relation> updated = processDataset(
 			processDataset(rels, mergedIds, FieldType.SOURCE, getDeletedFn()),
@ -75,12 +75,20 @@
 		</configuration>
 	</global>

-	<start to="CreateSimRel"/>
+	<start to="resetWorkingPath"/>

 	<kill name="Kill">
 		<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
 	</kill>

+	<action name="resetWorkingPath">
+		<fs>
+			<delete path="${workingPath}"/>
+		</fs>
+		<ok to="CreateSimRel"/>
+		<error to="Kill"/>
+	</action>
+
 	<action name="CreateSimRel">
 		<spark xmlns="uri:oozie:spark-action:0.2">
 			<master>yarn</master>
@ -18,6 +18,7 @@ import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.api.java.function.PairFunction;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
@ -29,6 +30,8 @@ import org.mockito.Mock;
 import org.mockito.Mockito;
 import org.mockito.junit.jupiter.MockitoExtension;

+import com.fasterxml.jackson.databind.ObjectMapper;
+
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
@ -420,7 +423,7 @@ public class SparkDedupTest implements Serializable {

 		long relations = jsc.textFile(testDedupGraphBasePath + "/relation").count();

-		assertEquals(5022, relations);
+		assertEquals(4975, relations);

 		// check deletedbyinference
 		final Dataset<Relation> mergeRels = spark
@ -450,6 +453,25 @@ public class SparkDedupTest implements Serializable {
 		assertEquals(updated, deletedbyinference);
 	}

+	@Test
+	@Order(6)
+	public void testRelations() throws Exception {
+		testUniqueness("/eu/dnetlib/dhp/dedup/test/relation_1.json", 12, 10);
+		testUniqueness("/eu/dnetlib/dhp/dedup/test/relation_2.json", 10, 2);
+	}
+
+	private void testUniqueness(String path, int expected_total, int expected_unique) {
+		Dataset<Relation> rel = spark
+			.read()
+			.textFile(getClass().getResource(path).getPath())
+			.map(
+				(MapFunction<String, Relation>) s -> new ObjectMapper().readValue(s, Relation.class),
+				Encoders.bean(Relation.class));
+
+		assertEquals(expected_total, rel.count());
+		assertEquals(expected_unique, rel.distinct().count());
+	}
+
 	@AfterAll
 	public static void finalCleanUp() throws IOException {
 		FileUtils.deleteDirectory(new File(testOutputBasePath));
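The uniqueness check above relies on Dataset.distinct() over a bean-encoded Dataset, so two relations only collapse when every encoded field matches. That is why relation_2.json below counts 10 rows but only 2 distinct ones: nine lines are byte-for-byte identical and the last differs only in the final character of its target identifier, while relation_1.json keeps 10 of its 12 rows.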
@ -0,0 +1,12 @@
|
||||||
|
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|doajarticles::40c7b1dfa18c3693d374dafd21ef852f","subRelType":"provision","target":"10|doajarticles::618df40624078491acfd93ca3ff6921c"}
|
||||||
|
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|doajarticles::0b4e756a73338f60b84de98d080f6422","subRelType":"provision","target":"10|doajarticles::6d01e689db13b6977b411f4170b6143b"}
|
||||||
|
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|doajarticles::fe2f7c9d350b9c5aa658ec384d761e33","subRelType":"provision","target":"10|doajarticles::9b8a956b0703854ba79e52ddf7dc552e"}
|
||||||
|
{"collectedfrom":[{"key":"10|driver______::bee53aa31dc2cbb538c10c2b65fa5824","value":"DOAJ-Articles"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|doajarticles::a116734108ba011ef715b012f095e3f5","subRelType":"provision","target":"10|doajarticles::c5de04b1a35da2cc4468e299bc9ffa16"}
|
||||||
|
{"collectedfrom":[{"key":"10|openaire____::47ce9e9f4fad46e732cff06419ecaabb","value":"OpenDOAR"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|opendoar____::8b83abbbcad5496fe43cda88d0045aa4","subRelType":"provision","target":"10|opendoar____::6855456e2fe46a9d49d3d3af4f57443d"}
|
||||||
|
{"collectedfrom":[{"key":"10|openaire____::47ce9e9f4fad46e732cff06419ecaabb","value":"OpenDOAR"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|opendoar____::88034de0247d9d36e22783e9319c5ba3","subRelType":"provision","target":"10|opendoar____::c17028c9b6e0c5deaad29665d582284a"}
|
||||||
|
{"collectedfrom":[{"key":"10|openaire____::47ce9e9f4fad46e732cff06419ecaabb","value":"OpenDOAR"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|opendoar____::dfb21c796f33e9acf505cc960a3d8d2c","subRelType":"provision","target":"10|opendoar____::dfa037a53e121ecc9e0926800c3e814e"}
|
||||||
|
{"collectedfrom":[{"key":"10|openaire____::21f8a223b9925c2f87c404096080b046","value":"Registry of Research Data Repository"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|re3data_____::b526b1aa1562038881a31be59896985f","subRelType":"provision","target":"10|re3data_____::2e457773b62df3534cc04441bf406a70"}
|
||||||
|
{"collectedfrom":[{"key":"10|openaire____::21f8a223b9925c2f87c404096080b046","value":"Registry of Research Data Repository"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|re3data_____::6b306183bc051b5aaa5376f2fab6e6e5","subRelType":"provision","target":"10|re3data_____::6371ff9ee1ec7073416cb83c868b10a3"}
|
||||||
|
{"collectedfrom":[{"key":"10|openaire____::21f8a223b9925c2f87c404096080b046","value":"Registry of Research Data Repository"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|re3data_____::0f697c2543a43bc0da793bf78ecd4996","subRelType":"provision","target":"10|re3data_____::770ef1f8eb03f174c0add746523c6f28"}
|
||||||
|
{"collectedfrom":[{"key":"10|openaire____::21f8a223b9925c2f87c404096080b046","value":"Registry of Research Data Repository"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|re3data_____::0f697c2543a43bc0da793bf78ecd4996","subRelType":"provision","target":"10|re3data_____::770ef1f8eb03f174c0add746523c6f28"}
|
||||||
|
{"collectedfrom":[{"key":"10|openaire____::21f8a223b9925c2f87c404096080b046","value":"Registry of Research Data Repository"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"sysimport:crosswalk:entityregistry","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.9"},"lastupdatetimestamp":1588608946167,"relClass":"provides","relType":"datasourceOrganization","source":"20|re3data_____::0f697c2543a43bc0da793bf78ecd4996","subRelType":"provision","target":"10|re3data_____::770ef1f8eb03f174c0add746523c6f28"}
|
|
@ -0,0 +1,10 @@
|
||||||
|
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
|
||||||
|
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
|
||||||
|
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
|
||||||
|
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
|
||||||
|
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
|
||||||
|
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
|
||||||
|
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
|
||||||
|
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
|
||||||
|
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681628"}
|
||||||
|
{"collectedfrom":null,"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"decisiontree-dedup-test","inferred":true,"invisible":false,"provenanceaction":{"classid":"sysimport:dedup","classname":"sysimport:dedup","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":null},"lastupdatetimestamp":null,"relClass":"isMergedIn","relType":"resultResult","source":"50|dedup_wf_001::498c4e6cfff198831b488a6c62221241","subRelType":"dedup","target":"50|doiboost____::8e5e14d80d0f2ebe6a6a55d972681629"}
|
|
@ -3,7 +3,7 @@
 <parent>
 	<artifactId>dhp-workflows</artifactId>
 	<groupId>eu.dnetlib.dhp</groupId>
-	<version>1.1.7-SNAPSHOT</version>
+	<version>1.2.1-SNAPSHOT</version>
 </parent>
 <modelVersion>4.0.0</modelVersion>
@ -3,7 +3,7 @@
 <parent>
 	<artifactId>dhp-workflows</artifactId>
 	<groupId>eu.dnetlib.dhp</groupId>
-	<version>1.1.7-SNAPSHOT</version>
+	<version>1.2.1-SNAPSHOT</version>
 </parent>
 <modelVersion>4.0.0</modelVersion>
@ -3,7 +3,7 @@
 <parent>
 	<artifactId>dhp-workflows</artifactId>
 	<groupId>eu.dnetlib.dhp</groupId>
-	<version>1.1.7-SNAPSHOT</version>
+	<version>1.2.1-SNAPSHOT</version>
 </parent>
 <modelVersion>4.0.0</modelVersion>
@ -10,6 +10,7 @@ import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.listFields;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.oaiIProvenance;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;
+import static eu.dnetlib.dhp.schema.common.ModelConstants.*;

 import java.util.ArrayList;
 import java.util.Arrays;
@ -24,7 +25,6 @@ import org.dom4j.DocumentFactory;
 import org.dom4j.DocumentHelper;
 import org.dom4j.Node;

-import eu.dnetlib.dhp.oa.graph.raw.common.MigrationConstants;
 import eu.dnetlib.dhp.schema.oaf.Author;
 import eu.dnetlib.dhp.schema.oaf.Context;
 import eu.dnetlib.dhp.schema.oaf.DataInfo;
@ -48,6 +48,21 @@ public abstract class AbstractMdRecordToOafMapper {

 	protected final Map<String, String> code2name;

+	protected static final String DATACITE_SCHEMA_KERNEL_4 = "http://datacite.org/schema/kernel-4";
+	protected static final String DATACITE_SCHEMA_KERNEL_3 = "http://datacite.org/schema/kernel-3";
+
+	protected static final Map<String, String> nsContext = new HashMap<>();
+
+	static {
+		nsContext.put("dr", "http://www.driver-repository.eu/namespace/dr");
+		nsContext.put("dri", "http://www.driver-repository.eu/namespace/dri");
+		nsContext.put("oaf", "http://namespace.openaire.eu/oaf");
+		nsContext.put("oai", "http://www.openarchives.org/OAI/2.0/");
+		nsContext.put("prov", "http://www.openarchives.org/OAI/2.0/provenance");
+		nsContext.put("dc", "http://purl.org/dc/elements/1.1/");
+		nsContext.put("datacite", DATACITE_SCHEMA_KERNEL_3);
+	}
+
 	protected static final Qualifier MAIN_TITLE_QUALIFIER = qualifier(
 		"main title", "main title", "dnet:dataCite_title", "dnet:dataCite_title");
@ -57,31 +72,27 @@ public abstract class AbstractMdRecordToOafMapper {
 	public List<Oaf> processMdRecord(final String xml) {
 		try {
-			final Map<String, String> nsContext = new HashMap<>();
-			nsContext.put("dr", "http://www.driver-repository.eu/namespace/dr");
-			nsContext.put("dri", "http://www.driver-repository.eu/namespace/dri");
-			nsContext.put("oaf", "http://namespace.openaire.eu/oaf");
-			nsContext.put("oai", "http://www.openarchives.org/OAI/2.0/");
-			nsContext.put("prov", "http://www.openarchives.org/OAI/2.0/provenance");
-			nsContext.put("dc", "http://purl.org/dc/elements/1.1/");
-			nsContext.put("datacite", "http://datacite.org/schema/kernel-3");
 			DocumentFactory.getInstance().setXPathNamespaceURIs(nsContext);

 			final Document doc = DocumentHelper
 				.parseText(
-					xml
-						.replaceAll(
-							"http://datacite.org/schema/kernel-4", "http://datacite.org/schema/kernel-3"));
+					xml.replaceAll(DATACITE_SCHEMA_KERNEL_4, DATACITE_SCHEMA_KERNEL_3));

 			final String type = doc.valueOf("//dr:CobjCategory/@type");
-			final KeyValue collectedFrom = keyValue(
-				createOpenaireId(10, doc.valueOf("//oaf:collectedFrom/@id"), true),
-				doc.valueOf("//oaf:collectedFrom/@name"));
+			final KeyValue collectedFrom = getProvenanceDatasource(
+				doc, "//oaf:collectedFrom/@id", "//oaf:collectedFrom/@name");
+
+			if (collectedFrom == null) {
+				return null;
+			}
+
 			final KeyValue hostedBy = StringUtils.isBlank(doc.valueOf("//oaf:hostedBy/@id"))
 				? collectedFrom
-				: keyValue(
-					createOpenaireId(10, doc.valueOf("//oaf:hostedBy/@id"), true),
-					doc.valueOf("//oaf:hostedBy/@name"));
+				: getProvenanceDatasource(doc, "//oaf:hostedBy/@id", "//oaf:hostedBy/@name");
+
+			if (hostedBy == null) {
+				return null;
+			}
+
 			final DataInfo info = prepareDataInfo(doc);
 			final long lastUpdateTimestamp = new Date().getTime();
@ -92,6 +103,19 @@ public abstract class AbstractMdRecordToOafMapper {
 		}
 	}

+	private KeyValue getProvenanceDatasource(Document doc, String xpathId, String xpathName) {
+		final String dsId = doc.valueOf(xpathId);
+		final String dsName = doc.valueOf(xpathName);
+
+		if (StringUtils.isBlank(dsId) | StringUtils.isBlank(dsName)) {
+			return null;
+		}
+
+		return keyValue(
+			createOpenaireId(10, dsId, true),
+			dsName);
+	}
+
 	protected List<Oaf> createOafs(
 		final Document doc,
 		final String type,
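One detail worth noting in getProvenanceDatasource above: the single | between the two isBlank checks is the non-short-circuiting boolean OR, so both isBlank calls are always evaluated. The result is the same as the conventional short-circuit form:

	// Equivalent guard with the conventional short-circuit operator.
	if (StringUtils.isBlank(dsId) || StringUtils.isBlank(dsName)) {
		return null;
	}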
@ -107,14 +131,14 @@ public abstract class AbstractMdRecordToOafMapper {
 			case "publication":
 				final Publication p = new Publication();
 				populateResultFields(p, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
-				p.setResulttype(MigrationConstants.PUBLICATION_RESULTTYPE_QUALIFIER);
+				p.setResulttype(PUBLICATION_DEFAULT_RESULTTYPE);
 				p.setJournal(prepareJournal(doc, info));
 				oafs.add(p);
 				break;
 			case "dataset":
 				final Dataset d = new Dataset();
 				populateResultFields(d, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
-				d.setResulttype(MigrationConstants.DATASET_RESULTTYPE_QUALIFIER);
+				d.setResulttype(PUBLICATION_DEFAULT_RESULTTYPE);
 				d.setStoragedate(prepareDatasetStorageDate(doc, info));
 				d.setDevice(prepareDatasetDevice(doc, info));
 				d.setSize(prepareDatasetSize(doc, info));
@ -127,7 +151,7 @@ public abstract class AbstractMdRecordToOafMapper {
 			case "software":
 				final Software s = new Software();
 				populateResultFields(s, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
-				s.setResulttype(MigrationConstants.SOFTWARE_RESULTTYPE_QUALIFIER);
+				s.setResulttype(SOFTWARE_DEFAULT_RESULTTYPE);
 				s.setDocumentationUrl(prepareSoftwareDocumentationUrls(doc, info));
 				s.setLicense(prepareSoftwareLicenses(doc, info));
 				s.setCodeRepositoryUrl(prepareSoftwareCodeRepositoryUrl(doc, info));
|
 			default:
 				final OtherResearchProduct o = new OtherResearchProduct();
 				populateResultFields(o, doc, collectedFrom, hostedBy, info, lastUpdateTimestamp);
-				o.setResulttype(MigrationConstants.OTHER_RESULTTYPE_QUALIFIER);
+				o.setResulttype(ORP_DEFAULT_RESULTTYPE);
 				o.setContactperson(prepareOtherResearchProductContactPersons(doc, info));
 				o.setContactgroup(prepareOtherResearchProductContactGroups(doc, info));
 				o.setTool(prepareOtherResearchProductTools(doc, info));
|
 			if (StringUtils.isNotBlank(originalId)) {
 				final String projectId = createOpenaireId(40, originalId, true);

-				final Relation r1 = new Relation();
-				r1.setRelType("resultProject");
-				r1.setSubRelType("outcome");
-				r1.setRelClass("isProducedBy");
-				r1.setSource(docId);
-				r1.setTarget(projectId);
-				r1.setCollectedfrom(Arrays.asList(collectedFrom));
-				r1.setDataInfo(info);
-				r1.setLastupdatetimestamp(lastUpdateTimestamp);
-				res.add(r1);
-
-				final Relation r2 = new Relation();
-				r2.setRelType("resultProject");
-				r2.setSubRelType("outcome");
-				r2.setRelClass("produces");
-				r2.setSource(projectId);
-				r2.setTarget(docId);
-				r2.setCollectedfrom(Arrays.asList(collectedFrom));
-				r2.setDataInfo(info);
-				r2.setLastupdatetimestamp(lastUpdateTimestamp);
-				res.add(r2);
+				res
+					.add(
+						getRelation(
+							docId, projectId, RESULT_PROJECT, OUTCOME, IS_PRODUCED_BY, collectedFrom, info,
+							lastUpdateTimestamp));
+				res
+					.add(
+						getRelation(
+							projectId, docId, RESULT_PROJECT, OUTCOME, PRODUCES, collectedFrom, info,
+							lastUpdateTimestamp));
 			}
 		}

 		return res;
 	}

+	protected Relation getRelation(String source, String target, String relType, String subRelType, String relClass,
+		KeyValue collectedFrom, DataInfo info, long lastUpdateTimestamp) {
+		final Relation rel = new Relation();
+		rel.setRelType(relType);
+		rel.setSubRelType(subRelType);
+		rel.setRelClass(relClass);
+		rel.setSource(source);
+		rel.setTarget(target);
+		rel.setCollectedfrom(Arrays.asList(collectedFrom));
+		rel.setDataInfo(info);
+		rel.setLastupdatetimestamp(lastUpdateTimestamp);
+		return rel;
+	}
+
 	protected abstract List<Oaf> addOtherResultRels(
 		final Document doc,
 		final KeyValue collectedFrom,
|

 		if (n == null) {
 			return dataInfo(
-				false, null, false, false, MigrationConstants.REPOSITORY_PROVENANCE_ACTIONS, "0.9");
+				false, null, false, false, REPOSITORY_PROVENANCE_ACTIONS, "0.9");
 		}

 		final String paClassId = n.valueOf("./oaf:provenanceaction/@classid");
@ -95,6 +95,7 @@ public class GenerateEntitiesApplication {
 				.sequenceFile(sp, Text.class, Text.class)
 				.map(k -> new Tuple2<>(k._1().toString(), k._2().toString()))
 				.map(k -> convertToListOaf(k._1(), k._2(), code2name))
+				.filter(Objects::nonNull)
 				.flatMap(list -> list.iterator()));
 	}
@ -10,6 +10,7 @@ import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.listFields;
|
||||||
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.listKeyValues;
|
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.listKeyValues;
|
||||||
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;
|
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;
|
||||||
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;
|
import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;
|
||||||
|
import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
|
||||||
|
|
||||||
import java.io.Closeable;
|
import java.io.Closeable;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
@ -31,7 +32,6 @@ import org.apache.commons.logging.LogFactory;
|
||||||
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
|
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
|
||||||
import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication;
|
import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication;
|
||||||
 import eu.dnetlib.dhp.oa.graph.raw.common.DbClient;
-import eu.dnetlib.dhp.oa.graph.raw.common.MigrationConstants;
 import eu.dnetlib.dhp.schema.oaf.Context;
 import eu.dnetlib.dhp.schema.oaf.DataInfo;
 import eu.dnetlib.dhp.schema.oaf.Dataset;
@@ -55,6 +55,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
 
 	private static final Log log = LogFactory.getLog(MigrateDbEntitiesApplication.class);
 
+	public static final String SOURCE_TYPE = "source_type";
+	public static final String TARGET_TYPE = "target_type";
+
 	private final DbClient dbClient;
 
 	private final long lastUpdateTimestamp;
@@ -62,7 +65,8 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
 	public static void main(final String[] args) throws Exception {
 		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
 			IOUtils
-				.toString(MigrateDbEntitiesApplication.class
+				.toString(
+					MigrateDbEntitiesApplication.class
 						.getResourceAsStream("/eu/dnetlib/dhp/oa/graph/migrate_db_entities_parameters.json")));
 
 		parser.parseArgument(args);
@@ -134,7 +138,10 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
 			ds.setId(createOpenaireId(10, rs.getString("datasourceid"), true));
 			ds.setOriginalId(Arrays.asList(rs.getString("datasourceid")));
 			ds
-				.setCollectedfrom(listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname")));
+				.setCollectedfrom(
+					listKeyValues(
+						createOpenaireId(10, rs.getString("collectedfromid"), true),
+						rs.getString("collectedfromname")));
 			ds.setPid(new ArrayList<>());
 			ds.setDateofcollection(asString(rs.getDate("dateofcollection")));
 			ds.setDateoftransformation(null); // Value not returned by the SQL query
@@ -195,7 +202,10 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
 			p.setId(createOpenaireId(40, rs.getString("projectid"), true));
 			p.setOriginalId(Arrays.asList(rs.getString("projectid")));
 			p
-				.setCollectedfrom(listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname")));
+				.setCollectedfrom(
+					listKeyValues(
+						createOpenaireId(10, rs.getString("collectedfromid"), true),
+						rs.getString("collectedfromname")));
 			p.setPid(new ArrayList<>());
 			p.setDateofcollection(asString(rs.getDate("dateofcollection")));
 			p.setDateoftransformation(asString(rs.getDate("dateoftransformation")));
@@ -249,7 +259,10 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
 			o.setId(createOpenaireId(20, rs.getString("organizationid"), true));
 			o.setOriginalId(Arrays.asList(rs.getString("organizationid")));
 			o
-				.setCollectedfrom(listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname")));
+				.setCollectedfrom(
+					listKeyValues(
+						createOpenaireId(10, rs.getString("collectedfromid"), true),
+						rs.getString("collectedfromname")));
 			o.setPid(new ArrayList<>());
 			o.setDateofcollection(asString(rs.getDate("dateofcollection")));
 			o.setDateoftransformation(asString(rs.getDate("dateoftransformation")));
@@ -267,9 +280,11 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
 				.setEcresearchorganization(field(Boolean.toString(rs.getBoolean("ecresearchorganization")), info));
 			o.setEchighereducation(field(Boolean.toString(rs.getBoolean("echighereducation")), info));
 			o
-				.setEcinternationalorganizationeurinterests(field(Boolean.toString(rs.getBoolean("ecinternationalorganizationeurinterests")), info));
+				.setEcinternationalorganizationeurinterests(
+					field(Boolean.toString(rs.getBoolean("ecinternationalorganizationeurinterests")), info));
 			o
-				.setEcinternationalorganization(field(Boolean.toString(rs.getBoolean("ecinternationalorganization")), info));
+				.setEcinternationalorganization(
+					field(Boolean.toString(rs.getBoolean("ecinternationalorganization")), info));
 			o.setEcenterprise(field(Boolean.toString(rs.getBoolean("ecenterprise")), info));
 			o.setEcsmevalidated(field(Boolean.toString(rs.getBoolean("ecsmevalidated")), info));
 			o.setEcnutscode(field(Boolean.toString(rs.getBoolean("ecnutscode")), info));
@@ -288,12 +303,13 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
 			final DataInfo info = prepareDataInfo(rs);
 			final String orgId = createOpenaireId(20, rs.getString("organization"), true);
 			final String dsId = createOpenaireId(10, rs.getString("datasource"), true);
-			final List<KeyValue> collectedFrom = listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"));
+			final List<KeyValue> collectedFrom = listKeyValues(
+				createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"));
 
 			final Relation r1 = new Relation();
-			r1.setRelType("datasourceOrganization");
-			r1.setSubRelType("provision");
-			r1.setRelClass("isProvidedBy");
+			r1.setRelType(DATASOURCE_ORGANIZATION);
+			r1.setSubRelType(PROVISION);
+			r1.setRelClass(IS_PROVIDED_BY);
 			r1.setSource(dsId);
 			r1.setTarget(orgId);
 			r1.setCollectedfrom(collectedFrom);
@@ -301,9 +317,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
 			r1.setLastupdatetimestamp(lastUpdateTimestamp);
 
 			final Relation r2 = new Relation();
-			r2.setRelType("datasourceOrganization");
-			r2.setSubRelType("provision");
-			r2.setRelClass("provides");
+			r2.setRelType(DATASOURCE_ORGANIZATION);
+			r2.setSubRelType(PROVISION);
+			r2.setRelClass(PROVIDES);
 			r2.setSource(orgId);
 			r2.setTarget(dsId);
 			r2.setCollectedfrom(collectedFrom);
@@ -321,12 +337,13 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
 			final DataInfo info = prepareDataInfo(rs);
 			final String orgId = createOpenaireId(20, rs.getString("resporganization"), true);
 			final String projectId = createOpenaireId(40, rs.getString("project"), true);
-			final List<KeyValue> collectedFrom = listKeyValues(createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"));
+			final List<KeyValue> collectedFrom = listKeyValues(
+				createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"));
 
 			final Relation r1 = new Relation();
-			r1.setRelType("projectOrganization");
-			r1.setSubRelType("participation");
-			r1.setRelClass("hasParticipant");
+			r1.setRelType(PROJECT_ORGANIZATION);
+			r1.setSubRelType(PARTICIPATION);
+			r1.setRelClass(HAS_PARTICIPANT);
 			r1.setSource(projectId);
 			r1.setTarget(orgId);
 			r1.setCollectedfrom(collectedFrom);
@@ -334,9 +351,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
 			r1.setLastupdatetimestamp(lastUpdateTimestamp);
 
 			final Relation r2 = new Relation();
-			r2.setRelType("projectOrganization");
-			r2.setSubRelType("participation");
-			r2.setRelClass("isParticipant");
+			r2.setRelType(PROJECT_ORGANIZATION);
+			r2.setSubRelType(PARTICIPATION);
+			r2.setRelClass(IS_PARTICIPANT);
 			r2.setSource(orgId);
 			r2.setTarget(projectId);
 			r2.setCollectedfrom(collectedFrom);
@@ -351,28 +368,30 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
 
 	public List<Oaf> processClaims(final ResultSet rs) {
 
-		final DataInfo info =
-			dataInfo(false, null, false, false, qualifier("user:claim", "user:claim", "dnet:provenanceActions", "dnet:provenanceActions"), "0.9");
+		final DataInfo info = dataInfo(
+			false, null, false, false,
+			qualifier(USER_CLAIM, USER_CLAIM, DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS), "0.9");
 
-		final List<KeyValue> collectedFrom = listKeyValues(createOpenaireId(10, "infrastruct_::openaire", true), "OpenAIRE");
+		final List<KeyValue> collectedFrom = listKeyValues(
+			createOpenaireId(10, "infrastruct_::openaire", true), "OpenAIRE");
 
 		try {
 
-			if (rs.getString("source_type").equals("context")) {
+			if (rs.getString(SOURCE_TYPE).equals("context")) {
 				final Result r;
 
-				if (rs.getString("target_type").equals("dataset")) {
+				if (rs.getString(TARGET_TYPE).equals("dataset")) {
 					r = new Dataset();
-					r.setResulttype(MigrationConstants.DATASET_RESULTTYPE_QUALIFIER);
-				} else if (rs.getString("target_type").equals("software")) {
+					r.setResulttype(DATASET_DEFAULT_RESULTTYPE);
+				} else if (rs.getString(TARGET_TYPE).equals("software")) {
 					r = new Software();
-					r.setResulttype(MigrationConstants.SOFTWARE_RESULTTYPE_QUALIFIER);
-				} else if (rs.getString("target_type").equals("other")) {
+					r.setResulttype(SOFTWARE_DEFAULT_RESULTTYPE);
+				} else if (rs.getString(TARGET_TYPE).equals("other")) {
 					r = new OtherResearchProduct();
-					r.setResulttype(MigrationConstants.OTHER_RESULTTYPE_QUALIFIER);
+					r.setResulttype(ORP_DEFAULT_RESULTTYPE);
 				} else {
 					r = new Publication();
-					r.setResulttype(MigrationConstants.PUBLICATION_RESULTTYPE_QUALIFIER);
+					r.setResulttype(PUBLICATION_DEFAULT_RESULTTYPE);
 				}
 				r.setId(createOpenaireId(50, rs.getString("target_id"), false));
 				r.setLastupdatetimestamp(lastUpdateTimestamp);
@@ -382,32 +401,32 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
 
 				return Arrays.asList(r);
 			} else {
-				final String sourceId = createOpenaireId(rs.getString("source_type"), rs.getString("source_id"), false);
-				final String targetId = createOpenaireId(rs.getString("target_type"), rs.getString("target_id"), false);
+				final String sourceId = createOpenaireId(rs.getString(SOURCE_TYPE), rs.getString("source_id"), false);
+				final String targetId = createOpenaireId(rs.getString(TARGET_TYPE), rs.getString("target_id"), false);
 
 				final Relation r1 = new Relation();
 				final Relation r2 = new Relation();
 
-				if (rs.getString("source_type").equals("project")) {
+				if (rs.getString(SOURCE_TYPE).equals("project")) {
 					r1.setCollectedfrom(collectedFrom);
-					r1.setRelType("resultProject");
-					r1.setSubRelType("outcome");
-					r1.setRelClass("produces");
+					r1.setRelType(RESULT_PROJECT);
+					r1.setSubRelType(OUTCOME);
+					r1.setRelClass(PRODUCES);
 
 					r2.setCollectedfrom(collectedFrom);
-					r2.setRelType("resultProject");
-					r2.setSubRelType("outcome");
-					r2.setRelClass("isProducedBy");
+					r2.setRelType(RESULT_PROJECT);
+					r2.setSubRelType(OUTCOME);
+					r2.setRelClass(IS_PRODUCED_BY);
 				} else {
 					r1.setCollectedfrom(collectedFrom);
-					r1.setRelType("resultResult");
-					r1.setSubRelType("relationship");
-					r1.setRelClass("isRelatedTo");
+					r1.setRelType(RESULT_RESULT);
+					r1.setSubRelType(RELATIONSHIP);
+					r1.setRelClass(IS_RELATED_TO);
 
 					r2.setCollectedfrom(collectedFrom);
-					r2.setRelType("resultResult");
-					r2.setSubRelType("relationship");
-					r2.setRelClass("isRelatedTo");
+					r2.setRelType(RESULT_RESULT);
+					r2.setSubRelType(RELATIONSHIP);
+					r2.setRelClass(IS_RELATED_TO);
 				}
 
 				r1.setSource(sourceId);
@@ -440,11 +459,14 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
 		final String inferenceprovenance = rs.getString("inferenceprovenance");
 		final Boolean inferred = rs.getBoolean("inferred");
 		final String trust = rs.getString("trust");
-		return dataInfo(deletedbyinference, inferenceprovenance, inferred, false, MigrationConstants.ENTITYREGISTRY_PROVENANCE_ACTION, trust);
+		return dataInfo(
+			deletedbyinference, inferenceprovenance, inferred, false, ENTITYREGISTRY_PROVENANCE_ACTION, trust);
 	}
 
 	private Qualifier prepareQualifierSplitting(final String s) {
-		if (StringUtils.isBlank(s)) { return null; }
+		if (StringUtils.isBlank(s)) {
+			return null;
+		}
 		final String[] arr = s.split("@@@");
 		return arr.length == 4 ? qualifier(arr[0], arr[1], arr[2], arr[3]) : null;
 	}
@@ -458,12 +480,16 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
 	}
 
 	private StructuredProperty prepareStructProp(final String s, final DataInfo dataInfo) {
-		if (StringUtils.isBlank(s)) { return null; }
+		if (StringUtils.isBlank(s)) {
+			return null;
+		}
 		final String[] parts = s.split("###");
 		if (parts.length == 2) {
 			final String value = parts[0];
 			final String[] arr = parts[1].split("@@@");
-			if (arr.length == 4) { return structuredProperty(value, arr[0], arr[1], arr[2], arr[3], dataInfo); }
+			if (arr.length == 4) {
+				return structuredProperty(value, arr[0], arr[1], arr[2], arr[3], dataInfo);
+			}
 		}
 		return null;
 	}
@@ -489,8 +515,9 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication
 		final String[] arr = sj.split("@@@");
 		if (arr.length == 3) {
 			final String issn = StringUtils.isNotBlank(arr[0]) ? arr[0].trim() : null;
-			final String eissn = StringUtils.isNotBlank(arr[1]) ? arr[1].trim() : null;;
-			final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2].trim() : null;;
+			final String eissn = StringUtils.isNotBlank(arr[1]) ? arr[1].trim() : null;
+			final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2].trim() : null;
 
 			if (issn != null || eissn != null || lissn != null) {
 				return journal(name, issn, eissn, eissn, null, null, null, null, null, null, null, info);
 			}
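Note: the relation and provenance string literals above are replaced by constants statically imported from eu.dnetlib.dhp.schema.common.ModelConstants. The constant definitions themselves are not part of this changeset; the following is only a sketch of the values they would carry, taken from the literals they replace in this diff.

// Hypothetical excerpt: names from the new call sites, values from the replaced literals.
public static final String DATASOURCE_ORGANIZATION = "datasourceOrganization";
public static final String PROVISION = "provision";
public static final String IS_PROVIDED_BY = "isProvidedBy";
public static final String PROVIDES = "provides";
public static final String PROJECT_ORGANIZATION = "projectOrganization";
public static final String PARTICIPATION = "participation";
public static final String HAS_PARTICIPANT = "hasParticipant";
public static final String IS_PARTICIPANT = "isParticipant";
public static final String RESULT_PROJECT = "resultProject";
public static final String OUTCOME = "outcome";
public static final String PRODUCES = "produces";
public static final String IS_PRODUCED_BY = "isProducedBy";
public static final String RESULT_RESULT = "resultResult";
public static final String RELATIONSHIP = "relationship";
public static final String IS_RELATED_TO = "isRelatedTo";
public static final String USER_CLAIM = "user:claim";
public static final String DNET_PROVENANCE_ACTIONS = "dnet:provenanceActions";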
@@ -3,27 +3,19 @@ package eu.dnetlib.dhp.oa.graph.raw;
 
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.createOpenaireId;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.field;
+import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
+import java.util.stream.Collectors;
 
 import org.apache.commons.lang3.StringUtils;
 import org.dom4j.Document;
 import org.dom4j.Node;
 
+import com.google.common.collect.Lists;
+
 import eu.dnetlib.dhp.oa.graph.raw.common.PacePerson;
-import eu.dnetlib.dhp.schema.oaf.Author;
-import eu.dnetlib.dhp.schema.oaf.DataInfo;
-import eu.dnetlib.dhp.schema.oaf.Field;
-import eu.dnetlib.dhp.schema.oaf.GeoLocation;
-import eu.dnetlib.dhp.schema.oaf.Instance;
-import eu.dnetlib.dhp.schema.oaf.KeyValue;
-import eu.dnetlib.dhp.schema.oaf.Oaf;
-import eu.dnetlib.dhp.schema.oaf.Qualifier;
-import eu.dnetlib.dhp.schema.oaf.Relation;
-import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
+import eu.dnetlib.dhp.schema.oaf.*;
 
 public class OafToOafMapper extends AbstractMdRecordToOafMapper {
 
@@ -52,7 +44,7 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
 
 	@Override
 	protected Qualifier prepareLanguages(final Document doc) {
-		return prepareQualifier(doc, "//dc:language", "dnet:languages", "dnet:languages");
+		return prepareQualifier(doc, "//dc:language", DNET_LANGUAGES, DNET_LANGUAGES);
 	}
 
 	@Override
@@ -96,26 +88,22 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
 		final DataInfo info,
 		final KeyValue collectedfrom,
 		final KeyValue hostedby) {
-		final List<Instance> res = new ArrayList<>();
-		for (final Object o : doc.selectNodes("//dc:identifier")) {
-			final String url = ((Node) o).getText().trim();
-			if (url.startsWith("http")) {
 		final Instance instance = new Instance();
-				instance.setUrl(Arrays.asList(url));
 		instance
 			.setInstancetype(
 				prepareQualifier(
 					doc,
 					"//dr:CobjCategory",
-					"dnet:publication_resource",
-					"dnet:publication_resource"));
+					DNET_PUBLICATION_RESOURCE,
+					DNET_PUBLICATION_RESOURCE));
 		instance.setCollectedfrom(collectedfrom);
 		instance.setHostedby(hostedby);
 		instance.setDateofacceptance(field(doc.valueOf("//oaf:dateAccepted"), info));
 		instance.setDistributionlocation(doc.valueOf("//oaf:distributionlocation"));
 		instance
 			.setAccessright(
-				prepareQualifier(doc, "//oaf:accessrights", "dnet:access_modes", "dnet:access_modes"));
+				prepareQualifier(doc, "//oaf:accessrights", DNET_ACCESS_MODES, DNET_ACCESS_MODES));
 		instance.setLicense(field(doc.valueOf("//oaf:license"), info));
 		instance.setRefereed(field(doc.valueOf("//oaf:refereed"), info));
 		instance
@@ -124,10 +112,19 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
 		instance
 			.setProcessingchargecurrency(
 				field(doc.valueOf("//oaf:processingchargeamount/@currency"), info));
-				res.add(instance);
-			}
-		}
-		return res;
+		List<Node> nodes = Lists.newArrayList(doc.selectNodes("//dc:identifier"));
+		instance
+			.setUrl(
+				nodes
+					.stream()
+					.filter(n -> StringUtils.isNotBlank(n.getText()))
+					.map(n -> n.getText().trim())
+					.filter(u -> u.startsWith("http"))
+					.distinct()
+					.collect(Collectors.toCollection(ArrayList::new)));
+
+		return Lists.newArrayList(instance);
 	}
 
 	@Override
@@ -241,27 +238,16 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
 
 			final String otherId = createOpenaireId(50, originalId, false);
 
-			final Relation r1 = new Relation();
-			r1.setRelType("resultResult");
-			r1.setSubRelType("publicationDataset");
-			r1.setRelClass("isRelatedTo");
-			r1.setSource(docId);
-			r1.setTarget(otherId);
-			r1.setCollectedfrom(Arrays.asList(collectedFrom));
-			r1.setDataInfo(info);
-			r1.setLastupdatetimestamp(lastUpdateTimestamp);
-			res.add(r1);
-
-			final Relation r2 = new Relation();
-			r2.setRelType("resultResult");
-			r2.setSubRelType("publicationDataset");
-			r2.setRelClass("isRelatedTo");
-			r2.setSource(otherId);
-			r2.setTarget(docId);
-			r2.setCollectedfrom(Arrays.asList(collectedFrom));
-			r2.setDataInfo(info);
-			r2.setLastupdatetimestamp(lastUpdateTimestamp);
-			res.add(r2);
+			res
+				.add(
+					getRelation(
+						docId, otherId, RESULT_RESULT, PUBLICATION_DATASET, IS_RELATED_TO, collectedFrom, info,
+						lastUpdateTimestamp));
+			res
+				.add(
+					getRelation(
+						otherId, docId, RESULT_RESULT, PUBLICATION_DATASET, IS_RELATED_TO, collectedFrom, info,
+						lastUpdateTimestamp));
 		}
 	}
 	return res;
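Note: the per-identifier loop that previously created one Instance per http URL is replaced by a single Instance whose URL list is built with a stream over //dc:identifier. A self-contained sketch of that collection step is shown below; the class and method names are illustrative only, not part of the change.

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;
import org.dom4j.Document;
import org.dom4j.Node;

import com.google.common.collect.Lists;

class IdentifierUrls {

	// Collects the distinct http(s) values of //dc:identifier, mirroring the
	// stream pipeline introduced by the change above.
	static List<String> collect(final Document doc) {
		final List<Node> nodes = Lists.newArrayList(doc.selectNodes("//dc:identifier"));
		return nodes
			.stream()
			.filter(n -> StringUtils.isNotBlank(n.getText()))
			.map(n -> n.getText().trim())
			.filter(u -> u.startsWith("http"))
			.distinct()
			.collect(Collectors.toCollection(ArrayList::new));
	}
}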
@@ -4,16 +4,15 @@ package eu.dnetlib.dhp.oa.graph.raw;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.createOpenaireId;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.field;
 import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.structuredProperty;
+import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
 import org.apache.commons.lang3.StringUtils;
 import org.dom4j.Document;
 import org.dom4j.Node;
 
+import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.oaf.Author;
 import eu.dnetlib.dhp.schema.oaf.DataInfo;
 import eu.dnetlib.dhp.schema.oaf.Field;
@@ -27,6 +26,8 @@ import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
 
 public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
 
+	public static final String HTTP_DX_DOI_PREIFX = "http://dx.doi.org/";
+
 	public OdfToOafMapper(final Map<String, String> code2name) {
 		super(code2name);
 	}
@@ -62,7 +63,7 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
 				structuredProperty(
 					((Node) o).getText(),
 					prepareQualifier(
-						(Node) o, "./@nameIdentifierScheme", "dnet:pid_types", "dnet:pid_types"),
+						(Node) o, "./@nameIdentifierScheme", DNET_PID_TYPES, DNET_PID_TYPES),
 					info));
 		}
 		return res;
@@ -76,18 +77,19 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
 		final KeyValue hostedby) {
 
 		final Instance instance = new Instance();
+		final Set<String> url = new HashSet<>();
 		instance.setUrl(new ArrayList<>());
 		instance
 			.setInstancetype(
 				prepareQualifier(
-					doc, "//dr:CobjCategory", "dnet:publication_resource", "dnet:publication_resource"));
+					doc, "//dr:CobjCategory", DNET_PUBLICATION_RESOURCE, DNET_PUBLICATION_RESOURCE));
 		instance.setCollectedfrom(collectedfrom);
 		instance.setHostedby(hostedby);
 		instance.setDateofacceptance(field(doc.valueOf("//oaf:dateAccepted"), info));
 		instance.setDistributionlocation(doc.valueOf("//oaf:distributionlocation"));
 		instance
 			.setAccessright(
-				prepareQualifier(doc, "//oaf:accessrights", "dnet:access_modes", "dnet:access_modes"));
+				prepareQualifier(doc, "//oaf:accessrights", DNET_ACCESS_MODES, DNET_ACCESS_MODES));
 		instance.setLicense(field(doc.valueOf("//oaf:license"), info));
 		instance.setRefereed(field(doc.valueOf("//oaf:refereed"), info));
 		instance.setProcessingchargeamount(field(doc.valueOf("//oaf:processingchargeamount"), info));
@@ -96,17 +98,18 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
 				field(doc.valueOf("//oaf:processingchargeamount/@currency"), info));
 
 		for (final Object o : doc.selectNodes("//datacite:alternateIdentifier[@alternateIdentifierType='URL']")) {
-			instance.getUrl().add(((Node) o).getText().trim());
+			url.add(((Node) o).getText().trim());
 		}
 		for (final Object o : doc.selectNodes("//datacite:identifier[@identifierType='URL']")) {
-			instance.getUrl().add(((Node) o).getText().trim());
+			url.add(((Node) o).getText().trim());
 		}
 		for (final Object o : doc.selectNodes("//datacite:alternateIdentifier[@alternateIdentifierType='DOI']")) {
-			instance.getUrl().add("http://dx.doi.org/" + ((Node) o).getText().trim());
+			url.add(HTTP_DX_DOI_PREIFX + ((Node) o).getText().trim());
 		}
 		for (final Object o : doc.selectNodes("//datacite:identifier[@identifierType='DOI']")) {
-			instance.getUrl().add("http://dx.doi.org/" + ((Node) o).getText().trim());
+			url.add(HTTP_DX_DOI_PREIFX + ((Node) o).getText().trim());
 		}
+		instance.getUrl().addAll(url);
 		return Arrays.asList(instance);
 	}
 
@@ -131,8 +134,8 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
 					((Node) o).getText(),
 					"UNKNOWN",
 					"UNKNOWN",
-					"dnet:dataCite_date",
-					"dnet:dataCite_date",
+					DNET_DATA_CITE_DATE,
+					DNET_DATA_CITE_DATE,
 					info));
 			}
 		}
@@ -171,7 +174,7 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
 
 	@Override
 	protected Qualifier prepareLanguages(final Document doc) {
-		return prepareQualifier(doc, "//datacite:language", "dnet:languages", "dnet:languages");
+		return prepareQualifier(doc, "//datacite:language", DNET_LANGUAGES, DNET_LANGUAGES);
 	}
 
 	@Override
@@ -292,36 +295,29 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
 			final String otherId = createOpenaireId(50, originalId, false);
 			final String type = ((Node) o).valueOf("@relationType");
 
-			if (type.equals("IsSupplementTo")) {
+			if (type.equalsIgnoreCase("IsSupplementTo")) {
 				res
 					.add(
-						prepareOtherResultRel(
-							collectedFrom,
-							info,
-							lastUpdateTimestamp,
-							docId,
-							otherId,
-							"supplement",
-							"isSupplementTo"));
+						getRelation(
+							docId, otherId, RESULT_RESULT, SUPPLEMENT, IS_SUPPLEMENT_TO, collectedFrom, info,
+							lastUpdateTimestamp));
 				res
 					.add(
-						prepareOtherResultRel(
-							collectedFrom,
-							info,
-							lastUpdateTimestamp,
-							otherId,
-							docId,
-							"supplement",
-							"isSupplementedBy"));
+						getRelation(
+							otherId, docId, RESULT_RESULT, SUPPLEMENT, IS_SUPPLEMENTED_BY, collectedFrom, info,
+							lastUpdateTimestamp));
 			} else if (type.equals("IsPartOf")) {
 
 				res
 					.add(
-						prepareOtherResultRel(
-							collectedFrom, info, lastUpdateTimestamp, docId, otherId, "part", "IsPartOf"));
+						getRelation(
+							docId, otherId, RESULT_RESULT, PART, IS_PART_OF, collectedFrom, info,
+							lastUpdateTimestamp));
 				res
 					.add(
-						prepareOtherResultRel(
-							collectedFrom, info, lastUpdateTimestamp, otherId, docId, "part", "HasParts"));
+						getRelation(
+							otherId, docId, RESULT_RESULT, PART, HAS_PARTS, collectedFrom, info,
+							lastUpdateTimestamp));
 			} else {
 			}
 		}
@@ -329,32 +325,12 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
 		return res;
 	}
 
-	private Relation prepareOtherResultRel(
-		final KeyValue collectedFrom,
-		final DataInfo info,
-		final long lastUpdateTimestamp,
-		final String source,
-		final String target,
-		final String subRelType,
-		final String relClass) {
-		final Relation r = new Relation();
-		r.setRelType("resultResult");
-		r.setSubRelType(subRelType);
-		r.setRelClass(relClass);
-		r.setSource(source);
-		r.setTarget(target);
-		r.setCollectedfrom(Arrays.asList(collectedFrom));
-		r.setDataInfo(info);
-		r.setLastupdatetimestamp(lastUpdateTimestamp);
-		return r;
-	}
-
 	@Override
 	protected Qualifier prepareResourceType(final Document doc, final DataInfo info) {
 		return prepareQualifier(
 			doc,
 			"//*[local-name() = 'resource']//*[local-name() = 'resourceType']",
-			"dnet:dataCite_resource",
-			"dnet:dataCite_resource");
+			DNET_DATA_CITE_RESOURCE,
+			DNET_DATA_CITE_RESOURCE);
 	}
 }
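Note: both mappers now delegate relation construction to a shared getRelation(...) helper instead of building Relation objects field by field; the private prepareOtherResultRel above is removed for the same reason. The helper itself is not shown in this changeset, so the following is only a hypothetical reconstruction, inferred from the call sites and from the removed method (it presumably lives in AbstractMdRecordToOafMapper).

// Sketch only: signature and body inferred from the getRelation(...) calls above.
protected Relation getRelation(final String source, final String target, final String relType,
	final String subRelType, final String relClass, final KeyValue collectedfrom, final DataInfo info,
	final long lastUpdateTimestamp) {
	final Relation rel = new Relation();
	rel.setRelType(relType);
	rel.setSubRelType(subRelType);
	rel.setRelClass(relClass);
	rel.setSource(source);
	rel.setTarget(target);
	rel.setCollectedfrom(Arrays.asList(collectedfrom));
	rel.setDataInfo(info);
	rel.setLastupdatetimestamp(lastUpdateTimestamp);
	return rel;
}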
@@ -1,27 +0,0 @@
-
-package eu.dnetlib.dhp.oa.graph.raw.common;
-
-import static eu.dnetlib.dhp.oa.graph.raw.common.OafMapperUtils.qualifier;
-
-import eu.dnetlib.dhp.schema.oaf.Qualifier;
-
-public class MigrationConstants {
-
-	public static final Qualifier PUBLICATION_RESULTTYPE_QUALIFIER = qualifier(
-		"publication", "publication", "dnet:result_typologies", "dnet:result_typologies");
-	public static final Qualifier DATASET_RESULTTYPE_QUALIFIER = qualifier(
-		"dataset", "dataset",
-		"dnet:result_typologies", "dnet:result_typologies");
-	public static final Qualifier SOFTWARE_RESULTTYPE_QUALIFIER = qualifier(
-		"software", "software",
-		"dnet:result_typologies", "dnet:result_typologies");
-	public static final Qualifier OTHER_RESULTTYPE_QUALIFIER = qualifier(
-		"other", "other",
-		"dnet:result_typologies", "dnet:result_typologies");
-	public static final Qualifier REPOSITORY_PROVENANCE_ACTIONS = qualifier(
-		"sysimport:crosswalk:repository", "sysimport:crosswalk:repository",
-		"dnet:provenanceActions", "dnet:provenanceActions");
-	public static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION = qualifier(
-		"sysimport:crosswalk:entityregistry", "sysimport:crosswalk:entityregistry",
-		"dnet:provenanceActions", "dnet:provenanceActions");
-}
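Note: the MigrationConstants class deleted above is superseded by constants in eu.dnetlib.dhp.schema.common.ModelConstants, which the mappers now reference as PUBLICATION_DEFAULT_RESULTTYPE, DATASET_DEFAULT_RESULTTYPE, SOFTWARE_DEFAULT_RESULTTYPE, ORP_DEFAULT_RESULTTYPE and ENTITYREGISTRY_PROVENANCE_ACTION. A hypothetical sketch of those replacements, assuming they mirror the removed definitions and that a qualifier(...) factory like the one removed above is available:

// Sketch only: values assumed to match the deleted MigrationConstants entries.
public static final Qualifier PUBLICATION_DEFAULT_RESULTTYPE = qualifier(
	"publication", "publication", DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
public static final Qualifier DATASET_DEFAULT_RESULTTYPE = qualifier(
	"dataset", "dataset", DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
public static final Qualifier SOFTWARE_DEFAULT_RESULTTYPE = qualifier(
	"software", "software", DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
public static final Qualifier ORP_DEFAULT_RESULTTYPE = qualifier(
	"other", "other", DNET_RESULT_TYPOLOGIES, DNET_RESULT_TYPOLOGIES);
public static final Qualifier ENTITYREGISTRY_PROVENANCE_ACTION = qualifier(
	"sysimport:crosswalk:entityregistry", "sysimport:crosswalk:entityregistry",
	DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS);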
@@ -115,11 +115,11 @@
 				<delete path="${contentPath}/db_claims"/>
 			</prepare>
 			<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
-			<arg>-p</arg><arg>${contentPath}/db_claims</arg>
-			<arg>-pgurl</arg><arg>${postgresURL}</arg>
-			<arg>-pguser</arg><arg>${postgresUser}</arg>
-			<arg>-pgpasswd</arg><arg>${postgresPassword}</arg>
-			<arg>-a</arg><arg>claims</arg>
+			<arg>--hdfsPath</arg><arg>${contentPath}/db_claims</arg>
+			<arg>--postgresUrl</arg><arg>${postgresURL}</arg>
+			<arg>--postgresUser</arg><arg>${postgresUser}</arg>
+			<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
+			<arg>--action</arg><arg>claims</arg>
 		</java>
 		<ok to="ImportODF_claims"/>
 		<error to="Kill"/>
@@ -165,10 +165,10 @@
 				<delete path="${contentPath}/db_records"/>
 			</prepare>
 			<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
-			<arg>-p</arg><arg>${contentPath}/db_records</arg>
-			<arg>-pgurl</arg><arg>${postgresURL}</arg>
-			<arg>-pguser</arg><arg>${postgresUser}</arg>
-			<arg>-pgpasswd</arg><arg>${postgresPassword}</arg>
+			<arg>--hdfsPath</arg><arg>${contentPath}/db_records</arg>
+			<arg>--postgresUrl</arg><arg>${postgresURL}</arg>
+			<arg>--postgresUser</arg><arg>${postgresUser}</arg>
+			<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
 		</java>
 		<ok to="ImportODF"/>
 		<error to="Kill"/>
@@ -180,12 +180,12 @@
 				<delete path="${contentPath}/odf_records"/>
 			</prepare>
 			<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
-			<arg>-p</arg><arg>${contentPath}/odf_records</arg>
-			<arg>-mongourl</arg><arg>${mongoURL}</arg>
-			<arg>-mongodb</arg><arg>${mongoDb}</arg>
-			<arg>-f</arg><arg>ODF</arg>
-			<arg>-l</arg><arg>store</arg>
-			<arg>-i</arg><arg>cleaned</arg>
+			<arg>--hdfsPath</arg><arg>${contentPath}/odf_records</arg>
+			<arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
+			<arg>--mongoDb</arg><arg>${mongoDb}</arg>
+			<arg>--mdFormat</arg><arg>ODF</arg>
+			<arg>--mdLayout</arg><arg>store</arg>
+			<arg>--mdInterpretation</arg><arg>cleaned</arg>
 		</java>
 		<ok to="ImportOAF"/>
 		<error to="Kill"/>
@@ -197,12 +197,12 @@
 				<delete path="${contentPath}/oaf_records"/>
 			</prepare>
 			<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
-			<arg>-p</arg><arg>${contentPath}/oaf_records</arg>
-			<arg>-mongourl</arg><arg>${mongoURL}</arg>
-			<arg>-mongodb</arg><arg>${mongoDb}</arg>
-			<arg>-f</arg><arg>OAF</arg>
-			<arg>-l</arg><arg>store</arg>
-			<arg>-i</arg><arg>cleaned</arg>
+			<arg>--hdfsPath</arg><arg>${contentPath}/oaf_records</arg>
+			<arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
+			<arg>--mongoDb</arg><arg>${mongoDb}</arg>
+			<arg>--mdFormat</arg><arg>OAF</arg>
+			<arg>--mdLayout</arg><arg>store</arg>
+			<arg>--mdInterpretation</arg><arg>cleaned</arg>
 		</java>
 		<ok to="wait_import"/>
 		<error to="Kill"/>
@@ -231,11 +231,11 @@
 				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
 				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
 			</spark-opts>
-			<arg>-s</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims</arg>
-			<arg>-t</arg><arg>${workingDir}/entities_claim</arg>
-			<arg>-pgurl</arg><arg>${postgresURL}</arg>
-			<arg>-pguser</arg><arg>${postgresUser}</arg>
-			<arg>-pgpasswd</arg><arg>${postgresPassword}</arg>
+			<arg>--sourcePaths</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims</arg>
+			<arg>--targetPath</arg><arg>${workingDir}/entities_claim</arg>
+			<arg>--postgresUrl</arg><arg>${postgresURL}</arg>
+			<arg>--postgresUser</arg><arg>${postgresUser}</arg>
+			<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
 		</spark>
 		<ok to="GenerateGraph_claims"/>
 		<error to="Kill"/>
@@ -257,8 +257,8 @@
 				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
 				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
 			</spark-opts>
-			<arg>-s</arg><arg>${workingDir}/entities_claim</arg>
-			<arg>-g</arg><arg>${workingDir}/graph_claims</arg>
+			<arg>--sourcePath</arg><arg>${workingDir}/entities_claim</arg>
+			<arg>--graphRawPath</arg><arg>${workingDir}/graph_claims</arg>
 		</spark>
 		<ok to="wait_graphs"/>
 		<error to="Kill"/>
@@ -280,11 +280,11 @@
 				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
 				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
 			</spark-opts>
-			<arg>-s</arg><arg>${contentPath}/db_records,${contentPath}/oaf_records,${contentPath}/odf_records</arg>
-			<arg>-t</arg><arg>${workingDir}/entities</arg>
-			<arg>-pgurl</arg><arg>${postgresURL}</arg>
-			<arg>-pguser</arg><arg>${postgresUser}</arg>
-			<arg>-pgpasswd</arg><arg>${postgresPassword}</arg>
+			<arg>--sourcePaths</arg><arg>${contentPath}/db_records,${contentPath}/oaf_records,${contentPath}/odf_records</arg>
+			<arg>--targetPath</arg><arg>${workingDir}/entities</arg>
+			<arg>--postgresUrl</arg><arg>${postgresURL}</arg>
+			<arg>--postgresUser</arg><arg>${postgresUser}</arg>
+			<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
 		</spark>
 		<ok to="GenerateGraph"/>
 		<error to="Kill"/>
@@ -307,8 +307,8 @@
 				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
 				--conf spark.sql.shuffle.partitions=7680
 			</spark-opts>
-			<arg>-s</arg><arg>${workingDir}/entities</arg>
-			<arg>-g</arg><arg>${workingDir}/graph_raw</arg>
+			<arg>--sourcePath</arg><arg>${workingDir}/entities</arg>
+			<arg>--graphRawPath</arg><arg>${workingDir}/graph_raw</arg>
 		</spark>
 		<ok to="wait_graphs"/>
 		<error to="Kill"/>
@@ -1,8 +1,8 @@
-<workflow-app name="import db entities (step 1)" xmlns="uri:oozie:workflow:0.5">
+<workflow-app name="import DB entities" xmlns="uri:oozie:workflow:0.5">
 	<parameters>
 		<property>
-			<name>migrationPathStep1</name>
-			<description>the base path to store hdfs file</description>
+			<name>contentPath</name>
+			<description>path location to store (or reuse) content from the aggregator</description>
 		</property>
 		<property>
 			<name>postgresURL</name>
@@ -16,6 +16,7 @@
 			<name>postgresPassword</name>
 			<description>the password postgres</description>
 		</property>
+
 		<property>
 			<name>sparkDriverMemory</name>
 			<description>memory for driver process</description>
@@ -28,31 +29,81 @@
 			<name>sparkExecutorCores</name>
 			<description>number of cores used by single executor</description>
 		</property>
+		<property>
+			<name>oozieActionShareLibForSpark2</name>
+			<description>oozie action sharelib for spark 2.*</description>
+		</property>
+		<property>
+			<name>spark2ExtraListeners</name>
+			<value>com.cloudera.spark.lineage.NavigatorAppListener</value>
+			<description>spark 2.* extra listeners classname</description>
+		</property>
+		<property>
+			<name>spark2SqlQueryExecutionListeners</name>
+			<value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
+			<description>spark 2.* sql query execution listeners classname</description>
+		</property>
+		<property>
+			<name>spark2YarnHistoryServerAddress</name>
+			<description>spark 2.* yarn history server address</description>
+		</property>
+		<property>
+			<name>spark2EventLogDir</name>
+			<description>spark 2.* event log dir location</description>
+		</property>
 	</parameters>
 
-	<start to="ResetWorkingPath"/>
+	<global>
+		<job-tracker>${jobTracker}</job-tracker>
+		<name-node>${nameNode}</name-node>
+		<configuration>
+			<property>
+				<name>mapreduce.job.queuename</name>
+				<value>${queueName}</value>
+			</property>
+			<property>
+				<name>oozie.launcher.mapred.job.queue.name</name>
+				<value>${oozieLauncherQueueName}</value>
+			</property>
+			<property>
+				<name>oozie.action.sharelib.for.spark</name>
+				<value>${oozieActionShareLibForSpark2}</value>
+			</property>
+		</configuration>
+	</global>
+
+	<start to="ImportDB"/>
 
 	<kill name="Kill">
 		<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
 	</kill>
 
-	<action name="ResetWorkingPath">
-		<fs>
-			<delete path='${migrationPathStep1}/db_records'/>
-		</fs>
-		<ok to="ImportDB"/>
+	<action name="ImportDB">
+		<java>
+			<prepare>
+				<delete path="${contentPath}/db_records"/>
+			</prepare>
+			<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
+			<arg>--hdfsPath</arg><arg>${contentPath}/db_records</arg>
+			<arg>--postgresUrl</arg><arg>${postgresURL}</arg>
+			<arg>--postgresUser</arg><arg>${postgresUser}</arg>
+			<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
+		</java>
+		<ok to="ImportDB_claims"/>
 		<error to="Kill"/>
 	</action>
 
-	<action name="ImportDB">
+	<action name="ImportDB_claims">
 		<java>
-			<job-tracker>${jobTracker}</job-tracker>
-			<name-node>${nameNode}</name-node>
-			<main-class>eu.dnetlib.dhp.migration.step1.MigrateDbEntitiesApplication</main-class>
-			<arg>-p</arg><arg>${migrationPathStep1}/db_records</arg>
-			<arg>-pgurl</arg><arg>${postgresURL}</arg>
-			<arg>-pguser</arg><arg>${postgresUser}</arg>
-			<arg>-pgpasswd</arg><arg>${postgresPassword}</arg>
+			<prepare>
+				<delete path="${contentPath}/db_claims"/>
+			</prepare>
+			<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
+			<arg>--hdfsPath</arg><arg>${contentPath}/db_claims</arg>
+			<arg>--postgresUrl</arg><arg>${postgresURL}</arg>
+			<arg>--postgresUser</arg><arg>${postgresUser}</arg>
+			<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
+			<arg>--action</arg><arg>claims</arg>
 		</java>
 		<ok to="End"/>
 		<error to="Kill"/>
@@ -3,7 +3,7 @@
 	<parent>
 		<artifactId>dhp-workflows</artifactId>
 		<groupId>eu.dnetlib.dhp</groupId>
-		<version>1.1.7-SNAPSHOT</version>
+		<version>1.2.1-SNAPSHOT</version>
 	</parent>
 	<modelVersion>4.0.0</modelVersion>
 
@@ -3,7 +3,7 @@
 	<parent>
 		<artifactId>dhp-workflows</artifactId>
 		<groupId>eu.dnetlib.dhp</groupId>
-		<version>1.1.7-SNAPSHOT</version>
+		<version>1.2.1-SNAPSHOT</version>
 	</parent>
 	<modelVersion>4.0.0</modelVersion>
 
@@ -405,6 +405,9 @@
 				--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
 				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
 				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+				--conf spark.speculation=false
+				--conf spark.hadoop.mapreduce.map.speculative=false
+				--conf spark.hadoop.mapreduce.reduce.speculative=false
 			</spark-opts>
 			<arg>--inputPath</arg><arg>${workingDir}/xml</arg>
 			<arg>--isLookupUrl</arg> <arg>${isLookupUrl}</arg>
@@ -3,7 +3,7 @@
 	<parent>
 		<artifactId>dhp-workflows</artifactId>
 		<groupId>eu.dnetlib.dhp</groupId>
-		<version>1.1.7-SNAPSHOT</version>
+		<version>1.2.1-SNAPSHOT</version>
 	</parent>
 	<modelVersion>4.0.0</modelVersion>
 	<artifactId>dhp-stats-update</artifactId>
 
@@ -1,2 +1,2 @@
 DROP TABLE IF EXISTS ${stats_db_name}.datasource_languages;
-CREATE TABLE ${stats_db_name}.datasource_languages AS SELECT substr(d.id, 4) as id, langs.languages as language from openaire.datasource d LATERAL VIEW explode(d.odlanguages.value) langs as languages;
+CREATE TABLE ${stats_db_name}.datasource_languages AS SELECT substr(d.id, 4) as id, langs.languages as language from ${openaire_db_name}.datasource d LATERAL VIEW explode(d.odlanguages.value) langs as languages;
@@ -1,11 +1,9 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 	<parent>
 		<artifactId>dhp-workflows</artifactId>
 		<groupId>eu.dnetlib.dhp</groupId>
-		<version>1.1.7-SNAPSHOT</version>
+		<version>1.2.1-SNAPSHOT</version>
 	</parent>
 	<modelVersion>4.0.0</modelVersion>
 
@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>eu.dnetlib.dhp</groupId>
 		<artifactId>dhp</artifactId>
-		<version>1.1.7-SNAPSHOT</version>
+		<version>1.2.1-SNAPSHOT</version>
 		<relativePath>../</relativePath>
 	</parent>
 
@@ -26,6 +26,7 @@
 		<module>dhp-dedup-scholexplorer</module>
 		<module>dhp-graph-provision-scholexplorer</module>
 		<module>dhp-stats-update</module>
+		<module>dhp-broker-events</module>
 	</modules>
 
 	<pluginRepositories>
pom.xml
@@ -3,7 +3,7 @@
 	<modelVersion>4.0.0</modelVersion>
 	<groupId>eu.dnetlib.dhp</groupId>
 	<artifactId>dhp</artifactId>
-	<version>1.1.7-SNAPSHOT</version>
+	<version>1.2.1-SNAPSHOT</version>
 	<packaging>pom</packaging>
 
 	<licenses>