From 7ba586d2e587c117aacf50fb6e1e9f9b034df35c Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Tue, 17 Dec 2019 16:24:49 +0100 Subject: [PATCH 01/21] oozie workflow aimed to build the adjacency lists representation of the graph, needed to build the records to be indexed --- .../job-override.properties | 3 + dhp-workflows/dhp-graph-provision/pom.xml | 37 +++++++ .../java/eu/dnetlib/dhp/graph/EntityNode.java | 4 + .../dnetlib/dhp/graph/GraphMappingUtils.java | 23 ++++ .../eu/dnetlib/dhp/graph/RelatedEntity.java | 69 ++++++++++++ .../dhp/graph/SparkGraphIndexingJob.java | 102 ++++++++++++++++++ .../dhp/graph/input_graph_parameters.json | 5 + .../dhp/graph/oozie_app/config-default.xml | 26 +++++ .../dnetlib/dhp/graph/oozie_app/workflow.xml | 46 ++++++++ 9 files changed, 315 insertions(+) create mode 100644 dhp-workflows/dhp-graph-provision/job-override.properties create mode 100644 dhp-workflows/dhp-graph-provision/pom.xml create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityNode.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkGraphIndexingJob.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_graph_parameters.json create mode 100644 dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/config-default.xml create mode 100644 dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml diff --git a/dhp-workflows/dhp-graph-provision/job-override.properties b/dhp-workflows/dhp-graph-provision/job-override.properties new file mode 100644 index 000000000..31f7f88f5 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/job-override.properties @@ -0,0 +1,3 @@ +sparkDriverMemory=16G +sparkExecutorMemory=16G +hive_db_name=claudio \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/pom.xml b/dhp-workflows/dhp-graph-provision/pom.xml new file mode 100644 index 000000000..d47463774 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/pom.xml @@ -0,0 +1,37 @@ + + + + dhp-workflows + eu.dnetlib.dhp + 1.0.5-SNAPSHOT + + 4.0.0 + + dhp-graph-provision + + + + + org.apache.spark + spark-core_2.11 + + + org.apache.spark + spark-sql_2.11 + + + + eu.dnetlib.dhp + dhp-common + ${project.version} + + + eu.dnetlib.dhp + dhp-schemas + ${project.version} + + + + + + \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityNode.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityNode.java new file mode 100644 index 000000000..be1babae2 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityNode.java @@ -0,0 +1,4 @@ +package eu.dnetlib.dhp.graph; + +public class EntityNode { +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java new file mode 100644 index 000000000..ab19ff2b5 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java @@ -0,0 +1,23 @@ +package eu.dnetlib.dhp.graph; + +import com.google.common.collect.Maps; +import 
eu.dnetlib.dhp.schema.oaf.*; + +import java.util.Map; + +public class GraphMappingUtils { + + public final static Map types = Maps.newHashMap(); + + static { + types.put("datasource", Datasource.class); + types.put("organization", Organization.class); + types.put("project", Project.class); + types.put("dataset", Dataset.class); + types.put("otherresearchproduct", OtherResearchProduct.class); + types.put("software", Software.class); + types.put("publication", Publication.class); + types.put("relation", Relation.class); + } + +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java new file mode 100644 index 000000000..dbab04f16 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java @@ -0,0 +1,69 @@ +package eu.dnetlib.dhp.graph; + +import java.io.Serializable; + +public class RelatedEntity implements Serializable { + + private String relType; + + private String subRelType; + + private String relClass; + + private String type; + + private String payload; + + public RelatedEntity(String relType, String subRelType, String relClass, String type, String payload) { + this.relType = relType; + this.subRelType = subRelType; + this.relClass = relClass; + this.type = type; + this.payload = payload; + } + + public String getRelType() { + return relType; + } + + public RelatedEntity setRelType(String relType) { + this.relType = relType; + return this; + } + + public String getSubRelType() { + return subRelType; + } + + public RelatedEntity setSubRelType(String subRelType) { + this.subRelType = subRelType; + return this; + } + + public String getRelClass() { + return relClass; + } + + public RelatedEntity setRelClass(String relClass) { + this.relClass = relClass; + return this; + } + + public String getType() { + return type; + } + + public RelatedEntity setType(String type) { + this.type = type; + return this; + } + + public String getPayload() { + return payload; + } + + public RelatedEntity setPayload(String payload) { + this.payload = payload; + return this; + } +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkGraphIndexingJob.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkGraphIndexingJob.java new file mode 100644 index 000000000..04711efbd --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkGraphIndexingJob.java @@ -0,0 +1,102 @@ +package eu.dnetlib.dhp.graph; + +import com.google.common.collect.Sets; +import eu.dnetlib.dhp.application.ArgumentApplicationParser; +import eu.dnetlib.dhp.schema.common.EntityPayload; +import org.apache.commons.io.IOUtils; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.compress.GzipCodec; +import org.apache.spark.api.java.function.MapFunction; +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; +import org.apache.spark.sql.SaveMode; +import org.apache.spark.sql.SparkSession; +import scala.Tuple2; +import scala.runtime.AbstractFunction1; + +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.apache.commons.lang3.StringUtils.substringAfter; +import static org.apache.commons.lang3.StringUtils.substringBefore; +import static org.apache.spark.sql.Encoders.bean; + +public class SparkGraphIndexingJob { + + private final static String 
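// Hedged aside (not part of this patch): the GraphMappingUtils.types registry
// above, assumed to be declared as Map<String, Class>, can drive generic
// deserialization of entity payloads with Jackson; parseOaf is a hypothetical
// helper, shown only to illustrate the registry's intended use.
public static Oaf parseOaf(final String type, final String json) throws java.io.IOException {
    final Class<?> clazz = GraphMappingUtils.types.get(type); // e.g. "publication" -> Publication.class
    return (Oaf) new ObjectMapper().readValue(json, clazz);
}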
ENTITY_NODES_PATH = "/tmp/entity_node"; + private static final long LIMIT = 100; + + public static void main(String[] args) throws Exception { + + final ArgumentApplicationParser parser = new ArgumentApplicationParser(IOUtils.toString(SparkGraphIndexingJob.class.getResourceAsStream("/eu/dnetlib/dhp/graph/input_graph_parameters.json"))); + parser.parseArgument(args); + final SparkSession spark = SparkSession + .builder() + .appName(SparkGraphIndexingJob.class.getSimpleName()) + .master(parser.get("master")) + .config("hive.metastore.uris", parser.get("hive_metastore_uris")) + .config("spark.driver.cores", 1) + .config("spark.executor.cores", 1) + .config("spark.yarn.executor.memoryOverhead", "4G") + .config("spark.yarn.driver.memoryOverhead", "4G") + .enableHiveSupport() + .getOrCreate(); + + final String hiveDbName = parser.get("hive_db_name"); + + final FileSystem fs = FileSystem.get(spark.sparkContext().hadoopConfiguration()); + if (fs.exists(new Path(ENTITY_NODES_PATH))) { + fs.delete(new Path(ENTITY_NODES_PATH), true); + } + + spark + .sql(getJoinEntitiesSQL(hiveDbName)) + .transform(toEntityNode()) + /* + .map((MapFunction) r -> { + return null; + }, bean(String.class)) + */ + .rdd() + + .saveAsTextFile(ENTITY_NODES_PATH, GzipCodec.class); + } + + private static AbstractFunction1, Dataset> toEntityNode() { + return new AbstractFunction1, Dataset>() { + @Override + public Dataset apply(Dataset d) { + return d.map((MapFunction) r -> { + + final List res = r.getList(r.fieldIndex("related_entity")); + final byte[] payload = r.getAs("payload"); + return new EntityNode(r.getAs("id"), r.getAs("type"), new String(payload)) + .setRelatedEntities(res + .stream() + .map(re -> new Tuple2<>(substringBefore(re, "@@"), substringAfter(re, "@@"))) + .map(re -> new RelatedEntity(r.getAs("reltype"), r.getAs("subreltype"), r.getAs("relclass"), re._1(), re._2())) + .limit(LIMIT) + .collect(Collectors.toList())); + + }, bean(EntityNode.class)); + } + }; + } + + private static String getJoinEntitiesSQL(String hiveDbName) { + return String.format( + "SELECT " + + "E_s.id AS id, " + + "E_s.type AS type, " + + "E_s.payload AS payload, " + + "r.reltype AS reltype, r.subreltype AS subreltype, r.relclass AS relclass, " + + "collect_list(concat(E_t.type, '@@', E_t.payload)) AS related_entity " + + "FROM %s.entities " + "" /*"TABLESAMPLE(0.1 PERCENT) "*/ + "E_s " + + "LEFT JOIN %s.relation r ON (r.source = E_s.id) " + + "JOIN %s.entities E_t ON (E_t.id = r.target) \n" + + "GROUP BY E_s.id, E_s.type, E_s.payload, r.reltype, r.subreltype, r.relclass", hiveDbName, hiveDbName, hiveDbName); + } + +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_graph_parameters.json b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_graph_parameters.json new file mode 100644 index 000000000..613389d79 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_graph_parameters.json @@ -0,0 +1,5 @@ +[ + {"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true}, + {"paramName":"h", "paramLongName":"hive_metastore_uris","paramDescription": "the hive metastore uris", "paramRequired": true}, + {"paramName":"db", "paramLongName":"hive_db_name", "paramDescription": "the target hive database name", "paramRequired": true} +] \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/config-default.xml 
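A hedged usage sketch for getJoinEntitiesSQL() above (not part of the patch; the
database name is an assumption): the query emits one row per source entity and
relation qualifier, with the related entities aggregated by
collect_list(concat(type, '@@', payload)) into the related_entity array.

    final Dataset<Row> joined = spark.sql(getJoinEntitiesSQL("openaire"));
    // each row carries id/type/payload of the source entity, the relation
    // qualifiers, and the list of "type@@payload" strings of its neighbours
    joined.select("id", "type", "reltype", "subreltype", "relclass").show(5, false);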
b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/config-default.xml new file mode 100644 index 000000000..fcab9dd00 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/config-default.xml @@ -0,0 +1,26 @@ + + + jobTracker + yarnRM + + + nameNode + hdfs://nameservice1 + + + oozie.use.system.libpath + true + + + oozie.action.sharelib.for.spark + spark2 + + + hive_metastore_uris + thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083 + + + hive_db_name + openaire + + \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml new file mode 100644 index 000000000..473b697cd --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml @@ -0,0 +1,46 @@ + + + + hive_db_name + the target hive database name + + + sparkDriverMemory + memory for driver process + + + sparkExecutorMemory + memory for individual executor + + + sparkExecutorCores + number of cores used by single executor + + + + + + + Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}] + + + + + ${jobTracker} + ${nameNode} + yarn-cluster + cluster + GraphIndexing + eu.dnetlib.dhp.graph.SparkGraphIndexingJob + dhp-graph-provision-${projectVersion}.jar + --executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" --conf spark.sql.warehouse.dir="/user/hive/warehouse" + -mt yarn-cluster + --hive_db_name${hive_db_name} + --hive_metastore_uris${hive_metastore_uris} + + + + + + + \ No newline at end of file From 97c239ee0d4a9fbfd7d70dd06ff1f7777de7e881 Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Thu, 16 Jan 2020 12:02:28 +0200 Subject: [PATCH 02/21] WIP: trying to find a way to build the records for the index --- dhp-schemas/pom.xml | 5 + .../java/eu/dnetlib/dhp/schema/oaf/Oaf.java | 13 ++ .../job-override.properties | 8 +- .../eu/dnetlib/dhp/graph/EntityRelEntity.java | 53 +++++++ .../eu/dnetlib/dhp/graph/GraphJoiner.java | 139 ++++++++++++++++++ .../dhp/graph/SparkGraphIndexingJob.java | 70 +-------- .../java/eu/dnetlib/dhp/graph/TypedRow.java | 44 ++++++ .../dhp/graph/input_graph_parameters.json | 3 +- .../dnetlib/dhp/graph/oozie_app/workflow.xml | 3 +- 9 files changed, 265 insertions(+), 73 deletions(-) create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityRelEntity.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TypedRow.java diff --git a/dhp-schemas/pom.xml b/dhp-schemas/pom.xml index 20896a61d..491cbe668 100644 --- a/dhp-schemas/pom.xml +++ b/dhp-schemas/pom.xml @@ -26,6 +26,11 @@ commons-lang3 + + com.fasterxml.jackson.core + jackson-databind + + eu.dnetlib.dhp dhp-common diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Oaf.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Oaf.java index 352ebbc6e..010633ec3 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Oaf.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Oaf.java @@ -1,5 +1,8 @@ 
package eu.dnetlib.dhp.schema.oaf; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + import java.io.Serializable; public abstract class Oaf implements Serializable { @@ -23,4 +26,14 @@ public abstract class Oaf implements Serializable { public void setLastupdatetimestamp(Long lastupdatetimestamp) { this.lastupdatetimestamp = lastupdatetimestamp; } + + @Override + public String toString() { + try { + return new ObjectMapper().writeValueAsString(this); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } + } diff --git a/dhp-workflows/dhp-graph-provision/job-override.properties b/dhp-workflows/dhp-graph-provision/job-override.properties index 31f7f88f5..882053c1a 100644 --- a/dhp-workflows/dhp-graph-provision/job-override.properties +++ b/dhp-workflows/dhp-graph-provision/job-override.properties @@ -1,3 +1,5 @@ -sparkDriverMemory=16G -sparkExecutorMemory=16G -hive_db_name=claudio \ No newline at end of file +sparkDriverMemory=7G +sparkExecutorMemory=7G +sparkExecutorMemoryOverhead=5G +hive_db_name=claudio +sourcePath=/tmp/db_openaireplus_services_beta.export.2019.11.06 \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityRelEntity.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityRelEntity.java new file mode 100644 index 000000000..ac89e4351 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityRelEntity.java @@ -0,0 +1,53 @@ +package eu.dnetlib.dhp.graph; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import eu.dnetlib.dhp.schema.oaf.Relation; + +import java.io.Serializable; + +public class EntityRelEntity implements Serializable { + private TypedRow source; + private Relation relation; + private TypedRow target; + + public EntityRelEntity(TypedRow source) { + this.source = source; + } + + public TypedRow getSource() { + return source; + } + + public EntityRelEntity setSource(TypedRow source) { + this.source = source; + return this; + } + + public Relation getRelation() { + return relation; + } + + public EntityRelEntity setRelation(Relation relation) { + this.relation = relation; + return this; + } + + public TypedRow getTarget() { + return target; + } + + public EntityRelEntity setTarget(TypedRow target) { + this.target = target; + return this; + } + + @Override + public String toString() { + try { + return new ObjectMapper().writeValueAsString(this); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java new file mode 100644 index 000000000..5764642dc --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java @@ -0,0 +1,139 @@ +package eu.dnetlib.dhp.graph; + +import com.fasterxml.jackson.databind.ObjectMapper; +import eu.dnetlib.dhp.schema.oaf.*; +import org.apache.hadoop.io.Text; +import org.apache.spark.api.java.JavaPairRDD; +import org.apache.spark.api.java.JavaRDD; +import org.apache.spark.api.java.JavaSparkContext; +import org.apache.spark.api.java.Optional; +import org.apache.spark.api.java.function.PairFunction; +import org.apache.spark.sql.Encoders; +import org.apache.spark.sql.SaveMode; +import 
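// Hedged aside on the toString() overrides introduced above (a variant, not in
// this patch): ObjectMapper is thread-safe once configured, so a shared static
// instance avoids paying its construction cost on every serialization call.
private static final ObjectMapper MAPPER = new ObjectMapper();

@Override
public String toString() {
    try {
        return MAPPER.writeValueAsString(this);
    } catch (final JsonProcessingException e) {
        throw new RuntimeException(e);
    }
}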
org.apache.spark.sql.SparkSession; +import scala.Tuple2; + +import java.io.Serializable; + +public class GraphJoiner implements Serializable { + + public static final int MAX_RELS = 100; + + public void join(final SparkSession spark, final String inputPath, final String hiveDbName, final String outPath) { + + final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext()); + + /* + JavaPairRDD entities = sc.sequenceFile(inputPath + "/publication", Text.class, Text.class) + .map(item -> new ObjectMapper().readValue(item._2().toString(), Publication.class)) + .map(oaf -> new TypedRow("publication", oaf)) + .mapToPair(toPair()); + + */ + + JavaPairRDD entities = sc.sequenceFile(inputPath + "/datasource", Text.class, Text.class) + .map(item -> new ObjectMapper().readValue(item._2().toString(), Datasource.class)) + .map(oaf -> new TypedRow("datasource", oaf)) + .mapToPair(toPair()) + .union(sc.sequenceFile(inputPath + "/organization", Text.class, Text.class) + .map(item -> new ObjectMapper().readValue(item._2().toString(), Organization.class)) + .map(oaf -> new TypedRow("organization", oaf)) + .mapToPair(toPair())) + .union(sc.sequenceFile(inputPath + "/project", Text.class, Text.class) + .map(item -> new ObjectMapper().readValue(item._2().toString(), Project.class)) + .map(oaf -> new TypedRow("project", oaf)) + .mapToPair(toPair())) + .union(sc.sequenceFile(inputPath + "/dataset", Text.class, Text.class) + .map(item -> new ObjectMapper().readValue(item._2().toString(), Dataset.class)) + .map(oaf -> new TypedRow("dataset", oaf)) + .mapToPair(toPair())) + .union(sc.sequenceFile(inputPath + "/otherresearchproduct", Text.class, Text.class) + .map(item -> new ObjectMapper().readValue(item._2().toString(), OtherResearchProduct.class)) + .map(oaf -> new TypedRow("otherresearchproduct", oaf)) + .mapToPair(toPair())) + .union(sc.sequenceFile(inputPath + "/software", Text.class, Text.class) + .map(item -> new ObjectMapper().readValue(item._2().toString(), Software.class)) + .map(oaf -> new TypedRow("software", oaf)) + .mapToPair(toPair())); + /* + .union(sc.sequenceFile(inputPath + "/publication", Text.class, Text.class) + .map(item -> new ObjectMapper().readValue(item._2().toString(), Publication.class)) + .map(oaf -> new TypedRow("publication", oaf)) + .mapToPair(toPair())); + + */ + + /* + JavaRDD rels = sc.sequenceFile(inputPath + "/relation", Text.class, Text.class) + .map(item -> new ObjectMapper().readValue(item._2().toString(), Relation.class)) + .map(oaf -> new TypedRow("relation", oaf)) + .mapToPair(toPair()) + .groupByKey() + .map(t -> Iterables.limit(t._2(), MAX_RELS)) + .flatMap(t -> t.iterator()) + .map(t -> (Relation) t.getOaf()); + + spark.createDataset(rels.rdd(), Encoders.bean(Relation.class)) + .write() + .mode(SaveMode.Overwrite) + .saveAsTable(hiveDbName + ".relation_100"); + */ + + JavaPairRDD bounded_rels = spark.table(hiveDbName + ".relation_" + MAX_RELS) + .as(Encoders.bean(Relation.class)) + .javaRDD() + .map(r -> new TypedRow("relation", r)) + .mapToPair(toPair()); + + // build the adjacency list: e -> r + JavaPairRDD>> adjacency_list = entities.leftOuterJoin(bounded_rels); + + JavaRDD linked_entities = adjacency_list + .mapToPair(toPairTarget()) // make rel.targetid explicit so that we can join it + .leftOuterJoin(entities) // again with the entities to get the target entity + .map(l -> toEntityRelEntity(l)); // and map it to a more readable representation + + spark.createDataFrame(linked_entities, EntityRelEntity.class) + .write() + .mode(SaveMode.Overwrite) + 
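// Recap of the flow above: (1) entities keyed by id are left-joined with the
// relations keyed by source id, producing the e -> r adjacency list; (2)
// toPairTarget() re-keys each (entity, relation) pair on the relation target
// id so a second join against the same entities RDD attaches the target
// entity, yielding the (source, relation, target) triples persisted below.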
.saveAsTable(hiveDbName + ".linked_entities"); + } + + private EntityRelEntity toEntityRelEntity(Tuple2>>, Optional>> l) { + // extract the entity source + final EntityRelEntity res = new EntityRelEntity(l._2()._1()._2()._1()); + + if(l._2()._1()._2()._2().isPresent() && l._2()._2().isPresent()) { + + // extract the relationship + res.setRelation((Relation) l._2()._1()._2()._2().get().getOaf()); + + // extract the related entity + res.setTarget(l._2()._2().get()); + } + + return res; + } + + private PairFunction>>, String, Tuple2>>> toPairTarget() { + return e -> { + Optional o = e._2()._2(); + if (o.isPresent()) { + return new Tuple2<>(((Relation) o.get().getOaf()).getTarget(), e); + } else { + return new Tuple2<>(null, e); + } + }; + } + + private PairFunction toPair() { + return e -> { + if (!"relation".equals(e.getType())) { + return new Tuple2<>( ((OafEntity) e.getOaf()).getId(), e); + } else { + return new Tuple2<>( ((Relation) e.getOaf()).getSource(), e); + } + }; + } + +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkGraphIndexingJob.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkGraphIndexingJob.java index 04711efbd..ce8e7e396 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkGraphIndexingJob.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkGraphIndexingJob.java @@ -1,32 +1,14 @@ package eu.dnetlib.dhp.graph; -import com.google.common.collect.Sets; import eu.dnetlib.dhp.application.ArgumentApplicationParser; -import eu.dnetlib.dhp.schema.common.EntityPayload; import org.apache.commons.io.IOUtils; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.io.compress.GzipCodec; -import org.apache.spark.api.java.function.MapFunction; -import org.apache.spark.sql.Dataset; -import org.apache.spark.sql.Row; -import org.apache.spark.sql.SaveMode; import org.apache.spark.sql.SparkSession; -import scala.Tuple2; -import scala.runtime.AbstractFunction1; - -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; - -import static org.apache.commons.lang3.StringUtils.substringAfter; -import static org.apache.commons.lang3.StringUtils.substringBefore; -import static org.apache.spark.sql.Encoders.bean; public class SparkGraphIndexingJob { private final static String ENTITY_NODES_PATH = "/tmp/entity_node"; - private static final long LIMIT = 100; public static void main(String[] args) throws Exception { @@ -37,13 +19,10 @@ public class SparkGraphIndexingJob { .appName(SparkGraphIndexingJob.class.getSimpleName()) .master(parser.get("master")) .config("hive.metastore.uris", parser.get("hive_metastore_uris")) - .config("spark.driver.cores", 1) - .config("spark.executor.cores", 1) - .config("spark.yarn.executor.memoryOverhead", "4G") - .config("spark.yarn.driver.memoryOverhead", "4G") .enableHiveSupport() .getOrCreate(); + final String inputPath = parser.get("sourcePath"); final String hiveDbName = parser.get("hive_db_name"); final FileSystem fs = FileSystem.get(spark.sparkContext().hadoopConfiguration()); @@ -51,52 +30,7 @@ public class SparkGraphIndexingJob { fs.delete(new Path(ENTITY_NODES_PATH), true); } - spark - .sql(getJoinEntitiesSQL(hiveDbName)) - .transform(toEntityNode()) - /* - .map((MapFunction) r -> { - return null; - }, bean(String.class)) - */ - .rdd() - - .saveAsTextFile(ENTITY_NODES_PATH, GzipCodec.class); - } - - private static AbstractFunction1, Dataset> toEntityNode() 
{ - return new AbstractFunction1, Dataset>() { - @Override - public Dataset apply(Dataset d) { - return d.map((MapFunction) r -> { - - final List res = r.getList(r.fieldIndex("related_entity")); - final byte[] payload = r.getAs("payload"); - return new EntityNode(r.getAs("id"), r.getAs("type"), new String(payload)) - .setRelatedEntities(res - .stream() - .map(re -> new Tuple2<>(substringBefore(re, "@@"), substringAfter(re, "@@"))) - .map(re -> new RelatedEntity(r.getAs("reltype"), r.getAs("subreltype"), r.getAs("relclass"), re._1(), re._2())) - .limit(LIMIT) - .collect(Collectors.toList())); - - }, bean(EntityNode.class)); - } - }; - } - - private static String getJoinEntitiesSQL(String hiveDbName) { - return String.format( - "SELECT " + - "E_s.id AS id, " + - "E_s.type AS type, " + - "E_s.payload AS payload, " + - "r.reltype AS reltype, r.subreltype AS subreltype, r.relclass AS relclass, " + - "collect_list(concat(E_t.type, '@@', E_t.payload)) AS related_entity " + - "FROM %s.entities " + "" /*"TABLESAMPLE(0.1 PERCENT) "*/ + "E_s " + - "LEFT JOIN %s.relation r ON (r.source = E_s.id) " + - "JOIN %s.entities E_t ON (E_t.id = r.target) \n" + - "GROUP BY E_s.id, E_s.type, E_s.payload, r.reltype, r.subreltype, r.relclass", hiveDbName, hiveDbName, hiveDbName); + new GraphJoiner().join(spark, inputPath, hiveDbName, ENTITY_NODES_PATH); } } diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TypedRow.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TypedRow.java new file mode 100644 index 000000000..5c933ca80 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TypedRow.java @@ -0,0 +1,44 @@ +package eu.dnetlib.dhp.graph; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import eu.dnetlib.dhp.schema.oaf.Oaf; + +import java.io.Serializable; + +public class TypedRow implements Serializable { + private String type; + private Oaf oaf; + + public TypedRow(String type, Oaf oaf) { + this.type = type; + this.oaf = oaf; + } + + public String getType() { + return type; + } + + public TypedRow setType(String type) { + this.type = type; + return this; + } + + public Oaf getOaf() { + return oaf; + } + + public TypedRow setOaf(Oaf oaf) { + this.oaf = oaf; + return this; + } + + @Override + public String toString() { + try { + return new ObjectMapper().writeValueAsString(this); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_graph_parameters.json b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_graph_parameters.json index 613389d79..a197abc78 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_graph_parameters.json +++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_graph_parameters.json @@ -1,5 +1,6 @@ [ {"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true}, {"paramName":"h", "paramLongName":"hive_metastore_uris","paramDescription": "the hive metastore uris", "paramRequired": true}, - {"paramName":"db", "paramLongName":"hive_db_name", "paramDescription": "the target hive database name", "paramRequired": true} + {"paramName":"db", "paramLongName":"hive_db_name", "paramDescription": "the target hive database name", "paramRequired": true}, + {"paramName":"s", 
"paramLongName":"sourcePath", "paramDescription": "the path of the sequencial file to read", "paramRequired": true} ] \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml index 473b697cd..00a890268 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml +++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml @@ -33,8 +33,9 @@ GraphIndexing eu.dnetlib.dhp.graph.SparkGraphIndexingJob dhp-graph-provision-${projectVersion}.jar - --executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" --conf spark.sql.warehouse.dir="/user/hive/warehouse" + --executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" --conf spark.sql.warehouse.dir="/user/hive/warehouse" --conf spark.yarn.executor.memoryOverhead=${sparkExecutorMemoryOverhead} -mt yarn-cluster + --sourcePath${sourcePath} --hive_db_name${hive_db_name} --hive_metastore_uris${hive_metastore_uris} From 63c0db4ff8b8d62a062088d065003680d65bedf4 Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Thu, 16 Jan 2020 15:54:53 +0200 Subject: [PATCH 03/21] instance URLs must be repeatable --- dhp-schemas/pom.xml | 2 +- .../src/main/java/eu/dnetlib/dhp/schema/oaf/Instance.java | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/dhp-schemas/pom.xml b/dhp-schemas/pom.xml index 491cbe668..ec5af8d3c 100644 --- a/dhp-schemas/pom.xml +++ b/dhp-schemas/pom.xml @@ -5,7 +5,7 @@ eu.dnetlib.dhp dhp - 1.0.5-SNAPSHOT + 1.1.5-SNAPSHOT ../ diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Instance.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Instance.java index f27704c5c..8726b85ce 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Instance.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Instance.java @@ -1,6 +1,7 @@ package eu.dnetlib.dhp.schema.oaf; import java.io.Serializable; +import java.util.List; public class Instance implements Serializable { @@ -12,7 +13,7 @@ public class Instance implements Serializable { private KeyValue hostedby; - private String url; + private List url; // other research products specifc private String distributionlocation; @@ -53,11 +54,11 @@ public class Instance implements Serializable { this.hostedby = hostedby; } - public String getUrl() { + public List getUrl() { return url; } - public void setUrl(String url) { + public void setUrl(List url) { this.url = url; } From 799929c1e3e40534f89be315d1884eb135e63516 Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Tue, 21 Jan 2020 16:35:44 +0100 Subject: [PATCH 04/21] joining entities using T x R x S method with groupByKey --- dhp-schemas/pom.xml | 2 +- .../eu/dnetlib/dhp/schema/dli/Entity.java | 118 ----------- .../java/eu/dnetlib/dhp/schema/dli/Pid.java | 33 ---- .../eu/dnetlib/dhp/schema/dli/Provenance.java | 35 ---- .../eu/dnetlib/dhp/schema/dli/Relation.java | 47 ----- 
.../dhp/schema/dli/RelationSemantic.java | 16 -- .../eu/dnetlib/dhp/schema/dli/Subject.java | 35 ---- .../job-override.properties | 1 - dhp-workflows/dhp-graph-provision/pom.xml | 4 + .../java/eu/dnetlib/dhp/graph/EntityNode.java | 4 - .../eu/dnetlib/dhp/graph/EntityRelEntity.java | 32 +-- .../eu/dnetlib/dhp/graph/GraphJoiner.java | 186 ++++++++---------- .../eu/dnetlib/dhp/graph/LinkedEntity.java | 29 +++ .../eu/dnetlib/dhp/graph/RelatedEntity.java | 69 ------- .../dhp/graph/SparkGraphIndexingJob.java | 17 +- .../main/java/eu/dnetlib/dhp/graph/Tuple.java | 31 +++ .../java/eu/dnetlib/dhp/graph/TypedRow.java | 52 +++-- .../dhp/graph/oozie_app/config-default.xml | 8 + .../dnetlib/dhp/graph/oozie_app/workflow.xml | 19 +- 19 files changed, 238 insertions(+), 500 deletions(-) delete mode 100644 dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Entity.java delete mode 100644 dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Pid.java delete mode 100644 dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Provenance.java delete mode 100644 dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Relation.java delete mode 100644 dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/RelationSemantic.java delete mode 100644 dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Subject.java delete mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityNode.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntity.java delete mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Tuple.java diff --git a/dhp-schemas/pom.xml b/dhp-schemas/pom.xml index ec5af8d3c..491cbe668 100644 --- a/dhp-schemas/pom.xml +++ b/dhp-schemas/pom.xml @@ -5,7 +5,7 @@ eu.dnetlib.dhp dhp - 1.1.5-SNAPSHOT + 1.0.5-SNAPSHOT ../ diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Entity.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Entity.java deleted file mode 100644 index 894d54eaf..000000000 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Entity.java +++ /dev/null @@ -1,118 +0,0 @@ -package eu.dnetlib.dhp.schema.dli; - -import java.io.Serializable; -import java.util.List; - -public class Entity implements Serializable { - - private String identifier; - - private List pid; - - private List title; - - private List date; - - private String typology; - - private List authors; - - private List subject; - - private String description; - - private String completionStatus; - - private List collectedFrom; - - private List publisher; - - - public String getIdentifier() { - return identifier; - } - - public void setIdentifier(String identifier) { - this.identifier = identifier; - } - - public List getPid() { - return pid; - } - - public void setPid(List pid) { - this.pid = pid; - } - - public List getTitle() { - return title; - } - - public void setTitle(List title) { - this.title = title; - } - - public List getDate() { - return date; - } - - public void setDate(List date) { - this.date = date; - } - - public String getTypology() { - return typology; - } - - public void setTypology(String typology) { - this.typology = typology; - } - - public List getAuthors() { - return authors; - } - - public void setAuthors(List authors) { - this.authors = authors; - } - - public List getSubject() { - return subject; - } - - public void setSubject(List subject) { - this.subject = subject; - } - - public String 
getDescription() { - return description; - } - - public void setDescription(String description) { - this.description = description; - } - - public List getCollectedFrom() { - return collectedFrom; - } - - public void setCollectedFrom(List collectedFrom) { - this.collectedFrom = collectedFrom; - } - - public List getPublisher() { - return publisher; - } - - public void setPublisher(List publisher) { - this.publisher = publisher; - } - - public String getCompletionStatus() { - return completionStatus; - } - - public void setCompletionStatus(String completionStatus) { - this.completionStatus = completionStatus; - } -} diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Pid.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Pid.java deleted file mode 100644 index 252245f45..000000000 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Pid.java +++ /dev/null @@ -1,33 +0,0 @@ -package eu.dnetlib.dhp.schema.dli; - -import eu.dnetlib.dhp.utils.DHPUtils; -import org.apache.commons.lang3.StringUtils; - -public class Pid { - - private String pid; - - private String pidType; - - public String getPid() { - return pid; - } - - public void setPid(String pid) { - this.pid = pid; - } - - public String getPidType() { - return pidType; - } - - public void setPidType(String pidType) { - this.pidType = pidType; - } - - public String generateId() { - if(StringUtils.isEmpty(pid) || StringUtils.isEmpty(pidType)) - return null; - return DHPUtils.md5(String.format("%s::%s", pid, pidType)); - } -} diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Provenance.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Provenance.java deleted file mode 100644 index 300b1134b..000000000 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Provenance.java +++ /dev/null @@ -1,35 +0,0 @@ -package eu.dnetlib.dhp.schema.dli; - -public class Provenance { - - private String datasourceId; - - private String datasourceName; - - private String completionStatus; - - - public String getDatasourceId() { - return datasourceId; - } - - public void setDatasourceId(String datasourceId) { - this.datasourceId = datasourceId; - } - - public String getDatasourceName() { - return datasourceName; - } - - public void setDatasourceName(String datasourceName) { - this.datasourceName = datasourceName; - } - - public String getCompletionStatus() { - return completionStatus; - } - - public void setCompletionStatus(String completionStatus) { - this.completionStatus = completionStatus; - } -} diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Relation.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Relation.java deleted file mode 100644 index b83cccb73..000000000 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Relation.java +++ /dev/null @@ -1,47 +0,0 @@ -package eu.dnetlib.dhp.schema.dli; - -import java.io.Serializable; -import java.util.List; - -public class Relation implements Serializable { - - private String source; - - private String target; - - private List provenance; - - private RelationSemantic semantic; - - public String getSource() { - return source; - } - - public void setSource(String source) { - this.source = source; - } - - public String getTarget() { - return target; - } - - public void setTarget(String target) { - this.target = target; - } - - public List getProvenance() { - return provenance; - } - - public void setProvenance(List provenance) { - this.provenance = provenance; - } - - public RelationSemantic getSemantic() { - return semantic; - } - - 
public void setSemantic(RelationSemantic semantic) { - this.semantic = semantic; - } -} diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/RelationSemantic.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/RelationSemantic.java deleted file mode 100644 index ff871ef2d..000000000 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/RelationSemantic.java +++ /dev/null @@ -1,16 +0,0 @@ -package eu.dnetlib.dhp.schema.dli; - -import java.io.Serializable; - -public class RelationSemantic extends Subject implements Serializable { - - public String inverse; - - public String getInverse() { - return inverse; - } - - public void setInverse(String inverse) { - this.inverse = inverse; - } -} diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Subject.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Subject.java deleted file mode 100644 index bd89bc6dd..000000000 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/dli/Subject.java +++ /dev/null @@ -1,35 +0,0 @@ -package eu.dnetlib.dhp.schema.dli; - -import java.io.Serializable; - -public class Subject implements Serializable { - - private String schema; - - private String value; - - public Subject() { - - } - - public Subject(String schema, String value) { - this.schema = schema; - this.value = value; - } - - public String getSchema() { - return schema; - } - - public void setSchema(String schema) { - this.schema = schema; - } - - public String getValue() { - return value; - } - - public void setValue(String value) { - this.value = value; - } -} diff --git a/dhp-workflows/dhp-graph-provision/job-override.properties b/dhp-workflows/dhp-graph-provision/job-override.properties index 882053c1a..1870b0e6e 100644 --- a/dhp-workflows/dhp-graph-provision/job-override.properties +++ b/dhp-workflows/dhp-graph-provision/job-override.properties @@ -1,5 +1,4 @@ sparkDriverMemory=7G sparkExecutorMemory=7G -sparkExecutorMemoryOverhead=5G hive_db_name=claudio sourcePath=/tmp/db_openaireplus_services_beta.export.2019.11.06 \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/pom.xml b/dhp-workflows/dhp-graph-provision/pom.xml index d47463774..62d8ac2ae 100644 --- a/dhp-workflows/dhp-graph-provision/pom.xml +++ b/dhp-workflows/dhp-graph-provision/pom.xml @@ -19,6 +19,10 @@ org.apache.spark spark-sql_2.11 + + com.jayway.jsonpath + json-path + eu.dnetlib.dhp diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityNode.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityNode.java deleted file mode 100644 index be1babae2..000000000 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityNode.java +++ /dev/null @@ -1,4 +0,0 @@ -package eu.dnetlib.dhp.graph; - -public class EntityNode { -} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityRelEntity.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityRelEntity.java index ac89e4351..e8ecc2e30 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityRelEntity.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityRelEntity.java @@ -1,20 +1,30 @@ package eu.dnetlib.dhp.graph; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import eu.dnetlib.dhp.schema.oaf.Relation; - import java.io.Serializable; public class EntityRelEntity implements Serializable { private TypedRow source; - 
private Relation relation; + private TypedRow relation; private TypedRow target; + public EntityRelEntity() { + } + public EntityRelEntity(TypedRow source) { this.source = source; } + + //helpers + public Boolean hasMainEntity() { + return getSource() != null & getRelation() == null & getTarget() == null; + } + + public Boolean hasRelatedEntity() { + return getSource() == null & getRelation() != null & getTarget() != null; + } + + public TypedRow getSource() { return source; } @@ -24,11 +34,11 @@ public class EntityRelEntity implements Serializable { return this; } - public Relation getRelation() { + public TypedRow getRelation() { return relation; } - public EntityRelEntity setRelation(Relation relation) { + public EntityRelEntity setRelation(TypedRow relation) { this.relation = relation; return this; } @@ -42,12 +52,4 @@ public class EntityRelEntity implements Serializable { return this; } - @Override - public String toString() { - try { - return new ObjectMapper().writeValueAsString(this); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - } } diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java index 5764642dc..aca436f52 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java @@ -1,139 +1,119 @@ package eu.dnetlib.dhp.graph; import com.fasterxml.jackson.databind.ObjectMapper; -import eu.dnetlib.dhp.schema.oaf.*; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import com.jayway.jsonpath.JsonPath; import org.apache.hadoop.io.Text; +import org.apache.hadoop.io.compress.GzipCodec; import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaSparkContext; -import org.apache.spark.api.java.Optional; import org.apache.spark.api.java.function.PairFunction; -import org.apache.spark.sql.Encoders; -import org.apache.spark.sql.SaveMode; import org.apache.spark.sql.SparkSession; import scala.Tuple2; import java.io.Serializable; +import java.util.List; public class GraphJoiner implements Serializable { - public static final int MAX_RELS = 100; + public static final int MAX_RELS = 10; public void join(final SparkSession spark, final String inputPath, final String hiveDbName, final String outPath) { final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext()); - /* - JavaPairRDD entities = sc.sequenceFile(inputPath + "/publication", Text.class, Text.class) - .map(item -> new ObjectMapper().readValue(item._2().toString(), Publication.class)) - .map(oaf -> new TypedRow("publication", oaf)) - .mapToPair(toPair()); + final String entityIdPath = "$.id"; - */ + JavaPairRDD datasource = readPathEntity(sc, entityIdPath, inputPath, "datasource"); + JavaPairRDD organization = readPathEntity(sc, entityIdPath, inputPath, "organization"); + JavaPairRDD project = readPathEntity(sc, entityIdPath, inputPath, "project"); + JavaPairRDD dataset = readPathEntity(sc, entityIdPath, inputPath, "dataset"); + JavaPairRDD otherresearchproduct = readPathEntity(sc, entityIdPath, inputPath, "otherresearchproduct"); + JavaPairRDD software = readPathEntity(sc, entityIdPath, inputPath, "software"); + JavaPairRDD publication = readPathEntity(sc, entityIdPath, inputPath, "publication"); - JavaPairRDD entities = 
sc.sequenceFile(inputPath + "/datasource", Text.class, Text.class) - .map(item -> new ObjectMapper().readValue(item._2().toString(), Datasource.class)) - .map(oaf -> new TypedRow("datasource", oaf)) - .mapToPair(toPair()) - .union(sc.sequenceFile(inputPath + "/organization", Text.class, Text.class) - .map(item -> new ObjectMapper().readValue(item._2().toString(), Organization.class)) - .map(oaf -> new TypedRow("organization", oaf)) - .mapToPair(toPair())) - .union(sc.sequenceFile(inputPath + "/project", Text.class, Text.class) - .map(item -> new ObjectMapper().readValue(item._2().toString(), Project.class)) - .map(oaf -> new TypedRow("project", oaf)) - .mapToPair(toPair())) - .union(sc.sequenceFile(inputPath + "/dataset", Text.class, Text.class) - .map(item -> new ObjectMapper().readValue(item._2().toString(), Dataset.class)) - .map(oaf -> new TypedRow("dataset", oaf)) - .mapToPair(toPair())) - .union(sc.sequenceFile(inputPath + "/otherresearchproduct", Text.class, Text.class) - .map(item -> new ObjectMapper().readValue(item._2().toString(), OtherResearchProduct.class)) - .map(oaf -> new TypedRow("otherresearchproduct", oaf)) - .mapToPair(toPair())) - .union(sc.sequenceFile(inputPath + "/software", Text.class, Text.class) - .map(item -> new ObjectMapper().readValue(item._2().toString(), Software.class)) - .map(oaf -> new TypedRow("software", oaf)) - .mapToPair(toPair())); - /* - .union(sc.sequenceFile(inputPath + "/publication", Text.class, Text.class) - .map(item -> new ObjectMapper().readValue(item._2().toString(), Publication.class)) - .map(oaf -> new TypedRow("publication", oaf)) - .mapToPair(toPair())); + final String entitiesPath = outPath + "/entities"; + datasource + .union(organization) + .union(project) + .union(dataset) + .union(otherresearchproduct) + .union(software) + .union(publication) + .map(e -> new EntityRelEntity().setSource(e._2())) + .map(e -> new ObjectMapper().writeValueAsString(e)) + .saveAsTextFile(entitiesPath, GzipCodec.class); - */ + JavaPairRDD entities = sc.textFile(entitiesPath) + .map(t -> new ObjectMapper().readValue(t, EntityRelEntity.class)) + .mapToPair(t -> new Tuple2<>(t.getSource().getSource(), t)); - /* - JavaRDD rels = sc.sequenceFile(inputPath + "/relation", Text.class, Text.class) - .map(item -> new ObjectMapper().readValue(item._2().toString(), Relation.class)) - .map(oaf -> new TypedRow("relation", oaf)) - .mapToPair(toPair()) + final JavaPairRDD relation = readPathRelation(sc, inputPath) + .map(p -> new EntityRelEntity().setRelation(p)) + .mapToPair(p -> new Tuple2<>(p.getRelation().getSource(), p)) .groupByKey() - .map(t -> Iterables.limit(t._2(), MAX_RELS)) - .flatMap(t -> t.iterator()) - .map(t -> (Relation) t.getOaf()); + .map(p -> Iterables.limit(p._2(), MAX_RELS)) + .flatMap(p -> p.iterator()) + .mapToPair(p -> new Tuple2<>(p.getRelation().getTarget(), p)); - spark.createDataset(rels.rdd(), Encoders.bean(Relation.class)) - .write() - .mode(SaveMode.Overwrite) - .saveAsTable(hiveDbName + ".relation_100"); - */ + final String joinByTargetPath = outPath + "/join_by_target"; + relation.join(entities) + .map(s -> new EntityRelEntity() + .setRelation(s._2()._1().getRelation()) + .setTarget(s._2()._2().getSource())) + .map(e -> new ObjectMapper().writeValueAsString(e)) + .saveAsTextFile(joinByTargetPath, GzipCodec.class); - JavaPairRDD bounded_rels = spark.table(hiveDbName + ".relation_" + MAX_RELS) - .as(Encoders.bean(Relation.class)) - .javaRDD() - .map(r -> new TypedRow("relation", r)) - .mapToPair(toPair()); - // build the adjacency list: e 
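// Note on the relation bounding above: groupByKey gathers every relation that
// shares a source id, and Iterables.limit caps each group at MAX_RELS before
// re-flattening, so a hub entity with a huge fan-out cannot overload a single
// partition. The same idiom in isolation (bySourceId is hypothetical):
final JavaPairRDD<String, EntityRelEntity> bounded = bySourceId
        .groupByKey()
        .flatMapValues(g -> Iterables.limit(g, MAX_RELS));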
-> r - JavaPairRDD>> adjacency_list = entities.leftOuterJoin(bounded_rels); + JavaPairRDD bySource = sc.textFile(joinByTargetPath) + .map(t -> new ObjectMapper().readValue(t, EntityRelEntity.class)) + .mapToPair(t -> new Tuple2<>(t.getRelation().getSource(), t)); - JavaRDD linked_entities = adjacency_list - .mapToPair(toPairTarget()) // make rel.targetid explicit so that we can join it - .leftOuterJoin(entities) // again with the entities to get the target entity - .map(l -> toEntityRelEntity(l)); // and map it to a more readable representation - - spark.createDataFrame(linked_entities, EntityRelEntity.class) - .write() - .mode(SaveMode.Overwrite) - .saveAsTable(hiveDbName + ".linked_entities"); + entities + .union(bySource) + .groupByKey() // by source id + .map(p -> { + final LinkedEntity e = new LinkedEntity(); + final List links = Lists.newArrayList(); + for(EntityRelEntity rel : p._2()) { + if (rel.hasMainEntity() & e.getEntity() == null) { + e.setEntity(rel.getSource()); + } + if (rel.hasRelatedEntity()) { + links.add(new Tuple() + .setRelation(rel.getRelation()) + .setTarget(rel.getTarget())); + } + } + e.setLinks(links); + if (e.getEntity() == null) { + throw new IllegalStateException("missing main entity on '" + p._1() + "'"); + } + return e; + }) + .map(e -> new ObjectMapper().writeValueAsString(e)) + .saveAsTextFile(outPath + "/linked_entities", GzipCodec.class); } - private EntityRelEntity toEntityRelEntity(Tuple2>>, Optional>> l) { - // extract the entity source - final EntityRelEntity res = new EntityRelEntity(l._2()._1()._2()._1()); - - if(l._2()._1()._2()._2().isPresent() && l._2()._2().isPresent()) { - - // extract the relationship - res.setRelation((Relation) l._2()._1()._2()._2().get().getOaf()); - - // extract the related entity - res.setTarget(l._2()._2().get()); - } - - return res; + private JavaPairRDD readPathEntity(final JavaSparkContext sc, final String idPath, final String inputPath, final String type) { + return sc.sequenceFile(inputPath + "/" + type, Text.class, Text.class) + .mapToPair((PairFunction, String, TypedRow>) item -> { + final String json = item._2().toString(); + final String id = JsonPath.read(json, idPath); + return new Tuple2<>(id, new TypedRow(id, type, json)); + }); } - private PairFunction>>, String, Tuple2>>> toPairTarget() { - return e -> { - Optional o = e._2()._2(); - if (o.isPresent()) { - return new Tuple2<>(((Relation) o.get().getOaf()).getTarget(), e); - } else { - return new Tuple2<>(null, e); - } - }; - } - - private PairFunction toPair() { - return e -> { - if (!"relation".equals(e.getType())) { - return new Tuple2<>( ((OafEntity) e.getOaf()).getId(), e); - } else { - return new Tuple2<>( ((Relation) e.getOaf()).getSource(), e); - } - }; + private JavaRDD readPathRelation(final JavaSparkContext sc, final String inputPath) { + return sc.sequenceFile(inputPath + "/relation", Text.class, Text.class) + .map(item -> { + final String json = item._2().toString(); + final String source = JsonPath.read(json, "$.source"); + final String target = JsonPath.read(json, "$.target"); + return new TypedRow(source, target, "relation", json); + }); } } diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntity.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntity.java new file mode 100644 index 000000000..9e6fc0d38 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntity.java @@ -0,0 +1,29 @@ +package eu.dnetlib.dhp.graph; + +import 
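// On readPathEntity/readPathRelation above: JsonPath extracts only the join
// keys, so each payload travels through the shuffle as a plain String and is
// never deserialized into a full OAF bean. Hedged standalone example
// (payload value is hypothetical):
final String json = "{\"id\":\"10|ds::1\",\"officialname\":\"Example DS\"}";
final String id = JsonPath.read(json, "$.id"); // -> "10|ds::1"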
java.io.Serializable; +import java.util.List; + +public class LinkedEntity implements Serializable { + + private TypedRow entity; + + private List links; + + public TypedRow getEntity() { + return entity; + } + + public LinkedEntity setEntity(TypedRow entity) { + this.entity = entity; + return this; + } + + public List getLinks() { + return links; + } + + public LinkedEntity setLinks(List links) { + this.links = links; + return this; + } +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java deleted file mode 100644 index dbab04f16..000000000 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java +++ /dev/null @@ -1,69 +0,0 @@ -package eu.dnetlib.dhp.graph; - -import java.io.Serializable; - -public class RelatedEntity implements Serializable { - - private String relType; - - private String subRelType; - - private String relClass; - - private String type; - - private String payload; - - public RelatedEntity(String relType, String subRelType, String relClass, String type, String payload) { - this.relType = relType; - this.subRelType = subRelType; - this.relClass = relClass; - this.type = type; - this.payload = payload; - } - - public String getRelType() { - return relType; - } - - public RelatedEntity setRelType(String relType) { - this.relType = relType; - return this; - } - - public String getSubRelType() { - return subRelType; - } - - public RelatedEntity setSubRelType(String subRelType) { - this.subRelType = subRelType; - return this; - } - - public String getRelClass() { - return relClass; - } - - public RelatedEntity setRelClass(String relClass) { - this.relClass = relClass; - return this; - } - - public String getType() { - return type; - } - - public RelatedEntity setType(String type) { - this.type = type; - return this; - } - - public String getPayload() { - return payload; - } - - public RelatedEntity setPayload(String payload) { - this.payload = payload; - return this; - } -} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkGraphIndexingJob.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkGraphIndexingJob.java index ce8e7e396..1d55dda89 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkGraphIndexingJob.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkGraphIndexingJob.java @@ -4,21 +4,27 @@ import eu.dnetlib.dhp.application.ArgumentApplicationParser; import org.apache.commons.io.IOUtils; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.spark.SparkConf; import org.apache.spark.sql.SparkSession; public class SparkGraphIndexingJob { - private final static String ENTITY_NODES_PATH = "/tmp/entity_node"; + private final static String OUTPUT_BASE_PATH = "/tmp/openaire_provision"; public static void main(String[] args) throws Exception { final ArgumentApplicationParser parser = new ArgumentApplicationParser(IOUtils.toString(SparkGraphIndexingJob.class.getResourceAsStream("/eu/dnetlib/dhp/graph/input_graph_parameters.json"))); parser.parseArgument(args); + + final SparkConf conf = new SparkConf() + .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer") + .set("hive.metastore.uris", parser.get("hive_metastore_uris")); + final SparkSession spark = SparkSession .builder() + .config(conf) 
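// With the Kryo serializer enabled above, registering the classes that cross
// the shuffle most often lets Kryo write compact class ids instead of full
// class names. Hedged sketch (class list assumed, not in the patch):
conf.registerKryoClasses(new Class[] {
        TypedRow.class, EntityRelEntity.class, Tuple.class, LinkedEntity.class
});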
.appName(SparkGraphIndexingJob.class.getSimpleName()) .master(parser.get("master")) - .config("hive.metastore.uris", parser.get("hive_metastore_uris")) .enableHiveSupport() .getOrCreate(); @@ -26,11 +32,12 @@ public class SparkGraphIndexingJob { final String hiveDbName = parser.get("hive_db_name"); final FileSystem fs = FileSystem.get(spark.sparkContext().hadoopConfiguration()); - if (fs.exists(new Path(ENTITY_NODES_PATH))) { - fs.delete(new Path(ENTITY_NODES_PATH), true); + if (fs.exists(new Path(OUTPUT_BASE_PATH))) { + fs.delete(new Path(OUTPUT_BASE_PATH), true); + fs.mkdirs(new Path(OUTPUT_BASE_PATH)); } - new GraphJoiner().join(spark, inputPath, hiveDbName, ENTITY_NODES_PATH); + new GraphJoiner().join(spark, inputPath, hiveDbName, OUTPUT_BASE_PATH); } } diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Tuple.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Tuple.java new file mode 100644 index 000000000..0b22a63a5 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Tuple.java @@ -0,0 +1,31 @@ +package eu.dnetlib.dhp.graph; + +import eu.dnetlib.dhp.schema.oaf.Oaf; +import eu.dnetlib.dhp.schema.oaf.Relation; + +import java.io.Serializable; + +public class Tuple implements Serializable { + + private TypedRow relation; + + private TypedRow target; + + public TypedRow getRelation() { + return relation; + } + + public Tuple setRelation(TypedRow relation) { + this.relation = relation; + return this; + } + + public TypedRow getTarget() { + return target; + } + + public Tuple setTarget(TypedRow target) { + this.target = target; + return this; + } +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TypedRow.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TypedRow.java index 5c933ca80..60c3b64b2 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TypedRow.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TypedRow.java @@ -1,20 +1,46 @@ package eu.dnetlib.dhp.graph; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import eu.dnetlib.dhp.schema.oaf.Oaf; - import java.io.Serializable; public class TypedRow implements Serializable { - private String type; - private Oaf oaf; - public TypedRow(String type, Oaf oaf) { + private String source; + private String target; + private String type; + private String oaf; + + public TypedRow() { + } + + public TypedRow(String source, String type, String oaf) { + this.source = source; this.type = type; this.oaf = oaf; } + public TypedRow(String source, String target, String type, String oaf) { + this(source, type, oaf); + this.target = target; + } + + public String getSource() { + return source; + } + + public TypedRow setSource(String source) { + this.source = source; + return this; + } + + public String getTarget() { + return target; + } + + public TypedRow setTarget(String target) { + this.target = target; + return this; + } + public String getType() { return type; } @@ -24,21 +50,13 @@ public class TypedRow implements Serializable { return this; } - public Oaf getOaf() { + public String getOaf() { return oaf; } - public TypedRow setOaf(Oaf oaf) { + public TypedRow setOaf(String oaf) { this.oaf = oaf; return this; } - @Override - public String toString() { - try { - return new ObjectMapper().writeValueAsString(this); - } catch (JsonProcessingException e) { - throw new 
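// The TypedRow rework above replaces the typed Oaf member with plain Strings
// (source, target, type, oaf): Spark now shuffles flat strings rather than
// deep object graphs, and typed access is deferred to the few consumers that
// need it. Hedged sketch (typedRow is a hypothetical instance):
final Relation rel = new ObjectMapper().readValue(typedRow.getOaf(), Relation.class);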
RuntimeException(e); - } - } } diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/config-default.xml b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/config-default.xml index fcab9dd00..624d3ea76 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/config-default.xml +++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/config-default.xml @@ -23,4 +23,12 @@ hive_db_name openaire + + spark2YarnHistoryServerAddress + http://iis-cdh5-test-gw.ocean.icm.edu.pl:18088 + + + spark2EventLogDir + /user/spark/applicationHistory + \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml index 00a890268..a91759ade 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml +++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml @@ -16,6 +16,14 @@ sparkExecutorCores number of cores used by single executor + + spark2YarnHistoryServerAddress + spark 2.* yarn history server address + + + spark2EventLogDir + spark 2.* event log dir location + @@ -33,7 +41,16 @@ GraphIndexing eu.dnetlib.dhp.graph.SparkGraphIndexingJob dhp-graph-provision-${projectVersion}.jar - --executor-memory ${sparkExecutorMemory} --executor-cores ${sparkExecutorCores} --driver-memory=${sparkDriverMemory} --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" --conf spark.sql.warehouse.dir="/user/hive/warehouse" --conf spark.yarn.executor.memoryOverhead=${sparkExecutorMemoryOverhead} + + --executor-memory ${sparkExecutorMemory} + --executor-cores ${sparkExecutorCores} + --driver-memory=${sparkDriverMemory} + --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" + --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" + --conf spark.sql.warehouse.dir="/user/hive/warehouse" + --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} + --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} + -mt yarn-cluster --sourcePath${sourcePath} --hive_db_name${hive_db_name} From a55f5fecc63d46e7cc8aa03c7991b08e0cb2966f Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Fri, 24 Jan 2020 08:17:53 +0100 Subject: [PATCH 05/21] joining entities using T x R x S method with groupByKey, WIP: making target objects (T) have lower memory footprint --- .../eu/dnetlib/dhp/graph/EntityRelEntity.java | 1 - .../eu/dnetlib/dhp/graph/GraphJoiner.java | 53 +++-- .../eu/dnetlib/dhp/graph/MappingUtils.java | 103 +++++++++ .../eu/dnetlib/dhp/graph/RelatedEntity.java | 210 ++++++++++++++++++ .../main/java/eu/dnetlib/dhp/graph/Tuple.java | 4 +- .../java/eu/dnetlib/dhp/graph/TypedRow.java | 49 ++-- .../dnetlib/dhp/graph/MappingUtilsTest.java | 42 ++++ .../eu/dnetlib/dhp/graph/datasource.json | 1 + .../eu/dnetlib/dhp/graph/result.json | 1 + 9 files changed, 414 insertions(+), 50 deletions(-) create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/MappingUtils.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java create mode 100644 
dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java create mode 100644 dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/datasource.json create mode 100644 dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/result.json diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityRelEntity.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityRelEntity.java index e8ecc2e30..b0711bbff 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityRelEntity.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityRelEntity.java @@ -51,5 +51,4 @@ public class EntityRelEntity implements Serializable { this.target = target; return this; } - } diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java index aca436f52..d8641f272 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java @@ -3,7 +3,9 @@ package eu.dnetlib.dhp.graph; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import com.jayway.jsonpath.DocumentContext; import com.jayway.jsonpath.JsonPath; +import eu.dnetlib.dhp.schema.oaf.Qualifier; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.compress.GzipCodec; import org.apache.spark.api.java.JavaPairRDD; @@ -24,15 +26,13 @@ public class GraphJoiner implements Serializable { final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext()); - final String entityIdPath = "$.id"; - - JavaPairRDD datasource = readPathEntity(sc, entityIdPath, inputPath, "datasource"); - JavaPairRDD organization = readPathEntity(sc, entityIdPath, inputPath, "organization"); - JavaPairRDD project = readPathEntity(sc, entityIdPath, inputPath, "project"); - JavaPairRDD dataset = readPathEntity(sc, entityIdPath, inputPath, "dataset"); - JavaPairRDD otherresearchproduct = readPathEntity(sc, entityIdPath, inputPath, "otherresearchproduct"); - JavaPairRDD software = readPathEntity(sc, entityIdPath, inputPath, "software"); - JavaPairRDD publication = readPathEntity(sc, entityIdPath, inputPath, "publication"); + JavaPairRDD datasource = readPathEntity(sc, inputPath, "datasource"); + JavaPairRDD organization = readPathEntity(sc, inputPath, "organization"); + JavaPairRDD project = readPathEntity(sc, inputPath, "project"); + JavaPairRDD dataset = readPathEntity(sc, inputPath, "dataset"); + JavaPairRDD otherresearchproduct = readPathEntity(sc, inputPath, "otherresearchproduct"); + JavaPairRDD software = readPathEntity(sc, inputPath, "software"); + JavaPairRDD publication = readPathEntity(sc, inputPath, "publication"); final String entitiesPath = outPath + "/entities"; datasource @@ -48,28 +48,31 @@ public class GraphJoiner implements Serializable { JavaPairRDD entities = sc.textFile(entitiesPath) .map(t -> new ObjectMapper().readValue(t, EntityRelEntity.class)) - .mapToPair(t -> new Tuple2<>(t.getSource().getSource(), t)); + .mapToPair(t -> new Tuple2<>(t.getSource().getSourceId(), t)); final JavaPairRDD relation = readPathRelation(sc, inputPath) + .filter(r -> !r.getDeleted()) .map(p -> new EntityRelEntity().setRelation(p)) - .mapToPair(p -> new 
Tuple2<>(p.getRelation().getSource(), p)) + .mapToPair(p -> new Tuple2<>(p.getRelation().getSourceId(), p)) .groupByKey() .map(p -> Iterables.limit(p._2(), MAX_RELS)) .flatMap(p -> p.iterator()) - .mapToPair(p -> new Tuple2<>(p.getRelation().getTarget(), p)); + .mapToPair(p -> new Tuple2<>(p.getRelation().getTargetId(), p)); final String joinByTargetPath = outPath + "/join_by_target"; - relation.join(entities) + relation + .join(entities + .filter(e -> !e._2().getSource().getDeleted()) + /*.mapToPair(e -> new Tuple2<>(e._1(), new MappingUtils().pruneModel(e._2())))*/) .map(s -> new EntityRelEntity() .setRelation(s._2()._1().getRelation()) .setTarget(s._2()._2().getSource())) .map(e -> new ObjectMapper().writeValueAsString(e)) .saveAsTextFile(joinByTargetPath, GzipCodec.class); - JavaPairRDD bySource = sc.textFile(joinByTargetPath) .map(t -> new ObjectMapper().readValue(t, EntityRelEntity.class)) - .mapToPair(t -> new Tuple2<>(t.getRelation().getSource(), t)); + .mapToPair(t -> new Tuple2<>(t.getRelation().getSourceId(), t)); entities .union(bySource) @@ -97,12 +100,17 @@ public class GraphJoiner implements Serializable { .saveAsTextFile(outPath + "/linked_entities", GzipCodec.class); } - private JavaPairRDD readPathEntity(final JavaSparkContext sc, final String idPath, final String inputPath, final String type) { + private JavaPairRDD readPathEntity(final JavaSparkContext sc, final String inputPath, final String type) { return sc.sequenceFile(inputPath + "/" + type, Text.class, Text.class) .mapToPair((PairFunction, String, TypedRow>) item -> { + final String json = item._2().toString(); - final String id = JsonPath.read(json, idPath); - return new Tuple2<>(id, new TypedRow(id, type, json)); + final String id = JsonPath.read(json, "$.id"); + return new Tuple2<>(id, new TypedRow() + .setSourceId(id) + .setDeleted(JsonPath.read(json, "$.dataInfo.deletedbyinference")) + .setType(type) + .setOaf(json)); }); } @@ -110,9 +118,12 @@ public class GraphJoiner implements Serializable { return sc.sequenceFile(inputPath + "/relation", Text.class, Text.class) .map(item -> { final String json = item._2().toString(); - final String source = JsonPath.read(json, "$.source"); - final String target = JsonPath.read(json, "$.target"); - return new TypedRow(source, target, "relation", json); + return new TypedRow() + .setSourceId(JsonPath.read(json, "$.source")) + .setTargetId(JsonPath.read(json, "$.target")) + .setDeleted(JsonPath.read(json, "$.dataInfo.deletedbyinference")) + .setType("relation") + .setOaf(json); }); } diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/MappingUtils.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/MappingUtils.java new file mode 100644 index 000000000..756506c12 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/MappingUtils.java @@ -0,0 +1,103 @@ +package eu.dnetlib.dhp.graph; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.jayway.jsonpath.DocumentContext; +import com.jayway.jsonpath.JsonPath; +import eu.dnetlib.dhp.schema.oaf.KeyValue; +import eu.dnetlib.dhp.schema.oaf.Qualifier; +import eu.dnetlib.dhp.schema.oaf.StructuredProperty; +import net.minidev.json.JSONArray; + +import java.util.LinkedHashMap; +import java.util.stream.Collectors; + +public class MappingUtils { + + public EntityRelEntity pruneModel(EntityRelEntity e) throws JsonProcessingException { + + final DocumentContext j = 
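
The relation-capping idiom above is easy to miss but is the memory guard of the whole join: groupByKey gathers every relation of a source, Iterables.limit lazily keeps at most MAX_RELS of them, and flatMap re-expands the survivors. Purely as an illustration (the ids, values and cap are invented), here is the same idiom in a self-contained local run:

import com.google.common.collect.Iterables;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.util.Arrays;

public class CapRelsSketch {
    public static void main(String[] args) {
        final int maxRels = 2; // stand-in for GraphJoiner.MAX_RELS
        try (JavaSparkContext sc = new JavaSparkContext(
                new SparkConf().setAppName("cap-rels-sketch").setMaster("local[*]"))) {
            sc.parallelizePairs(Arrays.asList(
                        new Tuple2<>("s1", "r1"), new Tuple2<>("s1", "r2"),
                        new Tuple2<>("s1", "r3"), new Tuple2<>("s2", "r4")))
              .groupByKey()                                     // all rels of one source together
              .map(rels -> Iterables.limit(rels._2(), maxRels)) // lazily keep at most maxRels
              .flatMap(rels -> rels.iterator())                 // back to one record per relation
              .collect()
              .forEach(System.out::println);                    // s1 keeps only two of its three rels
        }
    }
}
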
JsonPath.parse(e.getSource().getOaf()); + final RelatedEntity re = new RelatedEntity(); + + switch (e.getSource().getType()) { + case "publication": + case "dataset": + case "otherresearchproduct": + case "software": + + mapTitle(j, re); + re.setDateofacceptance(j.read("$.dateofacceptance.value")); + re.setPublisher(j.read("$.publisher.value")); + + JSONArray pids = j.read("$.pid"); + re.setPid(pids.stream() + .map(p -> asStructuredProperty((LinkedHashMap) p)) + .collect(Collectors.toList())); + + re.setResulttype(asQualifier(j.read("$.resulttype"))); + + JSONArray collfrom = j.read("$.collectedfrom"); + re.setCollectedfrom(collfrom.stream() + .map(c -> asKV((LinkedHashMap)c)) + .collect(Collectors.toList())); + + //TODO still to be mapped + //re.setCodeRepositoryUrl(j.read("$.coderepositoryurl")); + + break; + case "datasource": + re.setOfficialname(j.read("$.officialname.value")); + re.setWebsiteurl(j.read("$.websiteurl.value")); + + re.setDatasourcetype(asQualifier(j.read("$.datasourcetype"))); + re.setOpenairecompatibility(asQualifier(j.read("$.openairecompatibility"))); + + break; + case "organization": + + break; + case "project": + mapTitle(j, re); + break; + } + + return new EntityRelEntity().setSource( + new TypedRow() + .setSourceId(e.getSource().getSourceId()) + .setDeleted(e.getSource().getDeleted()) + .setType(e.getSource().getType()) + .setOaf(new ObjectMapper().writeValueAsString(re))); + } + + private KeyValue asKV(LinkedHashMap j) { + final KeyValue kv = new KeyValue(); + kv.setKey((String) j.get("key")); + kv.setValue((String) j.get("value")); + return kv; + } + + private void mapTitle(DocumentContext j, RelatedEntity re) { + JSONArray a = j.read("$.title"); + if (!a.isEmpty()) { + re.setTitle(asStructuredProperty((LinkedHashMap) a.get(0))); + } + } + + private StructuredProperty asStructuredProperty(LinkedHashMap j) { + final StructuredProperty sp = new StructuredProperty(); + sp.setValue((String) j.get("value")); + sp.setQualifier(asQualifier((LinkedHashMap) j.get("qualifier"))); + return sp; + + } + + public Qualifier asQualifier(LinkedHashMap j) { + Qualifier q = new Qualifier(); + q.setClassid(j.get("classid")); + q.setClassname(j.get("classname")); + q.setSchemeid(j.get("schemeid")); + q.setSchemename(j.get("schemename")); + return q; + } + +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java new file mode 100644 index 000000000..a441392b2 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java @@ -0,0 +1,210 @@ +package eu.dnetlib.dhp.graph; + +import eu.dnetlib.dhp.schema.oaf.KeyValue; +import eu.dnetlib.dhp.schema.oaf.Qualifier; +import eu.dnetlib.dhp.schema.oaf.StructuredProperty; + +import java.io.Serializable; +import java.util.List; + +public class RelatedEntity implements Serializable { + + // results + private StructuredProperty title; // also for projects + private String dateofacceptance; + private String publisher; + private List pid; + private String codeRepositoryUrl; + private Qualifier resulttype; + private List collectedfrom; + + // datasource + private String officialname; + private String websiteurl; // also for organizations, projects + private Qualifier datasourcetype; + private Qualifier datasourcetypeui; + //private String aggregatortype; + private Qualifier openairecompatibility; + + // organization + private String legalname; + private String 
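
pruneModel above leans on json path to copy just a handful of fields out of the full record instead of deserializing the whole bean. A standalone illustration of that extraction style (the JSON fragment is made up; only the paths mirror the ones used above): parse the payload once into a DocumentContext, then read scalars and arrays by path.

import com.jayway.jsonpath.DocumentContext;
import com.jayway.jsonpath.JsonPath;
import net.minidev.json.JSONArray;

public class JsonPathSketch {
    public static void main(String[] args) {
        final String json = "{\"id\":\"50|sketch::0001\","
                + "\"publisher\":{\"value\":\"intech\"},"
                + "\"pid\":[{\"value\":\"10.5772/51023\"}]}";

        final DocumentContext j = JsonPath.parse(json); // parse once, read many times

        final String id = j.read("$.id");               // scalar value
        final String publisher = j.read("$.publisher.value");
        final JSONArray pids = j.read("$.pid");         // arrays come back as JSONArray

        System.out.println(id + " published by " + publisher + ", " + pids.size() + " pid(s)");
    }
}
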
legalshortname; + private Qualifier country; + + // project + private String code; + private String acronym; + private Qualifier contracttype; + private String fundingtree; + + public StructuredProperty getTitle() { + return title; + } + + public RelatedEntity setTitle(StructuredProperty title) { + this.title = title; + return this; + } + + public String getDateofacceptance() { + return dateofacceptance; + } + + public RelatedEntity setDateofacceptance(String dateofacceptance) { + this.dateofacceptance = dateofacceptance; + return this; + } + + public String getPublisher() { + return publisher; + } + + public RelatedEntity setPublisher(String publisher) { + this.publisher = publisher; + return this; + } + + public List getPid() { + return pid; + } + + public RelatedEntity setPid(List pid) { + this.pid = pid; + return this; + } + + public String getCodeRepositoryUrl() { + return codeRepositoryUrl; + } + + public RelatedEntity setCodeRepositoryUrl(String codeRepositoryUrl) { + this.codeRepositoryUrl = codeRepositoryUrl; + return this; + } + + public Qualifier getResulttype() { + return resulttype; + } + + public RelatedEntity setResulttype(Qualifier resulttype) { + this.resulttype = resulttype; + return this; + } + + public List getCollectedfrom() { + return collectedfrom; + } + + public RelatedEntity setCollectedfrom(List collectedfrom) { + this.collectedfrom = collectedfrom; + return this; + } + + public String getOfficialname() { + return officialname; + } + + public RelatedEntity setOfficialname(String officialname) { + this.officialname = officialname; + return this; + } + + public String getWebsiteurl() { + return websiteurl; + } + + public RelatedEntity setWebsiteurl(String websiteurl) { + this.websiteurl = websiteurl; + return this; + } + + public Qualifier getDatasourcetype() { + return datasourcetype; + } + + public RelatedEntity setDatasourcetype(Qualifier datasourcetype) { + this.datasourcetype = datasourcetype; + return this; + } + + public Qualifier getDatasourcetypeui() { + return datasourcetypeui; + } + + public RelatedEntity setDatasourcetypeui(Qualifier datasourcetypeui) { + this.datasourcetypeui = datasourcetypeui; + return this; + } + + public Qualifier getOpenairecompatibility() { + return openairecompatibility; + } + + public RelatedEntity setOpenairecompatibility(Qualifier openairecompatibility) { + this.openairecompatibility = openairecompatibility; + return this; + } + + public String getLegalname() { + return legalname; + } + + public RelatedEntity setLegalname(String legalname) { + this.legalname = legalname; + return this; + } + + public String getLegalshortname() { + return legalshortname; + } + + public RelatedEntity setLegalshortname(String legalshortname) { + this.legalshortname = legalshortname; + return this; + } + + public Qualifier getCountry() { + return country; + } + + public RelatedEntity setCountry(Qualifier country) { + this.country = country; + return this; + } + + public String getCode() { + return code; + } + + public RelatedEntity setCode(String code) { + this.code = code; + return this; + } + + public String getAcronym() { + return acronym; + } + + public RelatedEntity setAcronym(String acronym) { + this.acronym = acronym; + return this; + } + + public Qualifier getContracttype() { + return contracttype; + } + + public RelatedEntity setContracttype(Qualifier contracttype) { + this.contracttype = contracttype; + return this; + } + + public String getFundingtree() { + return fundingtree; + } + + public RelatedEntity setFundingtree(String 
fundingtree) { + this.fundingtree = fundingtree; + return this; + } +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Tuple.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Tuple.java index 0b22a63a5..1eb0491a7 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Tuple.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Tuple.java @@ -1,8 +1,5 @@ package eu.dnetlib.dhp.graph; -import eu.dnetlib.dhp.schema.oaf.Oaf; -import eu.dnetlib.dhp.schema.oaf.Relation; - import java.io.Serializable; public class Tuple implements Serializable { @@ -11,6 +8,7 @@ public class Tuple implements Serializable { private TypedRow target; + public TypedRow getRelation() { return relation; } diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TypedRow.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TypedRow.java index 60c3b64b2..1acbbce93 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TypedRow.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TypedRow.java @@ -4,40 +4,40 @@ import java.io.Serializable; public class TypedRow implements Serializable { - private String source; - private String target; + private String sourceId; + + private String targetId; + + private Boolean deleted; + private String type; + private String oaf; - public TypedRow() { + public String getSourceId() { + return sourceId; } - public TypedRow(String source, String type, String oaf) { - this.source = source; - this.type = type; - this.oaf = oaf; - } - - public TypedRow(String source, String target, String type, String oaf) { - this(source, type, oaf); - this.target = target; - } - - public String getSource() { - return source; - } - - public TypedRow setSource(String source) { - this.source = source; + public TypedRow setSourceId(String sourceId) { + this.sourceId = sourceId; return this; } - public String getTarget() { - return target; + public String getTargetId() { + return targetId; } - public TypedRow setTarget(String target) { - this.target = target; + public TypedRow setTargetId(String targetId) { + this.targetId = targetId; + return this; + } + + public Boolean getDeleted() { + return deleted; + } + + public TypedRow setDeleted(Boolean deleted) { + this.deleted = deleted; return this; } @@ -58,5 +58,4 @@ public class TypedRow implements Serializable { this.oaf = oaf; return this; } - } diff --git a/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java b/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java new file mode 100644 index 000000000..2edb0aa70 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java @@ -0,0 +1,42 @@ +package eu.dnetlib.dhp.graph; + +import org.codehaus.jackson.map.ObjectMapper; +import org.junit.Before; +import org.junit.Test; + +import java.io.IOException; +import java.io.InputStreamReader; + +public class MappingUtilsTest { + + private MappingUtils utils; + + @Before + public void setUp() { + utils = new MappingUtils(); + } + + @Test + public void testOafMappingDatasource() throws IOException { + + final InputStreamReader in = new InputStreamReader(getClass().getResourceAsStream("datasource.json")); + final EntityRelEntity e = new ObjectMapper().readValue(in, EntityRelEntity.class); + e.getSource().setType("datasource"); + + final 
EntityRelEntity out = utils.pruneModel(e); + System.out.println(out); + + } + + @Test + public void testOafMappinResult() throws IOException { + + final InputStreamReader in = new InputStreamReader(getClass().getResourceAsStream("result.json")); + final EntityRelEntity e = new ObjectMapper().readValue(in, EntityRelEntity.class); + e.getSource().setType("otherresearchproduct"); + + final EntityRelEntity out = utils.pruneModel(e); + System.out.println(out); + + } +} diff --git a/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/datasource.json b/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/datasource.json new file mode 100644 index 000000000..c26154c1e --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/datasource.json @@ -0,0 +1 @@ +{"source":{"sourceId":"10|CRIS_UNS____::f66f1bd369679b5b077dcdf006089556","targetId":null,"deleted":false,"oaf":"{\"datasourcetype\":{\"classid\":\"crissystem\",\"classname\":\"CRIS System\",\"schemeid\":\"dnet:datasource_typologies\",\"schemename\":\"dnet:datasource_typologies\"},\"openairecompatibility\":{\"classid\":\"openaire-cris_1.1\",\"classname\":\"OpenAIRE CRIS v1.1\",\"schemeid\":\"dnet:datasourceCompatibilityLevel\",\"schemename\":\"dnet:datasourceCompatibilityLevel\"},\"officialname\":{\"value\":\"CRIS UNS (Current Research Information System University of Novi Sad)\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"englishname\":{\"value\":\"CRIS UNS (Current Research Information System University of Novi Sad)\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"websiteurl\":{\"value\":\"https://cris.uns.ac.rs/\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"logourl\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"contactemail\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"namespaceprefix\":{\"value\":\"CrisUnsNoviS\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"latitude\":{\"value\":\"0.0\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"longitude\":{\"value\":\"0.0\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"dateofvalidation\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"i
nferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"description\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"subjects\":[],\"odnumberofitems\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"odnumberofitemsdate\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"odpolicies\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"odlanguages\":[],\"odcontenttypes\":[],\"accessinfopackage\":[{\"value\":\"https://cris.uns.ac.rs/OAIHandlerOpenAIRECRIS\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}}],\"releasestartdate\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"releaseenddate\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"missionstatementurl\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"dataprovider\":{\"value\":false,\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"serviceprovider\":{\"value\":false,\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"databaseaccesstype\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"datauploadtype\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"databaseaccessrestriction\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"datauploadrestriction\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\
":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"versioning\":{\"value\":false,\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"citationguidelineurl\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"qualitymanagementkind\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"pidsystems\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"certificates\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"policies\":[],\"journal\":{\"name\":\"\",\"issnPrinted\":\"\",\"issnOnline\":\"\",\"issnLinking\":\"\",\"ep\":\"\",\"iss\":\"\",\"sp\":\"\",\"vol\":\"\",\"edition\":\"\",\"conferenceplace\":\"\",\"conferencedate\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"id\":\"10|CRIS_UNS____::f66f1bd369679b5b077dcdf006089556\",\"originalId\":[\"CRIS_UNS____::openaire\"],\"collectedfrom\":[{\"key\":\"\",\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}}],\"pid\":[],\"extraInfo\":[],\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"sysimport:crosswalk:entityregistry\",\"classname\":\"sysimport:crosswalk:entityregistry\",\"schemeid\":\"dnet:provenance_actions\",\"schemename\":\"dnet:provenance_actions\"}},\"lastupdatetimestamp\":0}"},"relation":null,"target":null} \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/result.json b/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/result.json new file mode 100644 index 000000000..5d6c3f29b --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/result.json @@ -0,0 +1 @@ +{"source":{"sourceId":"50|od_______165::5642f79c597bac8029fde12a80f75412","targetId":null,"deleted":true,"type":"otherresearchproduct","oaf":"{\"contactperson\":[],\"contactgroup\":[],\"tool\":[],\"author\":[{\"fullname\":\"Cartier, Adrien\",\"name\":\"Adrien\",\"surname\":\"Cartier\",\"rank\":1,\"pid\":[],\"affiliation\":[]},{\"fullname\":\"Larroudé, Philippe\",\"name\":\"Philippe\",\"surname\":\"Larroudé\",\"rank\":2,\"pid\":[],\"affiliation\":[]},{\"fullname\":\"Héquette, 
Arnaud\",\"name\":\"Arnaud\",\"surname\":\"Héquette\",\"rank\":3,\"pid\":[],\"affiliation\":[]}],\"resulttype\":{\"classid\":\"other\",\"classname\":\"other\",\"schemeid\":\"dnet:result_typologies\",\"schemename\":\"dnet:result_typologies\"},\"language\":{\"classid\":\"eng\",\"classname\":\"English\",\"schemeid\":\"dnet:languages\",\"schemename\":\"dnet:languages\"},\"country\":[],\"subject\":[{\"value\":\"[SDU.STU.OC] Sciences of the Universe/Earth Sciences/Oceanography\",\"qualifier\":{\"classid\":\"keyword\",\"classname\":\"keyword\",\"schemeid\":\"dnet:subject_classification_typologies\",\"schemename\":\"dnet:subject_classification_typologies\"},\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}}],\"title\":[{\"value\":\"Longshore Sediment Transport Measurements on Sandy Macrotidal Beaches Compared with Sediment Transport Formulae\",\"qualifier\":{\"classid\":\"main title\",\"classname\":\"main title\",\"schemeid\":\"dnet:dataCite_title\",\"schemename\":\"dnet:dataCite_title\"},\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}}],\"relevantdate\":[],\"description\":[{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}}],\"dateofacceptance\":{\"value\":\"2013-03-13\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"publisher\":{\"value\":\"intech\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"embargoenddate\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"source\":[{\"value\":\"Sediment Transport Processes and Their Modelling Applications\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},{\"value\":\"https://hal.archives-ouvertes.fr/hal-00824453\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},{\"value\":\"Sediment Transport Processes and Their Modelling Applications, intech, chapitre 2, 2013, 978-953-51-1039-2,. 
\\u0026lt;10.5772/51023\\u0026gt;\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}}],\"fulltext\":[],\"format\":[],\"contributor\":[{\"value\":\"Equipe Morphodynamique des littoraux (Dunkerque) ; Laboratoire d\\u0027Océanologie et de Géosciences (LOG) ; Université du Littoral Côte d\\u0027Opale - Université Lille I - Sciences et technologies - CNRS - Université du Littoral Côte d\\u0027Opale - Université Lille I - Sciences et technologies - CNRS\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},{\"value\":\"Laboratoire des écoulements géophysiques et industriels (LEGI) ; Université Joseph Fourier - Grenoble I - Institut polytechnique de Grenoble (Grenoble INP) - CNRS\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},{\"value\":\"Laboratoire d\\u0027Océanologie et de Géosciences (LOG) ; Université du Littoral Côte d\\u0027Opale - Université Lille I - Sciences et technologies - CNRS\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}}],\"resourcetype\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"},\"coverage\":[],\"refereed\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"context\":[],\"id\":\"50|od_______165::5642f79c597bac8029fde12a80f75412\",\"originalId\":[\"oai:HAL:hal-00824453v1\"],\"collectedfrom\":[{\"key\":\"10|opendoar____::9766527f2b5d3e95d4a733fcfb77bd7e\",\"value\":\"INRIA a CCSD electronic archive server\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}}],\"pid\":[{\"value\":\"10.5772/51023\",\"qualifier\":{\"classid\":\"doi\",\"classname\":\"doi\",\"schemeid\":\"dnet:pid_types\",\"schemename\":\"dnet:pid_types\"},\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}}],\"extraInfo\":[],\"dataInfo\":{\"invisible\":false,\"inferred\":true,\"deletedbyinference\":true,\"inferenceprovenance\":\"dedup-similarity-result-levenstein\",\"provenanceaction\":{\"classid\":\"sysimport:crosswalk:repository\",\"classname\":\"sysimport:crosswalk:repository\",\"schemeid\":\"dnet:provenanceActions\",\"schemename\":\"dnet:provenanceActions\"}},\"lastupdatetimestamp\":0}"},"relation":null,"target":null} \ No newline at end of file From fcbc4ccd70b7edfdb8c041dc9dbdaed5943fa13a Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Fri, 24 Jan 2020 08:43:23 +0100 Subject: [PATCH 06/21] a bit of docs doesn't hurt --- .../eu/dnetlib/dhp/graph/GraphJoiner.java | 40 ++++++++++++++++++- 1 file changed, 38 insertions(+), 2 deletions(-) diff --git 
a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java index d8641f272..96d1f150a 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java @@ -18,6 +18,24 @@ import scala.Tuple2; import java.io.Serializable; import java.util.List; +/** + * Joins the graph nodes by resolving the links of distance = 1 to create an adjacency list of linked objects. + * The operation considers all the entity types (publication, dataset, software, ORP, project, datasource, organization) + * and all the possible relationships (similarity links produced by the Dedup process are excluded). + * + * The operation is implemented by creating the union between the entity types (E), joined by the relationships (R), and again + * by E, finally grouped by E.id. + * + * Different manipulations of the E and R sets are introduced to reduce the complexity of the operation: + * 1) treat the object payload as a string, extracting only the necessary information beforehand using json path; + * it seems that deserializing it with jackson's object mapper has a higher memory footprint. + * + * 2) only consider rels that are not virtually deleted ($.dataInfo.deletedbyinference == false) + * 3) we only need a subset of fields from the related entities, so we introduce a distinction between E_source = S + * and E_target = T. Objects in T are heavily pruned of all the unnecessary information + * + * 4) perform the join as (((T join R) union S) groupby S.id) to yield S -> [ ] + */ public class GraphJoiner implements Serializable { public static final int MAX_RELS = 10; @@ -26,6 +44,7 @@ public class GraphJoiner implements Serializable { final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext()); + // read each entity JavaPairRDD datasource = readPathEntity(sc, inputPath, "datasource"); JavaPairRDD organization = readPathEntity(sc, inputPath, "organization"); JavaPairRDD project = readPathEntity(sc, inputPath, "project"); @@ -34,6 +53,7 @@ public class GraphJoiner implements Serializable { JavaPairRDD software = readPathEntity(sc, inputPath, "software"); JavaPairRDD publication = readPathEntity(sc, inputPath, "publication"); + // create the union between all the entities final String entitiesPath = outPath + "/entities"; datasource .union(organization) @@ -50,8 +70,9 @@ public class GraphJoiner implements Serializable { .map(t -> new ObjectMapper().readValue(t, EntityRelEntity.class)) .mapToPair(t -> new Tuple2<>(t.getSource().getSourceId(), t)); + // reads the relationships final JavaPairRDD relation = readPathRelation(sc, inputPath) - .filter(r -> !r.getDeleted()) + .filter(r -> !r.getDeleted()) // only consider those that are not virtually deleted .map(p -> new EntityRelEntity().setRelation(p)) .mapToPair(p -> new Tuple2<>(p.getRelation().getSourceId(), p)) .groupByKey() @@ -98,8 +119,16 @@ public class GraphJoiner implements Serializable { }) .map(e -> new ObjectMapper().writeValueAsString(e)) .saveAsTextFile(outPath + "/linked_entities", GzipCodec.class); - } + } + /** + * Reads a set of eu.dnetlib.dhp.schema.oaf.OafEntity objects from a sequence file, + * extracts the necessary information using json path, and wraps the oaf object in a eu.dnetlib.dhp.graph.TypedRow + * @param sc + * @param inputPath + * @param type + * @return the JavaPairRDD indexed by entity identifier + */ private
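
To make point 4) of the class-level javadoc concrete, here is a toy, self-contained rendition of the (((T join R) union S) groupby S.id) shape on plain strings. It is a sketch of the idea only: the names and records are invented, and the real job moves TypedRow payloads instead.

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.util.Arrays;

public class AdjacencySketch {
    public static void main(String[] args) {
        try (JavaSparkContext sc = new JavaSparkContext(
                new SparkConf().setAppName("adjacency-sketch").setMaster("local[*]"))) {

            // S: the source entities, keyed by id
            JavaPairRDD<String, String> s = sc.parallelizePairs(Arrays.asList(
                    new Tuple2<>("p1", "publication#1"),
                    new Tuple2<>("d1", "datasource#1")));

            // T: in the real job a pruned copy of the entities; here simply reused
            JavaPairRDD<String, String> t = s;

            // R: one relation p1 -> d1, keyed by its *target* id, carrying the source id
            JavaPairRDD<String, String> rByTarget = sc.parallelizePairs(
                    Arrays.asList(new Tuple2<>("d1", "p1")));

            // (T join R): attach the target payload to the relation, then re-key by source id
            JavaPairRDD<String, String> joined = rByTarget.join(t)
                    .mapToPair(x -> new Tuple2<>(x._2()._1(), "rel->" + x._2()._2()));

            // (... union S) groupby S.id: each source id with its adjacency list
            s.union(joined)
             .groupByKey()
             .collect()
             .forEach(g -> System.out.println(g._1() + " => " + g._2()));
            // p1 => [publication#1, rel->datasource#1]   (order may vary)
            // d1 => [datasource#1]
        }
    }
}
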
JavaPairRDD readPathEntity(final JavaSparkContext sc, final String inputPath, final String type) { return sc.sequenceFile(inputPath + "/" + type, Text.class, Text.class) .mapToPair((PairFunction, String, TypedRow>) item -> { @@ -114,6 +143,13 @@ public class GraphJoiner implements Serializable { }); } + /** + * Reads a set of eu.dnetlib.dhp.schema.oaf.Relation objects from a sequence file , + * extracts necessary information using json path, wraps the oaf object in a eu.dnetlib.dhp.graph.TypedRow + * @param sc + * @param inputPath + * @return the JavaRDD containing all the relationships + */ private JavaRDD readPathRelation(final JavaSparkContext sc, final String inputPath) { return sc.sequenceFile(inputPath + "/relation", Text.class, Text.class) .map(item -> { From 8c2aff99b01b555581b273640d4c7198ee6eaac8 Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Wed, 29 Jan 2020 15:40:33 +0100 Subject: [PATCH 07/21] joining entities using T x R x S, WIP: last representation based on LinkedEntity type --- .../eu/dnetlib/dhp/graph/GraphJoiner.java | 71 +++++++++-------- .../eu/dnetlib/dhp/graph/GraphMapper.java | 77 +++++++++++++++++++ .../main/java/eu/dnetlib/dhp/graph/Link.java | 30 ++++++++ .../eu/dnetlib/dhp/graph/LinkedEntity.java | 25 ++++-- .../dhp/graph/LinkedEntityWrapper.java | 40 ++++++++++ .../eu/dnetlib/dhp/graph/MappingUtils.java | 49 ++++++++---- .../eu/dnetlib/dhp/graph/RelatedEntity.java | 55 +++++++++++-- .../dhp/graph/SparkGraphIndexingJob.java | 1 + .../graph/{Tuple.java => TupleWrapper.java} | 6 +- .../dnetlib/dhp/graph/MappingUtilsTest.java | 10 +++ .../eu/dnetlib/dhp/graph/related_entity.json | 5 ++ 11 files changed, 309 insertions(+), 60 deletions(-) create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMapper.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Link.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntityWrapper.java rename dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/{Tuple.java => TupleWrapper.java} (70%) create mode 100644 dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/related_entity.json diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java index 96d1f150a..110649522 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java @@ -1,11 +1,14 @@ package eu.dnetlib.dhp.graph; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.jayway.jsonpath.DocumentContext; import com.jayway.jsonpath.JsonPath; -import eu.dnetlib.dhp.schema.oaf.Qualifier; +import eu.dnetlib.dhp.schema.oaf.*; +import net.minidev.json.JSONArray; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.compress.GzipCodec; import org.apache.spark.api.java.JavaPairRDD; @@ -15,8 +18,10 @@ import org.apache.spark.api.java.function.PairFunction; import org.apache.spark.sql.SparkSession; import scala.Tuple2; +import java.io.IOException; import java.io.Serializable; import java.util.List; +import java.util.stream.Collectors; /** * Joins the graph nodes 
by resolving the links of distance = 1 to create an adjacency list of linked objects. @@ -38,7 +43,7 @@ import java.util.List; */ public class GraphJoiner implements Serializable { - public static final int MAX_RELS = 10; + public static final int MAX_RELS = 100; public void join(final SparkSession spark, final String inputPath, final String hiveDbName, final String outPath) { @@ -63,7 +68,7 @@ public class GraphJoiner implements Serializable { .union(software) .union(publication) .map(e -> new EntityRelEntity().setSource(e._2())) - .map(e -> new ObjectMapper().writeValueAsString(e)) + .map(MappingUtils::serialize) .saveAsTextFile(entitiesPath, GzipCodec.class); JavaPairRDD entities = sc.textFile(entitiesPath) @@ -84,41 +89,24 @@ public class GraphJoiner implements Serializable { relation .join(entities .filter(e -> !e._2().getSource().getDeleted()) - /*.mapToPair(e -> new Tuple2<>(e._1(), new MappingUtils().pruneModel(e._2())))*/) + .mapToPair(e -> new Tuple2<>(e._1(), MappingUtils.pruneModel(e._2())))) .map(s -> new EntityRelEntity() .setRelation(s._2()._1().getRelation()) .setTarget(s._2()._2().getSource())) - .map(e -> new ObjectMapper().writeValueAsString(e)) + .map(MappingUtils::serialize) .saveAsTextFile(joinByTargetPath, GzipCodec.class); JavaPairRDD bySource = sc.textFile(joinByTargetPath) .map(t -> new ObjectMapper().readValue(t, EntityRelEntity.class)) .mapToPair(t -> new Tuple2<>(t.getRelation().getSourceId(), t)); + final String linkedEntitiesPath = outPath + "/linked_entities"; entities .union(bySource) .groupByKey() // by source id - .map(p -> { - final LinkedEntity e = new LinkedEntity(); - final List links = Lists.newArrayList(); - for(EntityRelEntity rel : p._2()) { - if (rel.hasMainEntity() & e.getEntity() == null) { - e.setEntity(rel.getSource()); - } - if (rel.hasRelatedEntity()) { - links.add(new Tuple() - .setRelation(rel.getRelation()) - .setTarget(rel.getTarget())); - } - } - e.setLinks(links); - if (e.getEntity() == null) { - throw new IllegalStateException("missing main entity on '" + p._1() + "'"); - } - return e; - }) - .map(e -> new ObjectMapper().writeValueAsString(e)) - .saveAsTextFile(outPath + "/linked_entities", GzipCodec.class); + .map(GraphJoiner::asLinkedEntityWrapper) + .map(MappingUtils::serialize) + .saveAsTextFile(linkedEntitiesPath, GzipCodec.class); } /** @@ -153,14 +141,35 @@ public class GraphJoiner implements Serializable { private JavaRDD readPathRelation(final JavaSparkContext sc, final String inputPath) { return sc.sequenceFile(inputPath + "/relation", Text.class, Text.class) .map(item -> { - final String json = item._2().toString(); + final String s = item._2().toString(); + final DocumentContext json = JsonPath.parse(s); return new TypedRow() - .setSourceId(JsonPath.read(json, "$.source")) - .setTargetId(JsonPath.read(json, "$.target")) - .setDeleted(JsonPath.read(json, "$.dataInfo.deletedbyinference")) + .setSourceId(json.read("$.source")) + .setTargetId(json.read("$.target")) + .setDeleted(json.read("$.dataInfo.deletedbyinference")) .setType("relation") - .setOaf(json); + .setOaf(s); }); } + private static LinkedEntityWrapper asLinkedEntityWrapper(Tuple2> p) { + final LinkedEntityWrapper e = new LinkedEntityWrapper(); + final List links = Lists.newArrayList(); + for (EntityRelEntity rel : p._2()) { + if (rel.hasMainEntity() & e.getEntity() == null) { + e.setEntity(rel.getSource()); + } + if (rel.hasRelatedEntity()) { + links.add(new TupleWrapper() + .setRelation(rel.getRelation()) + .setTarget(rel.getTarget())); + } + } + 
e.setLinks(links); + if (e.getEntity() == null) { + throw new IllegalStateException("missing main entity on '" + p._1() + "'"); + } + return e; + } + } diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMapper.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMapper.java new file mode 100644 index 000000000..bdfea7979 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMapper.java @@ -0,0 +1,77 @@ +package eu.dnetlib.dhp.graph; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import eu.dnetlib.dhp.schema.oaf.*; +import org.apache.spark.api.java.JavaSparkContext; +import org.apache.spark.sql.SparkSession; + +import java.io.IOException; +import java.util.stream.Collectors; + +public class GraphMapper { + + + public void map(final SparkSession spark, final String outPath) { + + final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext()); + + sc.textFile(outPath + "/linked_entities") + .map(LinkedEntityWrapper::parse) + .map(GraphMapper::asLinkedEntity) + .map(e -> new ObjectMapper().writeValueAsString(e)) + .saveAsTextFile(outPath + "/linked_entities_types"); + } + + private static LinkedEntity asLinkedEntity(final LinkedEntityWrapper lw) throws JsonProcessingException { + final LinkedEntity le = new LinkedEntity(); + + try { + le.setType(lw.getEntity().getType()); + le.setEntity(parseEntity(lw.getEntity().getOaf(), le.getType())); + le.setLinks(lw.getLinks() + .stream() + .map(l -> new Link() + .setRelation(parseRelation(l.getRelation().getOaf())) + .setRelatedEntity(RelatedEntity.parse(l.getTarget().getOaf()))) + .collect(Collectors.toList())); + return le; + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException(new ObjectMapper().writeValueAsString(lw), e); + } + } + + private static Relation parseRelation(final String s) { + try { + return new ObjectMapper().readValue(s, Relation.class); + } catch (IOException e) { + throw new IllegalArgumentException("unable to decode Relation: " + s); + } + } + + private static OafEntity parseEntity(final String json, final String type) { + final ObjectMapper o = new ObjectMapper(); + try { + switch (type) { + case "publication": + return o.readValue(json, Publication.class); + case "dataset": + return o.readValue(json, Dataset.class); + case "otherresearchproduct": + return o.readValue(json, OtherResearchProduct.class); + case "software": + return o.readValue(json, Software.class); + case "datasource": + return o.readValue(json, Datasource.class); + case "project": + return o.readValue(json, Project.class); + case "organization": + return o.readValue(json, Organization.class); + default: + throw new IllegalArgumentException("invalid entity type: " + type); + } + } catch (IOException e) { + throw new IllegalArgumentException("unable to decode oaf entity: " + json); + } + } +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Link.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Link.java new file mode 100644 index 000000000..8426fbd12 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Link.java @@ -0,0 +1,30 @@ +package eu.dnetlib.dhp.graph; + +import eu.dnetlib.dhp.schema.oaf.Relation; + +import java.io.Serializable; + +public class Link implements Serializable { + + private Relation relation; + + private RelatedEntity relatedEntity; + + public 
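
A hedged usage sketch for GraphMapper's type dispatch above: the type string selects the concrete schema class for Jackson. It assumes dhp-schemas is on the classpath and that an entity carrying only an id is enough for illustration; real records are far richer.

import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.schema.oaf.OafEntity;
import eu.dnetlib.dhp.schema.oaf.Publication;

public class ParseEntitySketch {
    public static void main(String[] args) throws Exception {
        final ObjectMapper mapper = new ObjectMapper();
        final String json = "{\"id\":\"50|sketch::0001\"}"; // invented identifier

        // same shape as GraphMapper.parseEntity: dispatch on the type string
        final String type = "publication";
        final OafEntity entity;
        switch (type) {
            case "publication":
                entity = mapper.readValue(json, Publication.class);
                break;
            default:
                throw new IllegalArgumentException("invalid entity type: " + type);
        }
        System.out.println(entity.getId()); // 50|sketch::0001
    }
}
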
Relation getRelation() { + return relation; + } + + public Link setRelation(Relation relation) { + this.relation = relation; + return this; + } + + public RelatedEntity getRelatedEntity() { + return relatedEntity; + } + + public Link setRelatedEntity(RelatedEntity relatedEntity) { + this.relatedEntity = relatedEntity; + return this; + } +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntity.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntity.java index 9e6fc0d38..c7c2d1892 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntity.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntity.java @@ -1,28 +1,41 @@ package eu.dnetlib.dhp.graph; +import eu.dnetlib.dhp.schema.oaf.OafEntity; + import java.io.Serializable; import java.util.List; public class LinkedEntity implements Serializable { - private TypedRow entity; + private String type; - private List links; + private OafEntity entity; - public TypedRow getEntity() { + private List links; + + public String getType() { + return type; + } + + public LinkedEntity setType(String type) { + this.type = type; + return this; + } + + public OafEntity getEntity() { return entity; } - public LinkedEntity setEntity(TypedRow entity) { + public LinkedEntity setEntity(OafEntity entity) { this.entity = entity; return this; } - public List getLinks() { + public List getLinks() { return links; } - public LinkedEntity setLinks(List links) { + public LinkedEntity setLinks(List links) { this.links = links; return this; } diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntityWrapper.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntityWrapper.java new file mode 100644 index 000000000..17853208c --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntityWrapper.java @@ -0,0 +1,40 @@ +package eu.dnetlib.dhp.graph; + +import com.fasterxml.jackson.databind.ObjectMapper; + +import java.io.IOException; +import java.io.Serializable; +import java.util.List; + +public class LinkedEntityWrapper implements Serializable { + + private TypedRow entity; + + private List links; + + public static LinkedEntityWrapper parse(final String s) { + try { + return new ObjectMapper().readValue(s, LinkedEntityWrapper.class); + } catch (IOException e) { + throw new IllegalArgumentException("unable to decode LinkedEntityWrapper: " + s); + } + } + + public TypedRow getEntity() { + return entity; + } + + public LinkedEntityWrapper setEntity(TypedRow entity) { + this.entity = entity; + return this; + } + + public List getLinks() { + return links; + } + + public LinkedEntityWrapper setLinks(List links) { + this.links = links; + return this; + } +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/MappingUtils.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/MappingUtils.java index 756506c12..9f7ca4d0b 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/MappingUtils.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/MappingUtils.java @@ -1,5 +1,6 @@ package eu.dnetlib.dhp.graph; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.jayway.jsonpath.DocumentContext; @@ -14,17 +15,16 @@ import 
java.util.stream.Collectors; public class MappingUtils { - public EntityRelEntity pruneModel(EntityRelEntity e) throws JsonProcessingException { + public static EntityRelEntity pruneModel(EntityRelEntity e) { final DocumentContext j = JsonPath.parse(e.getSource().getOaf()); - final RelatedEntity re = new RelatedEntity(); + final RelatedEntity re = new RelatedEntity().setId(j.read("$.id")).setType(e.getSource().getType()); switch (e.getSource().getType()) { case "publication": case "dataset": case "otherresearchproduct": case "software": - mapTitle(j, re); re.setDateofacceptance(j.read("$.dateofacceptance.value")); re.setPublisher(j.read("$.publisher.value")); @@ -48,51 +48,62 @@ public class MappingUtils { case "datasource": re.setOfficialname(j.read("$.officialname.value")); re.setWebsiteurl(j.read("$.websiteurl.value")); - re.setDatasourcetype(asQualifier(j.read("$.datasourcetype"))); re.setOpenairecompatibility(asQualifier(j.read("$.openairecompatibility"))); break; case "organization": + re.setLegalname(j.read("$.legalname.value")); + re.setLegalshortname(j.read("$.legalshortname.value")); + re.setCountry(asQualifier(j.read("$.country"))); break; case "project": - mapTitle(j, re); + re.setProjectTitle(j.read("$.title.value")); + re.setCode(j.read("$.code.value")); + re.setAcronym(j.read("$.acronym.value")); + re.setContracttype(asQualifier(j.read("$.contracttype"))); + + JSONArray f = j.read("$.fundingtree"); + if (!f.isEmpty()) { + re.setFundingtree(f.stream() + .map(s -> s.toString()) + .collect(Collectors.toList())); + } + break; } - return new EntityRelEntity().setSource( new TypedRow() .setSourceId(e.getSource().getSourceId()) .setDeleted(e.getSource().getDeleted()) .setType(e.getSource().getType()) - .setOaf(new ObjectMapper().writeValueAsString(re))); + .setOaf(serialize(re))); } - private KeyValue asKV(LinkedHashMap j) { + private static KeyValue asKV(LinkedHashMap j) { final KeyValue kv = new KeyValue(); kv.setKey((String) j.get("key")); kv.setValue((String) j.get("value")); return kv; } - private void mapTitle(DocumentContext j, RelatedEntity re) { - JSONArray a = j.read("$.title"); + private static void mapTitle(DocumentContext j, RelatedEntity re) { + final JSONArray a = j.read("$.title"); if (!a.isEmpty()) { re.setTitle(asStructuredProperty((LinkedHashMap) a.get(0))); } } - private StructuredProperty asStructuredProperty(LinkedHashMap j) { + private static StructuredProperty asStructuredProperty(LinkedHashMap j) { final StructuredProperty sp = new StructuredProperty(); sp.setValue((String) j.get("value")); sp.setQualifier(asQualifier((LinkedHashMap) j.get("qualifier"))); return sp; - } - public Qualifier asQualifier(LinkedHashMap j) { - Qualifier q = new Qualifier(); + public static Qualifier asQualifier(LinkedHashMap j) { + final Qualifier q = new Qualifier(); q.setClassid(j.get("classid")); q.setClassname(j.get("classname")); q.setSchemeid(j.get("schemeid")); @@ -100,4 +111,14 @@ public class MappingUtils { return q; } + public static String serialize(final Object o) { + try { + return new ObjectMapper() + .setSerializationInclusion(JsonInclude.Include.NON_NULL) + .writeValueAsString(o); + } catch (JsonProcessingException e) { + throw new IllegalArgumentException("unable to serialize: " + o.toString(), e); + } + } + } diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java index a441392b2..afd6e310b 100644 --- 
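
The NON_NULL inclusion configured in serialize() above is what keeps the pruned payloads small: fields that were never set vanish from the output instead of being written as nulls. A standalone before/after illustration on an invented bean:

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;

public class NonNullSketch {
    public static class Bean {
        public String id = "20|sketch::0001"; // public fields are auto-detected by Jackson
        public String legalname = null;       // never populated
    }

    public static void main(String[] args) throws Exception {
        final Bean b = new Bean();

        // default mapper: {"id":"20|sketch::0001","legalname":null}
        System.out.println(new ObjectMapper().writeValueAsString(b));

        // NON_NULL mapper: {"id":"20|sketch::0001"}
        System.out.println(new ObjectMapper()
                .setSerializationInclusion(JsonInclude.Include.NON_NULL)
                .writeValueAsString(b));
    }
}
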
a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java @@ -3,14 +3,22 @@ package eu.dnetlib.dhp.graph; import eu.dnetlib.dhp.schema.oaf.KeyValue; import eu.dnetlib.dhp.schema.oaf.Qualifier; import eu.dnetlib.dhp.schema.oaf.StructuredProperty; +import org.codehaus.jackson.map.ObjectMapper; +import java.io.IOException; import java.io.Serializable; import java.util.List; public class RelatedEntity implements Serializable { + private String id; + private String type; + + // common fields + private StructuredProperty title; + private String websiteurl; // datasource, organizations, projects + // results - private StructuredProperty title; // also for projects private String dateofacceptance; private String publisher; private List pid; @@ -20,11 +28,10 @@ public class RelatedEntity implements Serializable { // datasource private String officialname; - private String websiteurl; // also for organizations, projects private Qualifier datasourcetype; private Qualifier datasourcetypeui; - //private String aggregatortype; private Qualifier openairecompatibility; + //private String aggregatortype; // organization private String legalname; @@ -32,10 +39,28 @@ public class RelatedEntity implements Serializable { private Qualifier country; // project + private String projectTitle; private String code; private String acronym; private Qualifier contracttype; - private String fundingtree; + private List fundingtree; + + public static RelatedEntity parse(final String json) { + try { + return new ObjectMapper().readValue(json, RelatedEntity.class); + } catch (IOException e) { + throw new IllegalArgumentException("invalid RelatedEntity, cannot parse: " + json); + } + } + + public String getId() { + return id; + } + + public RelatedEntity setId(String id) { + this.id = id; + return this; + } public StructuredProperty getTitle() { return title; @@ -199,12 +224,30 @@ public class RelatedEntity implements Serializable { return this; } - public String getFundingtree() { + public List getFundingtree() { return fundingtree; } - public RelatedEntity setFundingtree(String fundingtree) { + public RelatedEntity setFundingtree(List fundingtree) { this.fundingtree = fundingtree; return this; } + + public String getProjectTitle() { + return projectTitle; + } + + public RelatedEntity setProjectTitle(String projectTitle) { + this.projectTitle = projectTitle; + return this; + } + + public String getType() { + return type; + } + + public RelatedEntity setType(String type) { + this.type = type; + return this; + } } diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkGraphIndexingJob.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkGraphIndexingJob.java index 1d55dda89..3915bef08 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkGraphIndexingJob.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkGraphIndexingJob.java @@ -38,6 +38,7 @@ public class SparkGraphIndexingJob { } new GraphJoiner().join(spark, inputPath, hiveDbName, OUTPUT_BASE_PATH); + new GraphMapper().map(spark, OUTPUT_BASE_PATH); } } diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Tuple.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TupleWrapper.java similarity index 70% rename from 
dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Tuple.java rename to dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TupleWrapper.java index 1eb0491a7..eb60e1474 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Tuple.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TupleWrapper.java @@ -2,7 +2,7 @@ package eu.dnetlib.dhp.graph; import java.io.Serializable; -public class Tuple implements Serializable { +public class TupleWrapper implements Serializable { private TypedRow relation; @@ -13,7 +13,7 @@ public class Tuple implements Serializable { return relation; } - public Tuple setRelation(TypedRow relation) { + public TupleWrapper setRelation(TypedRow relation) { this.relation = relation; return this; } @@ -22,7 +22,7 @@ public class Tuple implements Serializable { return target; } - public Tuple setTarget(TypedRow target) { + public TupleWrapper setTarget(TypedRow target) { this.target = target; return this; } diff --git a/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java b/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java index 2edb0aa70..199d12132 100644 --- a/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java +++ b/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java @@ -39,4 +39,14 @@ public class MappingUtilsTest { System.out.println(out); } + + @Test + public void testParseRelatedEntity() throws IOException { + + final InputStreamReader in = new InputStreamReader(getClass().getResourceAsStream("related_entity.json")); + final RelatedEntity e = new ObjectMapper().readValue(in, RelatedEntity.class); + + System.out.println(e); + + } } diff --git a/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/related_entity.json b/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/related_entity.json new file mode 100644 index 000000000..25c92baa3 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/related_entity.json @@ -0,0 +1,5 @@ +{ + "id": "20|nih_________::6b8108b6d6399f7163a6a7ccdd0efc2d", + "type": "organization", + "legalname": "MCGILL UNIVERSITY" +} \ No newline at end of file From 1ecca69f499c229b82e25501b70201f760eca4e5 Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Thu, 30 Jan 2020 17:45:28 +0100 Subject: [PATCH 08/21] added annotation to ignore method during the serialization --- .../eu/dnetlib/dhp/schema/oaf/GeoLocation.java | 3 ++- .../java/eu/dnetlib/dhp/schema/oaf/KeyValue.java | 2 ++ .../java/eu/dnetlib/dhp/schema/oaf/Qualifier.java | 15 +++++++++++---- 3 files changed, 15 insertions(+), 5 deletions(-) diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/GeoLocation.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/GeoLocation.java index 43af60286..1839fbd53 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/GeoLocation.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/GeoLocation.java @@ -1,5 +1,6 @@ package eu.dnetlib.dhp.schema.oaf; +import com.fasterxml.jackson.annotation.JsonIgnore; import org.apache.commons.lang3.StringUtils; import java.io.Serializable; @@ -36,7 +37,7 @@ public class GeoLocation implements Serializable { this.place = place; } - + @JsonIgnore public boolean isBlank() { return StringUtils.isBlank(point) && StringUtils.isBlank(box) && diff --git 
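
A note on why the @JsonIgnore annotations introduced by this patch are needed: Jackson's bean introspection treats a no-argument isXxx() method as the getter of a boolean property named xxx, so without the annotation every serialized GeoLocation, KeyValue and Qualifier would grow a spurious "blank" field. A self-contained sketch of the effect, using a stand-in bean rather than one of the schema classes:

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonIgnoreSketch {
    public static class WithIgnore {
        private String value;
        public String getValue() { return value; }
        public void setValue(String value) { this.value = value; }
        @JsonIgnore  // drop this annotation and the output gains a "blank" property
        public boolean isBlank() { return value == null || value.isEmpty(); }
    }
    public static void main(String[] args) throws Exception {
        WithIgnore w = new WithIgnore();
        w.setValue("x");
        // prints {"value":"x"} — no "blank" field
        System.out.println(new ObjectMapper().writeValueAsString(w));
    }
}
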
a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/KeyValue.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/KeyValue.java index 74d9f77bd..5a841b96f 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/KeyValue.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/KeyValue.java @@ -1,5 +1,6 @@ package eu.dnetlib.dhp.schema.oaf; +import com.fasterxml.jackson.annotation.JsonIgnore; import org.apache.commons.lang3.StringUtils; import java.io.Serializable; @@ -40,6 +41,7 @@ public class KeyValue implements Serializable { return isBlank()?"":String.format("%s::%s", key != null ? key.toLowerCase() : "", value != null ? value.toLowerCase() : ""); } + @JsonIgnore public boolean isBlank() { return StringUtils.isBlank(key) && StringUtils.isBlank(value); } diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Qualifier.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Qualifier.java index 7e4660f4b..00ae88c52 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Qualifier.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Qualifier.java @@ -1,5 +1,6 @@ package eu.dnetlib.dhp.schema.oaf; +import com.fasterxml.jackson.annotation.JsonIgnore; import org.apache.commons.lang3.StringUtils; import java.io.Serializable; @@ -15,32 +16,36 @@ public class Qualifier implements Serializable { return classid; } - public void setClassid(String classid) { + public Qualifier setClassid(String classid) { this.classid = classid; + return this; } public String getClassname() { return classname; } - public void setClassname(String classname) { + public Qualifier setClassname(String classname) { this.classname = classname; + return this; } public String getSchemeid() { return schemeid; } - public void setSchemeid(String schemeid) { + public Qualifier setSchemeid(String schemeid) { this.schemeid = schemeid; + return this; } public String getSchemename() { return schemename; } - public void setSchemename(String schemename) { + public Qualifier setSchemename(String schemename) { this.schemename = schemename; + return this; } public String toComparableString() { @@ -50,6 +55,8 @@ public class Qualifier implements Serializable { schemeid != null ? schemeid : "", schemename != null ? 
schemename : ""); } + + @JsonIgnore public boolean isBlank() { return StringUtils.isBlank(classid) && StringUtils.isBlank(classname) && From b2691a3b0a09c3d8bb04272e584491f190763f85 Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Thu, 30 Jan 2020 17:46:29 +0100 Subject: [PATCH 09/21] save adjacency list as JoinedEntity --- .../job-override.properties | 3 +- .../eu/dnetlib/dhp/graph/EntityRelEntity.java | 1 + .../eu/dnetlib/dhp/graph/GraphJoiner.java | 224 ++++++++++++++---- .../dnetlib/dhp/graph/GraphMappingUtils.java | 137 +++++++++++ .../eu/dnetlib/dhp/graph/JoinedEntity.java | 44 ++++ .../eu/dnetlib/dhp/graph/MappingUtils.java | 103 -------- .../eu/dnetlib/dhp/graph/RelatedEntity.java | 57 ++++- ...Job.java => SparkXmlRecordBuilderJob.java} | 23 +- .../dhp/graph/input_graph_parameters.json | 2 +- .../dnetlib/dhp/graph/oozie_app/workflow.xml | 11 +- .../dnetlib/dhp/graph/MappingUtilsTest.java | 4 +- 11 files changed, 427 insertions(+), 182 deletions(-) create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/JoinedEntity.java delete mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/MappingUtils.java rename dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/{SparkGraphIndexingJob.java => SparkXmlRecordBuilderJob.java} (54%) diff --git a/dhp-workflows/dhp-graph-provision/job-override.properties b/dhp-workflows/dhp-graph-provision/job-override.properties index 1870b0e6e..acaf16717 100644 --- a/dhp-workflows/dhp-graph-provision/job-override.properties +++ b/dhp-workflows/dhp-graph-provision/job-override.properties @@ -1,4 +1,5 @@ sparkDriverMemory=7G sparkExecutorMemory=7G hive_db_name=claudio -sourcePath=/tmp/db_openaireplus_services_beta.export.2019.11.06 \ No newline at end of file +sourcePath=/tmp/db_openaireplus_services_beta.export.2019.11.06 +outputPath=/tmp/openaire_provision \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityRelEntity.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityRelEntity.java index b0711bbff..285cacbc0 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityRelEntity.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityRelEntity.java @@ -3,6 +3,7 @@ package eu.dnetlib.dhp.graph; import java.io.Serializable; public class EntityRelEntity implements Serializable { + private TypedRow source; private TypedRow relation; private TypedRow target; diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java index 96d1f150a..f7bf0da39 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java @@ -1,11 +1,12 @@ package eu.dnetlib.dhp.graph; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.jayway.jsonpath.DocumentContext; import com.jayway.jsonpath.JsonPath; -import eu.dnetlib.dhp.schema.oaf.Qualifier; +import eu.dnetlib.dhp.schema.oaf.*; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.compress.GzipCodec; import org.apache.spark.api.java.JavaPairRDD; @@ -15,8 +16,10 @@ import 
org.apache.spark.api.java.function.PairFunction; import org.apache.spark.sql.SparkSession; import scala.Tuple2; +import java.io.IOException; import java.io.Serializable; import java.util.List; +import java.util.stream.Collectors; /** * Joins the graph nodes by resolving the links of distance = 1 to create an adjacency list of linked objects. @@ -40,21 +43,32 @@ public class GraphJoiner implements Serializable { public static final int MAX_RELS = 10; - public void join(final SparkSession spark, final String inputPath, final String hiveDbName, final String outPath) { + private SparkSession spark; - final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext()); + private String inputPath; + + private String outPath; + + public GraphJoiner(SparkSession spark, String inputPath, String outPath) { + this.spark = spark; + this.inputPath = inputPath; + this.outPath = outPath; + } + + public GraphJoiner adjacencyLists() { + final JavaSparkContext sc = new JavaSparkContext(getSpark().sparkContext()); // read each entity - JavaPairRDD datasource = readPathEntity(sc, inputPath, "datasource"); - JavaPairRDD organization = readPathEntity(sc, inputPath, "organization"); - JavaPairRDD project = readPathEntity(sc, inputPath, "project"); - JavaPairRDD dataset = readPathEntity(sc, inputPath, "dataset"); - JavaPairRDD otherresearchproduct = readPathEntity(sc, inputPath, "otherresearchproduct"); - JavaPairRDD software = readPathEntity(sc, inputPath, "software"); - JavaPairRDD publication = readPathEntity(sc, inputPath, "publication"); + JavaPairRDD datasource = readPathEntity(sc, getInputPath(), "datasource"); + JavaPairRDD organization = readPathEntity(sc, getInputPath(), "organization"); + JavaPairRDD project = readPathEntity(sc, getInputPath(), "project"); + JavaPairRDD dataset = readPathEntity(sc, getInputPath(), "dataset"); + JavaPairRDD otherresearchproduct = readPathEntity(sc, getInputPath(), "otherresearchproduct"); + JavaPairRDD software = readPathEntity(sc, getInputPath(), "software"); + JavaPairRDD publication = readPathEntity(sc, getInputPath(), "publication"); // create the union between all the entities - final String entitiesPath = outPath + "/entities"; + final String entitiesPath = getOutPath() + "/0_entities"; datasource .union(organization) .union(project) @@ -63,7 +77,7 @@ public class GraphJoiner implements Serializable { .union(software) .union(publication) .map(e -> new EntityRelEntity().setSource(e._2())) - .map(e -> new ObjectMapper().writeValueAsString(e)) + .map(GraphMappingUtils::serialize) .saveAsTextFile(entitiesPath, GzipCodec.class); JavaPairRDD entities = sc.textFile(entitiesPath) @@ -71,7 +85,7 @@ public class GraphJoiner implements Serializable { .mapToPair(t -> new Tuple2<>(t.getSource().getSourceId(), t)); // reads the relationships - final JavaPairRDD relation = readPathRelation(sc, inputPath) + final JavaPairRDD relation = readPathRelation(sc, getInputPath()) .filter(r -> !r.getDeleted()) //only consider those that are not virtually deleted .map(p -> new EntityRelEntity().setRelation(p)) .mapToPair(p -> new Tuple2<>(p.getRelation().getSourceId(), p)) @@ -80,45 +94,156 @@ public class GraphJoiner implements Serializable { .flatMap(p -> p.iterator()) .mapToPair(p -> new Tuple2<>(p.getRelation().getTargetId(), p)); - final String joinByTargetPath = outPath + "/join_by_target"; + final String joinByTargetPath = getOutPath() + "/1_join_by_target"; relation .join(entities .filter(e -> !e._2().getSource().getDeleted()) - /*.mapToPair(e -> new Tuple2<>(e._1(), new 
MappingUtils().pruneModel(e._2())))*/) + .mapToPair(e -> new Tuple2<>(e._1(), new GraphMappingUtils().pruneModel(e._2())))) .map(s -> new EntityRelEntity() .setRelation(s._2()._1().getRelation()) .setTarget(s._2()._2().getSource())) - .map(e -> new ObjectMapper().writeValueAsString(e)) + .map(GraphMappingUtils::serialize) .saveAsTextFile(joinByTargetPath, GzipCodec.class); JavaPairRDD bySource = sc.textFile(joinByTargetPath) .map(t -> new ObjectMapper().readValue(t, EntityRelEntity.class)) .mapToPair(t -> new Tuple2<>(t.getRelation().getSourceId(), t)); + final String linkedEntityPath = getOutPath() + "/2_linked_entities"; entities .union(bySource) .groupByKey() // by source id - .map(p -> { - final LinkedEntity e = new LinkedEntity(); - final List links = Lists.newArrayList(); - for(EntityRelEntity rel : p._2()) { - if (rel.hasMainEntity() & e.getEntity() == null) { - e.setEntity(rel.getSource()); - } - if (rel.hasRelatedEntity()) { - links.add(new Tuple() - .setRelation(rel.getRelation()) - .setTarget(rel.getTarget())); - } - } - e.setLinks(links); - if (e.getEntity() == null) { - throw new IllegalStateException("missing main entity on '" + p._1() + "'"); - } - return e; - }) - .map(e -> new ObjectMapper().writeValueAsString(e)) - .saveAsTextFile(outPath + "/linked_entities", GzipCodec.class); + .map(p -> toLinkedEntity(p)) + .map(e -> new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL).writeValueAsString(e)) + .saveAsTextFile(linkedEntityPath, GzipCodec.class); + + final String joinedEntitiesPath = getOutPath() + "/3_joined_entities"; + sc.textFile(linkedEntityPath) + .map(s -> new ObjectMapper().readValue(s, LinkedEntity.class)) + .map(l -> toJoinedEntity(l)) + .map(j -> new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL).writeValueAsString(j)) + .saveAsTextFile(joinedEntitiesPath); + + return this; + } + + public GraphJoiner asXML() { + final JavaSparkContext sc = new JavaSparkContext(getSpark().sparkContext()); + + final String joinedEntitiesPath = getOutPath() + "/3_joined_entities"; + sc.textFile(joinedEntitiesPath) + .map(s -> new ObjectMapper().readValue(s, LinkedEntity.class)) + .map(l -> toXML(l)) + .saveAsTextFile(getOutPath() + "/4_xml"); + + return this; + } + + private String toXML(LinkedEntity l) { + + return null; + } + + public SparkSession getSpark() { + return spark; + } + + public GraphJoiner setSpark(SparkSession spark) { + this.spark = spark; + return this; + } + + public String getInputPath() { + return inputPath; + } + + public GraphJoiner setInputPath(String inputPath) { + this.inputPath = inputPath; + return this; + } + + public String getOutPath() { + return outPath; + } + + public GraphJoiner setOutPath(String outPath) { + this.outPath = outPath; + return this; + } + + // HELPERS + + private OafEntity parseOaf(final String json, final String type) { + final ObjectMapper o = new ObjectMapper(); + try { + switch (type) { + case "publication": + return o.readValue(json, Publication.class); + case "dataset": + return o.readValue(json, Dataset.class); + case "otherresearchproduct": + return o.readValue(json, OtherResearchProduct.class); + case "software": + return o.readValue(json, Software.class); + case "datasource": + return o.readValue(json, Datasource.class); + case "organization": + return o.readValue(json, Organization.class); + case "project": + return o.readValue(json, Project.class); + default: + throw new IllegalArgumentException("invalid type: " + type); + } + } catch (IOException e) { + throw new 
IllegalArgumentException(e); + } + } + + /** + * Converts the result of grouping pairs and the entities by source id to LinkedEntity + * @param p + * @return + */ + private LinkedEntity toLinkedEntity(Tuple2> p) { + final LinkedEntity e = new LinkedEntity(); + final List links = Lists.newArrayList(); + for(EntityRelEntity rel : p._2()) { + if (rel.hasMainEntity() & e.getEntity() == null) { + e.setEntity(rel.getSource()); + } + if (rel.hasRelatedEntity()) { + links.add(new Tuple() + .setRelation(rel.getRelation()) + .setTarget(rel.getTarget())); + } + } + e.setLinks(links); + if (e.getEntity() == null) { + throw new IllegalStateException("missing main entity on '" + p._1() + "'"); + } + return e; + } + + /** + * Converts a LinkedEntity to a JoinedEntity + * @param l + * @return + */ + private JoinedEntity toJoinedEntity(LinkedEntity l) { + return new JoinedEntity().setType(l.getEntity().getType()) + .setEntity(parseOaf(l.getEntity().getOaf(), l.getEntity().getType())) + .setLinks(l.getLinks() + .stream() + .map(t -> { + final ObjectMapper o = new ObjectMapper(); + try { + return new Tuple2<>( + o.readValue(t.getRelation().getOaf(), Relation.class), + o.readValue(t.getTarget().getOaf(), RelatedEntity.class)); + } catch (IOException e) { + throw new IllegalArgumentException(e); + } + }).collect(Collectors.toList())); } /** @@ -132,14 +257,14 @@ public class GraphJoiner implements Serializable { private JavaPairRDD readPathEntity(final JavaSparkContext sc, final String inputPath, final String type) { return sc.sequenceFile(inputPath + "/" + type, Text.class, Text.class) .mapToPair((PairFunction, String, TypedRow>) item -> { - - final String json = item._2().toString(); - final String id = JsonPath.read(json, "$.id"); + final String s = item._2().toString(); + final DocumentContext json = JsonPath.parse(s); + final String id = json.read("$.id"); return new Tuple2<>(id, new TypedRow() - .setSourceId(id) - .setDeleted(JsonPath.read(json, "$.dataInfo.deletedbyinference")) - .setType(type) - .setOaf(json)); + .setSourceId(id) + .setDeleted(json.read("$.dataInfo.deletedbyinference")) + .setType(type) + .setOaf(s)); }); } @@ -153,13 +278,14 @@ public class GraphJoiner implements Serializable { private JavaRDD readPathRelation(final JavaSparkContext sc, final String inputPath) { return sc.sequenceFile(inputPath + "/relation", Text.class, Text.class) .map(item -> { - final String json = item._2().toString(); + final String s = item._2().toString(); + final DocumentContext json = JsonPath.parse(s); return new TypedRow() - .setSourceId(JsonPath.read(json, "$.source")) - .setTargetId(JsonPath.read(json, "$.target")) - .setDeleted(JsonPath.read(json, "$.dataInfo.deletedbyinference")) + .setSourceId(json.read("$.source")) + .setTargetId(json.read("$.target")) + .setDeleted(json.read("$.dataInfo.deletedbyinference")) .setType("relation") - .setOaf(json); + .setOaf(s); }); } diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java index ab19ff2b5..e3622cd20 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java @@ -1,9 +1,18 @@ package eu.dnetlib.dhp.graph; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; 
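
The join strategy implemented by GraphJoiner above, reduced to its core: key the relations by target id, join them with the (pruned) entities to resolve the target side, then re-key by source id and group to obtain one adjacency list per node. A toy sketch with plain strings standing in for TypedRow; the local master and the sample ids are assumptions for demonstration only:

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;
import java.util.Arrays;

public class AdjacencySketch {
    public static void main(String[] args) {
        try (JavaSparkContext sc = new JavaSparkContext(new SparkConf().setAppName("adjacency").setMaster("local[*]"))) {
            // entities keyed by id
            JavaPairRDD<String, String> entities = sc.parallelizePairs(Arrays.asList(
                    new Tuple2<>("p1", "publication"), new Tuple2<>("o1", "organization")));
            // relations keyed by TARGET id: (targetId, (sourceId, relType))
            JavaPairRDD<String, Tuple2<String, String>> byTarget = sc.parallelizePairs(Arrays.asList(
                    new Tuple2<>("o1", new Tuple2<>("p1", "affiliation"))));
            // resolve the target side, then re-key by source id and group
            byTarget.join(entities)                                   // (o1, ((p1, affiliation), organization))
                    .mapToPair(t -> new Tuple2<>(t._2()._1()._1(),    // source id
                            t._2()._1()._2() + "->" + t._2()._2()))   // relType->target payload
                    .groupByKey()
                    .collect()
                    .forEach(System.out::println);                    // (p1,[affiliation->organization])
        }
    }
}
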
import com.google.common.collect.Maps; +import com.jayway.jsonpath.DocumentContext; +import com.jayway.jsonpath.JsonPath; import eu.dnetlib.dhp.schema.oaf.*; +import net.minidev.json.JSONArray; +import org.apache.commons.lang3.StringUtils; +import java.util.LinkedHashMap; import java.util.Map; +import java.util.stream.Collectors; public class GraphMappingUtils { @@ -20,4 +29,132 @@ public class GraphMappingUtils { types.put("relation", Relation.class); } + public static EntityRelEntity pruneModel(EntityRelEntity e) { + + final DocumentContext j = JsonPath.parse(e.getSource().getOaf()); + final RelatedEntity re = new RelatedEntity().setId(j.read("$.id")).setType(e.getSource().getType()); + + switch (e.getSource().getType()) { + case "publication": + case "dataset": + case "otherresearchproduct": + case "software": + mapTitle(j, re); + re.setDateofacceptance(j.read("$.dateofacceptance.value")); + re.setPublisher(j.read("$.publisher.value")); + + JSONArray pids = j.read("$.pid"); + re.setPid(pids.stream() + .map(p -> asStructuredProperty((LinkedHashMap) p)) + .collect(Collectors.toList())); + + re.setResulttype(asQualifier(j.read("$.resulttype"))); + + JSONArray collfrom = j.read("$.collectedfrom"); + re.setCollectedfrom(collfrom.stream() + .map(c -> asKV((LinkedHashMap)c)) + .collect(Collectors.toList())); + + //TODO still to be mapped + //re.setCodeRepositoryUrl(j.read("$.coderepositoryurl")); + + break; + case "datasource": + re.setOfficialname(j.read("$.officialname.value")); + re.setWebsiteurl(j.read("$.websiteurl.value")); + re.setDatasourcetype(asQualifier(j.read("$.datasourcetype"))); + re.setOpenairecompatibility(asQualifier(j.read("$.openairecompatibility"))); + + break; + case "organization": + re.setLegalname(j.read("$.legalname.value")); + re.setLegalshortname(j.read("$.legalshortname.value")); + re.setCountry(asQualifier(j.read("$.country"))); + + break; + case "project": + re.setProjectTitle(j.read("$.title.value")); + re.setCode(j.read("$.code.value")); + re.setAcronym(j.read("$.acronym.value")); + re.setContracttype(asQualifier(j.read("$.contracttype"))); + + JSONArray f = j.read("$.fundingtree"); + if (!f.isEmpty()) { + re.setFundingtree(f.stream() + .map(s -> s.toString()) + .collect(Collectors.toList())); + } + + break; + } + return new EntityRelEntity().setSource( + new TypedRow() + .setSourceId(e.getSource().getSourceId()) + .setDeleted(e.getSource().getDeleted()) + .setType(e.getSource().getType()) + .setOaf(serialize(re))); + } + + private static KeyValue asKV(LinkedHashMap j) { + final KeyValue kv = new KeyValue(); + kv.setKey((String) j.get("key")); + kv.setValue((String) j.get("value")); + return kv; + } + + private static void mapTitle(DocumentContext j, RelatedEntity re) { + final JSONArray a = j.read("$.title"); + if (!a.isEmpty()) { + final StructuredProperty sp = asStructuredProperty((LinkedHashMap) a.get(0)); + if(StringUtils.isNotBlank(sp.getValue())) { + re.setTitle(sp); + } + } + } + + private static StructuredProperty asStructuredProperty(LinkedHashMap j) { + final StructuredProperty sp = new StructuredProperty(); + final String value = (String) j.get("value"); + if (StringUtils.isNotBlank(value)) { + sp.setValue((String) j.get("value")); + sp.setQualifier(asQualifier((LinkedHashMap) j.get("qualifier"))); + } + return sp; + } + + public static Qualifier asQualifier(LinkedHashMap j) { + final Qualifier q = new Qualifier(); + + final String classid = j.get("classid"); + if (StringUtils.isNotBlank(classid)) { + q.setClassid(classid); + } + + final String 
classname = j.get("classname"); + if (StringUtils.isNotBlank(classname)) { + q.setClassname(classname); + } + + final String schemeid = j.get("schemeid"); + if (StringUtils.isNotBlank(schemeid)) { + q.setSchemeid(schemeid); + } + + final String schemename = j.get("schemename"); + if (StringUtils.isNotBlank(schemename)) { + q.setSchemename(schemename); + } + return q; + } + + public static String serialize(final Object o) { + try { + return new ObjectMapper() + .setSerializationInclusion(JsonInclude.Include.NON_NULL) + .writeValueAsString(o); + } catch (JsonProcessingException e) { + throw new IllegalArgumentException("unable to serialize: " + o.toString(), e); + } + } + } diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/JoinedEntity.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/JoinedEntity.java new file mode 100644 index 000000000..d65eb64c8 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/JoinedEntity.java @@ -0,0 +1,44 @@ +package eu.dnetlib.dhp.graph; + +import eu.dnetlib.dhp.schema.oaf.OafEntity; +import eu.dnetlib.dhp.schema.oaf.Relation; +import scala.Tuple2; + +import java.io.Serializable; +import java.util.List; + +public class JoinedEntity implements Serializable { + + private String type; + + private OafEntity entity; + + private List> links; + + public String getType() { + return type; + } + + public JoinedEntity setType(String type) { + this.type = type; + return this; + } + + public OafEntity getEntity() { + return entity; + } + + public JoinedEntity setEntity(OafEntity entity) { + this.entity = entity; + return this; + } + + public List> getLinks() { + return links; + } + + public JoinedEntity setLinks(List> links) { + this.links = links; + return this; + } +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/MappingUtils.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/MappingUtils.java deleted file mode 100644 index 756506c12..000000000 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/MappingUtils.java +++ /dev/null @@ -1,103 +0,0 @@ -package eu.dnetlib.dhp.graph; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.jayway.jsonpath.DocumentContext; -import com.jayway.jsonpath.JsonPath; -import eu.dnetlib.dhp.schema.oaf.KeyValue; -import eu.dnetlib.dhp.schema.oaf.Qualifier; -import eu.dnetlib.dhp.schema.oaf.StructuredProperty; -import net.minidev.json.JSONArray; - -import java.util.LinkedHashMap; -import java.util.stream.Collectors; - -public class MappingUtils { - - public EntityRelEntity pruneModel(EntityRelEntity e) throws JsonProcessingException { - - final DocumentContext j = JsonPath.parse(e.getSource().getOaf()); - final RelatedEntity re = new RelatedEntity(); - - switch (e.getSource().getType()) { - case "publication": - case "dataset": - case "otherresearchproduct": - case "software": - - mapTitle(j, re); - re.setDateofacceptance(j.read("$.dateofacceptance.value")); - re.setPublisher(j.read("$.publisher.value")); - - JSONArray pids = j.read("$.pid"); - re.setPid(pids.stream() - .map(p -> asStructuredProperty((LinkedHashMap) p)) - .collect(Collectors.toList())); - - re.setResulttype(asQualifier(j.read("$.resulttype"))); - - JSONArray collfrom = j.read("$.collectedfrom"); - re.setCollectedfrom(collfrom.stream() - .map(c -> asKV((LinkedHashMap)c)) - .collect(Collectors.toList())); - - //TODO still to be mapped 
- //re.setCodeRepositoryUrl(j.read("$.coderepositoryurl")); - - break; - case "datasource": - re.setOfficialname(j.read("$.officialname.value")); - re.setWebsiteurl(j.read("$.websiteurl.value")); - - re.setDatasourcetype(asQualifier(j.read("$.datasourcetype"))); - re.setOpenairecompatibility(asQualifier(j.read("$.openairecompatibility"))); - - break; - case "organization": - - break; - case "project": - mapTitle(j, re); - break; - } - - return new EntityRelEntity().setSource( - new TypedRow() - .setSourceId(e.getSource().getSourceId()) - .setDeleted(e.getSource().getDeleted()) - .setType(e.getSource().getType()) - .setOaf(new ObjectMapper().writeValueAsString(re))); - } - - private KeyValue asKV(LinkedHashMap j) { - final KeyValue kv = new KeyValue(); - kv.setKey((String) j.get("key")); - kv.setValue((String) j.get("value")); - return kv; - } - - private void mapTitle(DocumentContext j, RelatedEntity re) { - JSONArray a = j.read("$.title"); - if (!a.isEmpty()) { - re.setTitle(asStructuredProperty((LinkedHashMap) a.get(0))); - } - } - - private StructuredProperty asStructuredProperty(LinkedHashMap j) { - final StructuredProperty sp = new StructuredProperty(); - sp.setValue((String) j.get("value")); - sp.setQualifier(asQualifier((LinkedHashMap) j.get("qualifier"))); - return sp; - - } - - public Qualifier asQualifier(LinkedHashMap j) { - Qualifier q = new Qualifier(); - q.setClassid(j.get("classid")); - q.setClassname(j.get("classname")); - q.setSchemeid(j.get("schemeid")); - q.setSchemename(j.get("schemename")); - return q; - } - -} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java index a441392b2..50b97dace 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java @@ -3,14 +3,22 @@ package eu.dnetlib.dhp.graph; import eu.dnetlib.dhp.schema.oaf.KeyValue; import eu.dnetlib.dhp.schema.oaf.Qualifier; import eu.dnetlib.dhp.schema.oaf.StructuredProperty; +import org.codehaus.jackson.map.ObjectMapper; +import java.io.IOException; import java.io.Serializable; import java.util.List; public class RelatedEntity implements Serializable { + private String id; + private String type; + + // common fields + private StructuredProperty title; + private String websiteurl; // datasource, organizations, projects + // results - private StructuredProperty title; // also for projects private String dateofacceptance; private String publisher; private List pid; @@ -20,11 +28,10 @@ public class RelatedEntity implements Serializable { // datasource private String officialname; - private String websiteurl; // also for organizations, projects private Qualifier datasourcetype; private Qualifier datasourcetypeui; - //private String aggregatortype; private Qualifier openairecompatibility; + //private String aggregatortype; // organization private String legalname; @@ -32,10 +39,28 @@ public class RelatedEntity implements Serializable { private Qualifier country; // project + private String projectTitle; private String code; private String acronym; private Qualifier contracttype; - private String fundingtree; + private List fundingtree; + + public static RelatedEntity parse(final String json) { + try { + return new ObjectMapper().readValue(json, RelatedEntity.class); + } catch (IOException e) { + throw new IllegalArgumentException("invalid 
RelatedEntity, cannot parse: " + json); + } + } + + public String getId() { + return id; + } + + public RelatedEntity setId(String id) { + this.id = id; + return this; + } public StructuredProperty getTitle() { return title; @@ -199,12 +224,30 @@ public class RelatedEntity implements Serializable { return this; } - public String getFundingtree() { + public List getFundingtree() { return fundingtree; } - public RelatedEntity setFundingtree(String fundingtree) { + public RelatedEntity setFundingtree(List fundingtree) { this.fundingtree = fundingtree; return this; } -} + + public String getProjectTitle() { + return projectTitle; + } + + public RelatedEntity setProjectTitle(String projectTitle) { + this.projectTitle = projectTitle; + return this; + } + + public String getType() { + return type; + } + + public RelatedEntity setType(String type) { + this.type = type; + return this; + } +} \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkGraphIndexingJob.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlRecordBuilderJob.java similarity index 54% rename from dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkGraphIndexingJob.java rename to dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlRecordBuilderJob.java index 1d55dda89..38bc2bae2 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkGraphIndexingJob.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlRecordBuilderJob.java @@ -7,37 +7,34 @@ import org.apache.hadoop.fs.Path; import org.apache.spark.SparkConf; import org.apache.spark.sql.SparkSession; -public class SparkGraphIndexingJob { - - private final static String OUTPUT_BASE_PATH = "/tmp/openaire_provision"; +public class SparkXmlRecordBuilderJob { public static void main(String[] args) throws Exception { - final ArgumentApplicationParser parser = new ArgumentApplicationParser(IOUtils.toString(SparkGraphIndexingJob.class.getResourceAsStream("/eu/dnetlib/dhp/graph/input_graph_parameters.json"))); + final ArgumentApplicationParser parser = new ArgumentApplicationParser(IOUtils.toString(SparkXmlRecordBuilderJob.class.getResourceAsStream("/eu/dnetlib/dhp/graph/input_graph_parameters.json"))); parser.parseArgument(args); final SparkConf conf = new SparkConf() - .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer") - .set("hive.metastore.uris", parser.get("hive_metastore_uris")); + .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer"); final SparkSession spark = SparkSession .builder() .config(conf) - .appName(SparkGraphIndexingJob.class.getSimpleName()) + .appName(SparkXmlRecordBuilderJob.class.getSimpleName()) .master(parser.get("master")) - .enableHiveSupport() .getOrCreate(); final String inputPath = parser.get("sourcePath"); - final String hiveDbName = parser.get("hive_db_name"); + final String outputPath = parser.get("outputPath"); final FileSystem fs = FileSystem.get(spark.sparkContext().hadoopConfiguration()); - if (fs.exists(new Path(OUTPUT_BASE_PATH))) { - fs.delete(new Path(OUTPUT_BASE_PATH), true); - fs.mkdirs(new Path(OUTPUT_BASE_PATH)); + if (fs.exists(new Path(outputPath))) { + fs.delete(new Path(outputPath), true); + fs.mkdirs(new Path(outputPath)); } - new GraphJoiner().join(spark, inputPath, hiveDbName, OUTPUT_BASE_PATH); + new GraphJoiner(spark, inputPath, outputPath) + .adjacencyLists(); } } diff --git 
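
SparkXmlRecordBuilderJob now clears its output directory up front, since Spark's saveAsTextFile refuses to write into an existing path. The same guard in isolation (the path below is a placeholder):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class OutputDirSketch {
    public static void main(String[] args) throws Exception {
        final FileSystem fs = FileSystem.get(new Configuration());
        final Path out = new Path("/tmp/openaire_provision_demo");
        if (fs.exists(out)) {
            fs.delete(out, true);  // recursive delete, then recreate empty
            fs.mkdirs(out);
        }
    }
}
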
a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_graph_parameters.json b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_graph_parameters.json index a197abc78..3a02ab1a0 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_graph_parameters.json +++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_graph_parameters.json @@ -1,6 +1,6 @@ [ {"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true}, {"paramName":"h", "paramLongName":"hive_metastore_uris","paramDescription": "the hive metastore uris", "paramRequired": true}, - {"paramName":"db", "paramLongName":"hive_db_name", "paramDescription": "the target hive database name", "paramRequired": true}, + {"paramName":"o", "paramLongName":"outputPath", "paramDescription": "the path used to store temporary output files", "paramRequired": true}, {"paramName":"s", "paramLongName":"sourcePath", "paramDescription": "the path of the sequencial file to read", "paramRequired": true} ] \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml index a91759ade..4b4d2c7bf 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml +++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml @@ -26,20 +26,20 @@ - + Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}] - + ${jobTracker} ${nameNode} yarn-cluster cluster - GraphIndexing - eu.dnetlib.dhp.graph.SparkGraphIndexingJob + build_adjacency_lists + eu.dnetlib.dhp.graph.SparkXmlRecordBuilderJob dhp-graph-provision-${projectVersion}.jar --executor-memory ${sparkExecutorMemory} @@ -53,8 +53,7 @@ -mt yarn-cluster --sourcePath${sourcePath} - --hive_db_name${hive_db_name} - --hive_metastore_uris${hive_metastore_uris} + --outputPath${outputPath} diff --git a/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java b/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java index 2edb0aa70..0deb3d81a 100644 --- a/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java +++ b/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java @@ -9,11 +9,11 @@ import java.io.InputStreamReader; public class MappingUtilsTest { - private MappingUtils utils; + private GraphMappingUtils utils; @Before public void setUp() { - utils = new MappingUtils(); + utils = new GraphMappingUtils(); } @Test From b5e1e2e5b290efd9e3aaad12d6dfcd37cecce8ab Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Thu, 30 Jan 2020 18:11:04 +0100 Subject: [PATCH 10/21] reintegrated changes from fcbc4ccd70b7edfdb8c041dc9dbdaed5943fa13a --- .../eu/dnetlib/dhp/graph/GraphMapper.java | 77 ------------------- .../main/java/eu/dnetlib/dhp/graph/Link.java | 30 -------- .../eu/dnetlib/dhp/graph/LinkedEntity.java | 25 ++---- .../dhp/graph/LinkedEntityWrapper.java | 40 ---------- .../dhp/graph/SparkXmlRecordBuilderJob.java | 3 +- .../main/java/eu/dnetlib/dhp/graph/Tuple.java | 29 +++++++ 6 files changed, 37 insertions(+), 167 deletions(-) delete mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMapper.java delete mode 100644 
dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Link.java delete mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntityWrapper.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Tuple.java diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMapper.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMapper.java deleted file mode 100644 index bdfea7979..000000000 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMapper.java +++ /dev/null @@ -1,77 +0,0 @@ -package eu.dnetlib.dhp.graph; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import eu.dnetlib.dhp.schema.oaf.*; -import org.apache.spark.api.java.JavaSparkContext; -import org.apache.spark.sql.SparkSession; - -import java.io.IOException; -import java.util.stream.Collectors; - -public class GraphMapper { - - - public void map(final SparkSession spark, final String outPath) { - - final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext()); - - sc.textFile(outPath + "/linked_entities") - .map(LinkedEntityWrapper::parse) - .map(GraphMapper::asLinkedEntity) - .map(e -> new ObjectMapper().writeValueAsString(e)) - .saveAsTextFile(outPath + "/linked_entities_types"); - } - - private static LinkedEntity asLinkedEntity(final LinkedEntityWrapper lw) throws JsonProcessingException { - final LinkedEntity le = new LinkedEntity(); - - try { - le.setType(lw.getEntity().getType()); - le.setEntity(parseEntity(lw.getEntity().getOaf(), le.getType())); - le.setLinks(lw.getLinks() - .stream() - .map(l -> new Link() - .setRelation(parseRelation(l.getRelation().getOaf())) - .setRelatedEntity(RelatedEntity.parse(l.getTarget().getOaf()))) - .collect(Collectors.toList())); - return le; - } catch (IllegalArgumentException e) { - throw new IllegalArgumentException(new ObjectMapper().writeValueAsString(lw), e); - } - } - - private static Relation parseRelation(final String s) { - try { - return new ObjectMapper().readValue(s, Relation.class); - } catch (IOException e) { - throw new IllegalArgumentException("unable to decode Relation: " + s); - } - } - - private static OafEntity parseEntity(final String json, final String type) { - final ObjectMapper o = new ObjectMapper(); - try { - switch (type) { - case "publication": - return o.readValue(json, Publication.class); - case "dataset": - return o.readValue(json, Dataset.class); - case "otherresearchproduct": - return o.readValue(json, OtherResearchProduct.class); - case "software": - return o.readValue(json, Software.class); - case "datasource": - return o.readValue(json, Datasource.class); - case "project": - return o.readValue(json, Project.class); - case "organization": - return o.readValue(json, Organization.class); - default: - throw new IllegalArgumentException("invalid entity type: " + type); - } - } catch (IOException e) { - throw new IllegalArgumentException("unable to decode oaf entity: " + json); - } - } -} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Link.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Link.java deleted file mode 100644 index 8426fbd12..000000000 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Link.java +++ /dev/null @@ -1,30 +0,0 @@ -package eu.dnetlib.dhp.graph; - -import eu.dnetlib.dhp.schema.oaf.Relation; - -import 
java.io.Serializable; - -public class Link implements Serializable { - - private Relation relation; - - private RelatedEntity relatedEntity; - - public Relation getRelation() { - return relation; - } - - public Link setRelation(Relation relation) { - this.relation = relation; - return this; - } - - public RelatedEntity getRelatedEntity() { - return relatedEntity; - } - - public Link setRelatedEntity(RelatedEntity relatedEntity) { - this.relatedEntity = relatedEntity; - return this; - } -} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntity.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntity.java index c7c2d1892..9e6fc0d38 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntity.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntity.java @@ -1,41 +1,28 @@ package eu.dnetlib.dhp.graph; -import eu.dnetlib.dhp.schema.oaf.OafEntity; - import java.io.Serializable; import java.util.List; public class LinkedEntity implements Serializable { - private String type; + private TypedRow entity; - private OafEntity entity; + private List links; - private List links; - - public String getType() { - return type; - } - - public LinkedEntity setType(String type) { - this.type = type; - return this; - } - - public OafEntity getEntity() { + public TypedRow getEntity() { return entity; } - public LinkedEntity setEntity(OafEntity entity) { + public LinkedEntity setEntity(TypedRow entity) { this.entity = entity; return this; } - public List getLinks() { + public List getLinks() { return links; } - public LinkedEntity setLinks(List links) { + public LinkedEntity setLinks(List links) { this.links = links; return this; } diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntityWrapper.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntityWrapper.java deleted file mode 100644 index 17853208c..000000000 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntityWrapper.java +++ /dev/null @@ -1,40 +0,0 @@ -package eu.dnetlib.dhp.graph; - -import com.fasterxml.jackson.databind.ObjectMapper; - -import java.io.IOException; -import java.io.Serializable; -import java.util.List; - -public class LinkedEntityWrapper implements Serializable { - - private TypedRow entity; - - private List links; - - public static LinkedEntityWrapper parse(final String s) { - try { - return new ObjectMapper().readValue(s, LinkedEntityWrapper.class); - } catch (IOException e) { - throw new IllegalArgumentException("unable to decode LinkedEntityWrapper: " + s); - } - } - - public TypedRow getEntity() { - return entity; - } - - public LinkedEntityWrapper setEntity(TypedRow entity) { - this.entity = entity; - return this; - } - - public List getLinks() { - return links; - } - - public LinkedEntityWrapper setLinks(List links) { - this.links = links; - return this; - } -} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlRecordBuilderJob.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlRecordBuilderJob.java index 38bc2bae2..2a518eb92 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlRecordBuilderJob.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlRecordBuilderJob.java @@ -34,7 +34,8 @@ public class SparkXmlRecordBuilderJob { } new GraphJoiner(spark, 
inputPath, outputPath) - .adjacencyLists(); + .adjacencyLists() + .asXML(); } } diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Tuple.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Tuple.java new file mode 100644 index 000000000..1eb0491a7 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Tuple.java @@ -0,0 +1,29 @@ +package eu.dnetlib.dhp.graph; + +import java.io.Serializable; + +public class Tuple implements Serializable { + + private TypedRow relation; + + private TypedRow target; + + + public TypedRow getRelation() { + return relation; + } + + public Tuple setRelation(TypedRow relation) { + this.relation = relation; + return this; + } + + public TypedRow getTarget() { + return target; + } + + public Tuple setTarget(TypedRow target) { + this.target = target; + return this; + } +} From 49ef2f4eb1d3d64c98b242b028e097e78044c1b6 Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Thu, 30 Jan 2020 18:20:26 +0100 Subject: [PATCH 11/21] removed input parameter specification, SparkXmlRecordBuilderJob doesn't need hive --- .../resources/eu/dnetlib/dhp/graph/input_graph_parameters.json | 1 - 1 file changed, 1 deletion(-) diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_graph_parameters.json b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_graph_parameters.json index 3a02ab1a0..cbd4285bf 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_graph_parameters.json +++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_graph_parameters.json @@ -1,6 +1,5 @@ [ {"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true}, - {"paramName":"h", "paramLongName":"hive_metastore_uris","paramDescription": "the hive metastore uris", "paramRequired": true}, {"paramName":"o", "paramLongName":"outputPath", "paramDescription": "the path used to store temporary output files", "paramRequired": true}, {"paramName":"s", "paramLongName":"sourcePath", "paramDescription": "the path of the sequencial file to read", "paramRequired": true} ] \ No newline at end of file From 7ba0f44d0583d441c7d12bc30cabfc0f72a25d2c Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Thu, 30 Jan 2020 18:21:07 +0100 Subject: [PATCH 12/21] WIP --- .../java/eu/dnetlib/dhp/graph/SparkXmlRecordBuilderJob.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlRecordBuilderJob.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlRecordBuilderJob.java index 2a518eb92..38bc2bae2 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlRecordBuilderJob.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlRecordBuilderJob.java @@ -34,8 +34,7 @@ public class SparkXmlRecordBuilderJob { } new GraphJoiner(spark, inputPath, outputPath) - .adjacencyLists() - .asXML(); + .adjacencyLists(); } } From ed290ca8d77668d3a9b215cd0ec26e58f2871ac1 Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Mon, 3 Feb 2020 10:35:51 +0100 Subject: [PATCH 13/21] builder pattern --- .../eu/dnetlib/dhp/schema/oaf/Datasource.java | 4 +-- .../java/eu/dnetlib/dhp/schema/oaf/Field.java | 6 +++-- .../eu/dnetlib/dhp/schema/oaf/Instance.java | 26 ++++++++++++------- 3 files changed, 22 insertions(+), 
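
The "builder pattern" commit beginning here converts Field and Instance to fluent setters that return this, letting mapping code chain assignments into a single expression. Illustrated below with a stand-in bean rather than one of the schema classes:

public class FluentSketch {
    public static class Bean {
        private String key;
        private String value;
        public String getKey() { return key; }
        public Bean setKey(String key) { this.key = key; return this; }
        public String getValue() { return value; }
        public Bean setValue(String value) { this.value = value; return this; }
    }
    public static void main(String[] args) {
        // one expression instead of three statements
        Bean b = new Bean().setKey("k").setValue("v");
        System.out.println(b.getKey() + "=" + b.getValue());
    }
}
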
14 deletions(-) diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Datasource.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Datasource.java index f52a500fe..032468de2 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Datasource.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Datasource.java @@ -40,9 +40,9 @@ public class Datasource extends OafEntity implements Serializable { private List> odlanguages; - private List< Field> odcontenttypes; + private List> odcontenttypes; - private List< Field> accessinfopackage; + private List> accessinfopackage; // re3data fields private Field releasestartdate; diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Field.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Field.java index 2ab0b4d3c..a75ed25c2 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Field.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Field.java @@ -12,16 +12,18 @@ public class Field implements Serializable { return value; } - public void setValue(T value) { + public Field setValue(T value) { this.value = value; + return this; } public DataInfo getDataInfo() { return dataInfo; } - public void setDataInfo(DataInfo dataInfo) { + public Field setDataInfo(DataInfo dataInfo) { this.dataInfo = dataInfo; + return this; } @Override diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Instance.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Instance.java index 8f852af65..06c907ee7 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Instance.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Instance.java @@ -26,68 +26,74 @@ public class Instance implements Serializable { return license; } - public void setLicense(Field license) { + public Instance setLicense(Field license) { this.license = license; + return this; } public Qualifier getAccessright() { return accessright; } - public void setAccessright(Qualifier accessright) { + public Instance setAccessright(Qualifier accessright) { this.accessright = accessright; + return this; } public Qualifier getInstancetype() { return instancetype; } - public void setInstancetype(Qualifier instancetype) { + public Instance setInstancetype(Qualifier instancetype) { this.instancetype = instancetype; + return this; } public KeyValue getHostedby() { return hostedby; } - public void setHostedby(KeyValue hostedby) { + public Instance setHostedby(KeyValue hostedby) { this.hostedby = hostedby; + return this; } public List getUrl() { return url; } - public void setUrl(List url) { + public Instance setUrl(List url) { this.url = url; + return this; } public String getDistributionlocation() { return distributionlocation; } - public void setDistributionlocation(String distributionlocation) { + public Instance setDistributionlocation(String distributionlocation) { this.distributionlocation = distributionlocation; + return this; } public KeyValue getCollectedfrom() { return collectedfrom; } - public void setCollectedfrom(KeyValue collectedfrom) { + public Instance setCollectedfrom(KeyValue collectedfrom) { this.collectedfrom = collectedfrom; + return this; } public Field getDateofacceptance() { return dateofacceptance; } - public void setDateofacceptance(Field dateofacceptance) { + public Instance setDateofacceptance(Field dateofacceptance) { this.dateofacceptance = dateofacceptance; + return this; } - - public String toComparableString(){ return String.format("%s::%s::%s::%s", hostedby != null && 
hostedby.getKey()!= null ? hostedby.getKey().toLowerCase() : "", From d3b96f102b7ba953931c6681fa0b7fdbcc26c102 Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Tue, 4 Feb 2020 14:10:58 +0100 Subject: [PATCH 14/21] builder pattern screws up the Parquet schema inference method, avoid using it in the bean definitions --- .../java/eu/dnetlib/dhp/schema/oaf/Field.java | 6 ++-- .../eu/dnetlib/dhp/schema/oaf/Instance.java | 24 +++++-------- .../eu/dnetlib/dhp/schema/oaf/Qualifier.java | 12 +++---- .../eu/dnetlib/dhp/schema/oaf/Result.java | 34 +++++++++---------- 4 files changed, 30 insertions(+), 46 deletions(-) diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Field.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Field.java index a75ed25c2..2ab0b4d3c 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Field.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Field.java @@ -12,18 +12,16 @@ public class Field implements Serializable { return value; } - public Field setValue(T value) { + public void setValue(T value) { this.value = value; - return this; } public DataInfo getDataInfo() { return dataInfo; } - public Field setDataInfo(DataInfo dataInfo) { + public void setDataInfo(DataInfo dataInfo) { this.dataInfo = dataInfo; - return this; } @Override diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Instance.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Instance.java index 06c907ee7..fe882cc1b 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Instance.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Instance.java @@ -26,72 +26,64 @@ public class Instance implements Serializable { return license; } - public Instance setLicense(Field license) { + public void setLicense(Field license) { this.license = license; - return this; } public Qualifier getAccessright() { return accessright; } - public Instance setAccessright(Qualifier accessright) { + public void setAccessright(Qualifier accessright) { this.accessright = accessright; - return this; } public Qualifier getInstancetype() { return instancetype; } - public Instance setInstancetype(Qualifier instancetype) { + public void setInstancetype(Qualifier instancetype) { this.instancetype = instancetype; - return this; } public KeyValue getHostedby() { return hostedby; } - public Instance setHostedby(KeyValue hostedby) { + public void setHostedby(KeyValue hostedby) { this.hostedby = hostedby; - return this; } public List getUrl() { return url; } - public Instance setUrl(List url) { + public void setUrl(List url) { this.url = url; - return this; } public String getDistributionlocation() { return distributionlocation; } - public Instance setDistributionlocation(String distributionlocation) { + public void setDistributionlocation(String distributionlocation) { this.distributionlocation = distributionlocation; - return this; } public KeyValue getCollectedfrom() { return collectedfrom; } - public Instance setCollectedfrom(KeyValue collectedfrom) { + public void setCollectedfrom(KeyValue collectedfrom) { this.collectedfrom = collectedfrom; - return this; } public Field getDateofacceptance() { return dateofacceptance; } - public Instance setDateofacceptance(Field dateofacceptance) { + public void setDateofacceptance(Field dateofacceptance) { this.dateofacceptance = dateofacceptance; - return this; } public String toComparableString(){ diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Qualifier.java 
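
The revert in this commit follows from how Spark infers schemas for Java beans: Encoders.bean relies on JavaBeans introspection, which only recognizes void set methods, so fluent setters returning this make those properties invisible to the inferred Parquet schema. A sketch of the inference entry point; with JavaBean-style (void) setters all four Qualifier fields appear in the printed schema, while fluent setters would leave it incomplete:

import eu.dnetlib.dhp.schema.oaf.Qualifier;
import org.apache.spark.sql.Encoder;
import org.apache.spark.sql.Encoders;

public class EncoderSketch {
    public static void main(String[] args) {
        // bean encoder: properties discovered via java.beans.Introspector
        Encoder<Qualifier> enc = Encoders.bean(Qualifier.class);
        System.out.println(enc.schema().treeString());
    }
}
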
b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Qualifier.java index 00ae88c52..ae2bf1a60 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Qualifier.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Qualifier.java @@ -16,36 +16,32 @@ public class Qualifier implements Serializable { return classid; } - public Qualifier setClassid(String classid) { + public void setClassid(String classid) { this.classid = classid; - return this; } public String getClassname() { return classname; } - public Qualifier setClassname(String classname) { + public void setClassname(String classname) { this.classname = classname; - return this; } public String getSchemeid() { return schemeid; } - public Qualifier setSchemeid(String schemeid) { + public void setSchemeid(String schemeid) { this.schemeid = schemeid; - return this; } public String getSchemename() { return schemename; } - public Qualifier setSchemename(String schemename) { + public void setSchemename(String schemename) { this.schemename = schemename; - return this; } public String toComparableString() { diff --git a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Result.java b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Result.java index eb5572ce1..4554d353c 100644 --- a/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Result.java +++ b/dhp-schemas/src/main/java/eu/dnetlib/dhp/schema/oaf/Result.java @@ -210,6 +210,22 @@ public abstract class Result extends OafEntity implements Serializable { this.context = context; } + public Field getProcessingchargeamount() { + return processingchargeamount; + } + + public void setProcessingchargeamount(Field processingchargeamount) { + this.processingchargeamount = processingchargeamount; + } + + public Field getProcessingchargecurrency() { + return processingchargecurrency; + } + + public void setProcessingchargecurrency(Field processingchargecurrency) { + this.processingchargecurrency = processingchargecurrency; + } + public List getExternalReference() { return externalReference; } @@ -226,24 +242,6 @@ public abstract class Result extends OafEntity implements Serializable { this.instance = instance; } - public Field getProcessingchargeamount() { - return processingchargeamount; - } - - public Result setProcessingchargeamount(Field processingchargeamount) { - this.processingchargeamount = processingchargeamount; - return this; - } - - public Field getProcessingchargecurrency() { - return processingchargecurrency; - } - - public Result setProcessingchargecurrency(Field processingchargecurrency) { - this.processingchargecurrency = processingchargecurrency; - return this; - } - @Override public void mergeFrom(OafEntity e) { super.mergeFrom(e); From 956da2f923bf622b66c6c9695e21367656f303a9 Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Thu, 13 Feb 2020 16:49:45 +0100 Subject: [PATCH 15/21] added Saxon-HE extension functions and Transformer factory class --- dhp-common/pom.xml | 4 ++ .../saxon/AbstractExtensionFunction.java | 32 +++++++++ .../dnetlib/dhp/utils/saxon/ExtractYear.java | 67 +++++++++++++++++++ .../dhp/utils/saxon/NormalizeDate.java | 66 ++++++++++++++++++ .../eu/dnetlib/dhp/utils/saxon/PickFirst.java | 53 +++++++++++++++ .../utils/saxon/SaxonTransformerFactory.java | 30 +++++++++ 6 files changed, 252 insertions(+) create mode 100644 dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/AbstractExtensionFunction.java create mode 100644 dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/ExtractYear.java create mode 100644 
dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/NormalizeDate.java create mode 100644 dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/PickFirst.java create mode 100644 dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/SaxonTransformerFactory.java diff --git a/dhp-common/pom.xml b/dhp-common/pom.xml index 43c2a3834..ae7302b98 100644 --- a/dhp-common/pom.xml +++ b/dhp-common/pom.xml @@ -42,6 +42,10 @@ com.rabbitmq amqp-client + + net.sf.saxon + Saxon-HE + diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/AbstractExtensionFunction.java b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/AbstractExtensionFunction.java new file mode 100644 index 000000000..bd3962440 --- /dev/null +++ b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/AbstractExtensionFunction.java @@ -0,0 +1,32 @@ +package eu.dnetlib.dhp.utils.saxon; + +import net.sf.saxon.expr.XPathContext; +import net.sf.saxon.lib.ExtensionFunctionCall; +import net.sf.saxon.lib.ExtensionFunctionDefinition; +import net.sf.saxon.om.Sequence; +import net.sf.saxon.om.StructuredQName; +import net.sf.saxon.trans.XPathException; + +public abstract class AbstractExtensionFunction extends ExtensionFunctionDefinition { + + public static String DEFAULT_SAXON_EXT_NS_URI = "http://www.d-net.research-infrastructures.eu/saxon-extension"; + + public abstract String getName(); + public abstract Sequence doCall(XPathContext context, Sequence[] arguments) throws XPathException; + + @Override + public StructuredQName getFunctionQName() { + return new StructuredQName("dnet", DEFAULT_SAXON_EXT_NS_URI, getName()); + } + + @Override + public ExtensionFunctionCall makeCallExpression() { + return new ExtensionFunctionCall() { + @Override + public Sequence call(XPathContext context, Sequence[] arguments) throws XPathException { + return doCall(context, arguments); + } + }; + } + +} \ No newline at end of file diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/ExtractYear.java b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/ExtractYear.java new file mode 100644 index 000000000..f90e2a23e --- /dev/null +++ b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/ExtractYear.java @@ -0,0 +1,67 @@ +package eu.dnetlib.dhp.utils.saxon; + +import net.sf.saxon.expr.XPathContext; +import net.sf.saxon.om.Item; +import net.sf.saxon.om.Sequence; +import net.sf.saxon.trans.XPathException; +import net.sf.saxon.value.SequenceType; +import net.sf.saxon.value.StringValue; + +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Calendar; +import java.util.GregorianCalendar; + +public class ExtractYear extends AbstractExtensionFunction { + + private static final String[] dateFormats = { "yyyy-MM-dd", "yyyy/MM/dd" }; + + @Override + public String getName() { + return "extractYear"; + } + + @Override + public Sequence doCall(XPathContext context, Sequence[] arguments) throws XPathException { + if (arguments == null | arguments.length == 0) { + return new StringValue(""); + } + final Item item = arguments[0].head(); + if (item == null) { + return new StringValue(""); + } + return new StringValue(_year(item.getStringValue())); + } + + @Override + public int getMinimumNumberOfArguments() { + return 0; + } + + @Override + public int getMaximumNumberOfArguments() { + return 1; + } + + @Override + public SequenceType[] getArgumentTypes() { + return new SequenceType[] { SequenceType.OPTIONAL_ITEM }; + } + + @Override + public SequenceType getResultType(SequenceType[] suppliedArgumentTypes) { + return 
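/*
 * Note on the argument guard in doCall(...) above:
 * "arguments == null | arguments.length == 0" uses the non-short-circuiting
 * bitwise OR, so arguments.length is evaluated even when arguments is null
 * and the guard itself throws a NullPointerException instead of returning
 * the empty string. The short-circuit form is almost certainly what is
 * intended:
 *
 *   if (arguments == null || arguments.length == 0) {
 *       return new StringValue("");
 *   }
 *
 * The same pattern recurs in NormalizeDate and PickFirst below.
 */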
SequenceType.SINGLE_STRING; + } + + private String _year(String s) { + Calendar c = new GregorianCalendar(); + for (String format : dateFormats) { + try { + c.setTime(new SimpleDateFormat(format).parse(s)); + String year = String.valueOf(c.get(Calendar.YEAR)); + return year; + } catch (ParseException e) {} + } + return ""; + } +} diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/NormalizeDate.java b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/NormalizeDate.java new file mode 100644 index 000000000..634e08788 --- /dev/null +++ b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/NormalizeDate.java @@ -0,0 +1,66 @@ +package eu.dnetlib.dhp.utils.saxon; + +import net.sf.saxon.expr.XPathContext; +import net.sf.saxon.om.Sequence; +import net.sf.saxon.trans.XPathException; +import net.sf.saxon.value.SequenceType; +import net.sf.saxon.value.StringValue; + +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Date; + +public class NormalizeDate extends AbstractExtensionFunction { + + private static final String[] normalizeDateFormats = { "yyyy-MM-dd'T'hh:mm:ss", "yyyy-MM-dd", "yyyy/MM/dd", "yyyy" }; + + private static final String normalizeOutFormat = new String("yyyy-MM-dd'T'hh:mm:ss'Z'"); + + @Override + public String getName() { + return "normalizeDate"; + } + + @Override + public Sequence doCall(XPathContext context, Sequence[] arguments) throws XPathException { + if (arguments == null | arguments.length == 0) { + return new StringValue(""); + } + String s = arguments[0].head().getStringValue(); + return new StringValue(_year(s)); + } + + @Override + public int getMinimumNumberOfArguments() { + return 0; + } + + @Override + public int getMaximumNumberOfArguments() { + return 1; + } + + @Override + public SequenceType[] getArgumentTypes() { + return new SequenceType[] { SequenceType.OPTIONAL_ITEM }; + } + + @Override + public SequenceType getResultType(SequenceType[] suppliedArgumentTypes) { + return SequenceType.SINGLE_STRING; + } + + private String _year(String s) { + final String date = s != null ? s.trim() : ""; + + for (String format : normalizeDateFormats) { + try { + Date parse = new SimpleDateFormat(format).parse(date); + String res = new SimpleDateFormat(normalizeOutFormat).format(parse); + return res; + } catch (ParseException e) {} + } + return ""; + } + +} diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/PickFirst.java b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/PickFirst.java new file mode 100644 index 000000000..1f209bed0 --- /dev/null +++ b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/PickFirst.java @@ -0,0 +1,53 @@ +package eu.dnetlib.dhp.utils.saxon; + +import net.sf.saxon.expr.XPathContext; +import net.sf.saxon.om.Sequence; +import net.sf.saxon.trans.XPathException; +import net.sf.saxon.value.SequenceType; +import net.sf.saxon.value.StringValue; +import org.apache.commons.lang3.StringUtils; + +public class PickFirst extends AbstractExtensionFunction { + + @Override + public String getName() { + return "pickFirst"; + } + + @Override + public Sequence doCall(XPathContext context, Sequence[] arguments) throws XPathException { + if (arguments == null | arguments.length == 0) { + return new StringValue(""); + } + String s1 = arguments[0].head().getStringValue(); + + if (arguments.length > 1) { + String s2 = arguments[1].head().getStringValue(); + + return new StringValue(StringUtils.isNotBlank(s1) ? s1 : StringUtils.isNotBlank(s2) ? 
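/*
 * Two more defensive notes on these extension functions. NormalizeDate
 * formats its output with "yyyy-MM-dd'T'hh:mm:ss'Z'": lowercase "hh" is the
 * 12-hour clock field in SimpleDateFormat, so afternoon hours come out wrong
 * for an ISO-8601-style timestamp; "HH" is presumably what is meant. And
 * unlike ExtractYear, NormalizeDate and PickFirst call
 * arguments[0].head().getStringValue() without checking head() for null, so
 * an empty sequence argument raises a NullPointerException. A guarded
 * variant, as a sketch:
 *
 *   final Item item = arguments[0].head();
 *   final String s = item == null ? "" : item.getStringValue();
 */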
s2 : ""); + } else { + return new StringValue(StringUtils.isNotBlank(s1) ? s1 : ""); + } + } + + @Override + public int getMinimumNumberOfArguments() { + return 0; + } + + @Override + public int getMaximumNumberOfArguments() { + return 2; + } + + @Override + public SequenceType[] getArgumentTypes() { + return new SequenceType[] { SequenceType.OPTIONAL_ITEM }; + } + + @Override + public SequenceType getResultType(SequenceType[] suppliedArgumentTypes) { + return SequenceType.SINGLE_STRING; + } + +} diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/SaxonTransformerFactory.java b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/SaxonTransformerFactory.java new file mode 100644 index 000000000..611709ff0 --- /dev/null +++ b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/SaxonTransformerFactory.java @@ -0,0 +1,30 @@ +package eu.dnetlib.dhp.utils.saxon; + +import net.sf.saxon.Configuration; +import net.sf.saxon.TransformerFactoryImpl; + +import javax.xml.transform.Transformer; +import javax.xml.transform.TransformerException; +import javax.xml.transform.stream.StreamSource; +import java.io.StringReader; + +public class SaxonTransformerFactory { + + /** + * Creates the index record transformer from the given XSLT + * @param xslt + * @return + * @throws TransformerException + */ + public static Transformer newInstance(final String xslt) throws TransformerException { + + final TransformerFactoryImpl factory = new TransformerFactoryImpl(); + final Configuration conf = factory.getConfiguration(); + conf.registerExtensionFunction(new ExtractYear()); + conf.registerExtensionFunction(new NormalizeDate()); + conf.registerExtensionFunction(new PickFirst()); + + return factory.newTransformer(new StreamSource(new StringReader(xslt))); + } + +} From 1fee6e2b7e4b06226a3769bb961de7657b858f86 Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Thu, 13 Feb 2020 16:53:27 +0100 Subject: [PATCH 16/21] implemented XML records construction and serialization, indexing WIP --- .gitignore | 4 +- .../dhp/graph/SparkGraphImporterJob.java | 15 +- .../job-override.properties | 16 +- dhp-workflows/dhp-graph-provision/pom.xml | 46 + .../eu/dnetlib/dhp/graph/GraphJoiner.java | 151 ++- .../dnetlib/dhp/graph/GraphMappingUtils.java | 160 --- .../eu/dnetlib/dhp/graph/LinkedEntity.java | 29 - .../dhp/graph/SparkXmlIndexingJob.java | 188 ++++ .../dhp/graph/SparkXmlRecordBuilderJob.java | 38 +- .../main/java/eu/dnetlib/dhp/graph/Tuple.java | 29 - .../eu/dnetlib/dhp/graph/TupleWrapper.java | 29 - .../graph/{ => model}/EntityRelEntity.java | 3 +- .../dhp/graph/{ => model}/JoinedEntity.java | 11 +- .../eu/dnetlib/dhp/graph/model/Links.java | 6 + .../dhp/graph/{ => model}/RelatedEntity.java | 22 +- .../eu/dnetlib/dhp/graph/model/Tuple2.java | 28 + .../dhp/graph/{ => model}/TypedRow.java | 2 +- .../dnetlib/dhp/graph/utils/ContextDef.java | 51 + .../dhp/graph/utils/ContextMapper.java | 45 + .../dhp/graph/utils/GraphMappingUtils.java | 254 +++++ .../graph/utils/ISLookupClientFactory.java | 24 + .../dhp/graph/utils/LicenseComparator.java | 49 + .../utils/StreamingInputDocumentFactory.java | 253 +++++ .../dhp/graph/utils/TemplateFactory.java | 107 ++ .../dhp/graph/utils/TemplateResources.java | 54 + .../dhp/graph/utils/XmlRecordFactory.java | 962 ++++++++++++++++++ .../graph/utils/XmlSerializationUtils.java | 151 +++ .../javax.xml.transform.TransformerFactory | 1 + ...> input_params_build_adjacency_lists.json} | 3 +- .../dhp/graph/input_params_update_index.json | 7 + .../dnetlib/dhp/graph/oozie_app/workflow.xml | 45 +- 
.../eu/dnetlib/dhp/graph/template/child.st | 3 + .../eu/dnetlib/dhp/graph/template/entity.st | 10 + .../eu/dnetlib/dhp/graph/template/instance.st | 4 + .../eu/dnetlib/dhp/graph/template/record.st | 17 + .../eu/dnetlib/dhp/graph/template/rel.st | 4 + .../dnetlib/dhp/graph/template/webresource.st | 3 + .../dnetlib/dhp/graph/MappingUtilsTest.java | 24 +- .../dhp/graph/XmlRecordFactoryTest.java | 55 + .../eu/dnetlib/dhp/graph/software.json | 1 + pom.xml | 69 +- 41 files changed, 2571 insertions(+), 402 deletions(-) delete mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java delete mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntity.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlIndexingJob.java delete mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Tuple.java delete mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TupleWrapper.java rename dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/{ => model}/EntityRelEntity.java (96%) rename dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/{ => model}/JoinedEntity.java (65%) create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/Links.java rename dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/{ => model}/RelatedEntity.java (94%) create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/Tuple2.java rename dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/{ => model}/TypedRow.java (96%) create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/ContextDef.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/ContextMapper.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/GraphMappingUtils.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/ISLookupClientFactory.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/LicenseComparator.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/StreamingInputDocumentFactory.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/TemplateFactory.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/TemplateResources.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/XmlRecordFactory.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/XmlSerializationUtils.java create mode 100644 dhp-workflows/dhp-graph-provision/src/main/resources/META-INF/services/javax.xml.transform.TransformerFactory rename dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/{input_graph_parameters.json => input_params_build_adjacency_lists.json} (65%) create mode 100644 dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_params_update_index.json create mode 100644 dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/child.st create mode 100644 dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/entity.st create mode 100644 
dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/instance.st create mode 100644 dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/record.st create mode 100644 dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/rel.st create mode 100644 dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/webresource.st create mode 100644 dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/XmlRecordFactoryTest.java create mode 100644 dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/software.json diff --git a/.gitignore b/.gitignore index 3f00d9729..66fe55aa9 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,8 @@ .DS_Store .idea *.iml +*.ipr +*.iws *~ .classpath /*/.classpath @@ -18,5 +20,5 @@ /*/build /build spark-warehouse -/*/*/job-override.properties +/**/job-override.properties diff --git a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/graph/SparkGraphImporterJob.java b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/graph/SparkGraphImporterJob.java index a6a4e9291..5401b71c1 100644 --- a/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/graph/SparkGraphImporterJob.java +++ b/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/graph/SparkGraphImporterJob.java @@ -33,17 +33,12 @@ public class SparkGraphImporterJob { // Read the input file and convert it into RDD of serializable object GraphMappingUtils.types.forEach((name, clazz) -> { - final JavaRDD> inputRDD = sc.sequenceFile(inputPath + "/" + name, Text.class, Text.class) - .map(item -> new Tuple2<>(item._1.toString(), item._2.toString())); - - spark.createDataset(inputRDD - .filter(s -> s._1().equals(clazz.getName())) - .map(Tuple2::_2) - .map(s -> new ObjectMapper().readValue(s, clazz)) + spark.createDataset(sc.sequenceFile(inputPath + "/" + name, Text.class, Text.class) + .map(s -> new ObjectMapper().readValue(s._2().toString(), clazz)) .rdd(), Encoders.bean(clazz)) - .write() - .mode(SaveMode.Overwrite) - .saveAsTable(hiveDbName + "." + name); + .write() + .mode(SaveMode.Overwrite) + .saveAsTable(hiveDbName + "." 
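/*
 * Note the simplification above: the previous version filtered the sequence
 * file records on s._1().equals(clazz.getName()) before deserialising. The
 * rewritten loader assumes each file under inputPath + "/" + name contains
 * only records of that one type and feeds every value straight into
 * ObjectMapper.readValue(..., clazz); mixed-type inputs would no longer be
 * filtered out and may now fail at JSON deserialisation.
 */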
+ name); }); } diff --git a/dhp-workflows/dhp-graph-provision/job-override.properties b/dhp-workflows/dhp-graph-provision/job-override.properties index acaf16717..c7b173a14 100644 --- a/dhp-workflows/dhp-graph-provision/job-override.properties +++ b/dhp-workflows/dhp-graph-provision/job-override.properties @@ -1,5 +1,11 @@ -sparkDriverMemory=7G -sparkExecutorMemory=7G -hive_db_name=claudio -sourcePath=/tmp/db_openaireplus_services_beta.export.2019.11.06 -outputPath=/tmp/openaire_provision \ No newline at end of file +sparkDriverMemory=8G +sparkExecutorMemory=8G +#isLookupUrl=http://services.openaire.eu:8280/is/services/isLookUp +isLookupUrl=http://beta.services.openaire.eu:8280/is/services/isLookUp?wsdl +sourcePath=/tmp/db_openaireplus_services.export_dhp.2020.02.03 +outputPath=/tmp/openaire_provision +format=TMF +batchSize=1000 +sparkExecutorCoresForIndexing=1 +sparkExecutorInstances=10 +reuseRecords=false \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/pom.xml b/dhp-workflows/dhp-graph-provision/pom.xml index 62d8ac2ae..5e6beb249 100644 --- a/dhp-workflows/dhp-graph-provision/pom.xml +++ b/dhp-workflows/dhp-graph-provision/pom.xml @@ -23,6 +23,52 @@ com.jayway.jsonpath json-path + + dom4j + dom4j + + + jaxen + jaxen + + + com.mycila.xmltool + xmltool + + + org.antlr + stringtemplate + + + org.apache.solr + solr-solrj + + + com.lucidworks.spark + spark-solr + + + + org.apache.httpcomponents + httpclient + + + org.noggit + noggit + + + org.apache.zookeeper + zookeeper + + + + org.apache.cxf + cxf-rt-transports-http + + + eu.dnetlib + cnr-rmi-api + eu.dnetlib.dhp diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java index f7bf0da39..062c8886b 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphJoiner.java @@ -6,9 +6,14 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.jayway.jsonpath.DocumentContext; import com.jayway.jsonpath.JsonPath; +import eu.dnetlib.dhp.graph.model.*; +import eu.dnetlib.dhp.graph.utils.ContextMapper; +import eu.dnetlib.dhp.graph.utils.GraphMappingUtils; +import eu.dnetlib.dhp.graph.utils.XmlRecordFactory; import eu.dnetlib.dhp.schema.oaf.*; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.compress.GzipCodec; +import org.apache.hadoop.mapred.SequenceFileOutputFormat; import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaSparkContext; @@ -18,9 +23,12 @@ import scala.Tuple2; import java.io.IOException; import java.io.Serializable; +import java.util.HashSet; import java.util.List; import java.util.stream.Collectors; +import static eu.dnetlib.dhp.graph.utils.GraphMappingUtils.asRelatedEntity; + /** * Joins the graph nodes by resolving the links of distance = 1 to create an adjacency list of linked objects. 
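* The join is built entirely from id-keyed pair RDDs: entities are keyed by their own id, while
* relations are first keyed by target id so that the related entity payload can be attached, then
* re-keyed by source id and grouped, each group yielding one adjacency list. MAX_RELS (raised from
* 10 to 100 in this patch) caps the number of relationships retained per node.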
* The operation considers all the entity types (publication, dataset, software, ORP, project, datasource, organization, @@ -41,16 +49,21 @@ import java.util.stream.Collectors; */ public class GraphJoiner implements Serializable { - public static final int MAX_RELS = 10; + public static final int MAX_RELS = 100; + + public static final String schemaLocation = "https://www.openaire.eu/schema/1.0/oaf-1.0.xsd"; private SparkSession spark; + private ContextMapper contextMapper; + private String inputPath; private String outPath; - public GraphJoiner(SparkSession spark, String inputPath, String outPath) { + public GraphJoiner(SparkSession spark, ContextMapper contextMapper, String inputPath, String outPath) { this.spark = spark; + this.contextMapper = contextMapper; this.inputPath = inputPath; this.outPath = outPath; } @@ -68,7 +81,7 @@ public class GraphJoiner implements Serializable { JavaPairRDD publication = readPathEntity(sc, getInputPath(), "publication"); // create the union between all the entities - final String entitiesPath = getOutPath() + "/0_entities"; + final String entitiesPath = getOutPath() + "/entities"; datasource .union(organization) .union(project) @@ -94,102 +107,74 @@ public class GraphJoiner implements Serializable { .flatMap(p -> p.iterator()) .mapToPair(p -> new Tuple2<>(p.getRelation().getTargetId(), p)); - final String joinByTargetPath = getOutPath() + "/1_join_by_target"; - relation + //final String bySource = getOutPath() + "/1_join_by_target"; + JavaPairRDD bySource = relation .join(entities .filter(e -> !e._2().getSource().getDeleted()) - .mapToPair(e -> new Tuple2<>(e._1(), new GraphMappingUtils().pruneModel(e._2())))) + .mapToPair(e -> new Tuple2<>(e._1(), asRelatedEntity(e._2())))) .map(s -> new EntityRelEntity() .setRelation(s._2()._1().getRelation()) .setTarget(s._2()._2().getSource())) - .map(GraphMappingUtils::serialize) - .saveAsTextFile(joinByTargetPath, GzipCodec.class); - - JavaPairRDD bySource = sc.textFile(joinByTargetPath) - .map(t -> new ObjectMapper().readValue(t, EntityRelEntity.class)) .mapToPair(t -> new Tuple2<>(t.getRelation().getSourceId(), t)); - final String linkedEntityPath = getOutPath() + "/2_linked_entities"; + final XmlRecordFactory recordFactory = new XmlRecordFactory(contextMapper, false, schemaLocation, new HashSet<>()); entities .union(bySource) .groupByKey() // by source id - .map(p -> toLinkedEntity(p)) - .map(e -> new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL).writeValueAsString(e)) - .saveAsTextFile(linkedEntityPath, GzipCodec.class); - - final String joinedEntitiesPath = getOutPath() + "/3_joined_entities"; - sc.textFile(linkedEntityPath) - .map(s -> new ObjectMapper().readValue(s, LinkedEntity.class)) .map(l -> toJoinedEntity(l)) - .map(j -> new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL).writeValueAsString(j)) - .saveAsTextFile(joinedEntitiesPath); + .mapToPair(je -> new Tuple2<>( + new Text(je.getEntity().getId()), + new Text(recordFactory.build(je)))) + .saveAsHadoopFile(getOutPath() + "/xml", Text.class, Text.class, SequenceFileOutputFormat.class, GzipCodec.class); return this; } public GraphJoiner asXML() { final JavaSparkContext sc = new JavaSparkContext(getSpark().sparkContext()); + final XmlRecordFactory recordFactory = new XmlRecordFactory(contextMapper, true, "", new HashSet<>()); + final ObjectMapper mapper = new ObjectMapper(); - final String joinedEntitiesPath = getOutPath() + "/3_joined_entities"; + final String joinedEntitiesPath = getOutPath() + 
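/*
 * This "/1_joined_entities" input looks like a leftover of the staged
 * pipeline removed above (0_entities, 1_join_by_target, 2_linked_entities,
 * 3_joined_entities): after this patch adjacencyLists() writes the final
 * records directly to getOutPath() + "/xml" as a sequence file, so nothing
 * in the visible code still produces the intermediate that asXML() reads
 * here.
 */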
"/1_joined_entities"; sc.textFile(joinedEntitiesPath) - .map(s -> new ObjectMapper().readValue(s, LinkedEntity.class)) - .map(l -> toXML(l)) - .saveAsTextFile(getOutPath() + "/4_xml"); + .map(s -> mapper.readValue(s, JoinedEntity.class)) + .mapToPair(je -> new Tuple2<>(new Text(je.getEntity().getId()), new Text(recordFactory.build(je)))) + .saveAsHadoopFile(getOutPath() + "/2_xml", Text.class, Text.class, SequenceFileOutputFormat.class, GzipCodec.class); return this; } - private String toXML(LinkedEntity l) { - - return null; - } - public SparkSession getSpark() { return spark; } - public GraphJoiner setSpark(SparkSession spark) { - this.spark = spark; - return this; - } - public String getInputPath() { return inputPath; } - public GraphJoiner setInputPath(String inputPath) { - this.inputPath = inputPath; - return this; - } - public String getOutPath() { return outPath; } - public GraphJoiner setOutPath(String outPath) { - this.outPath = outPath; - return this; - } - // HELPERS private OafEntity parseOaf(final String json, final String type) { final ObjectMapper o = new ObjectMapper(); try { - switch (type) { - case "publication": + switch (GraphMappingUtils.EntityType.valueOf(type)) { + case publication: return o.readValue(json, Publication.class); - case "dataset": + case dataset: return o.readValue(json, Dataset.class); - case "otherresearchproduct": + case otherresearchproduct: return o.readValue(json, OtherResearchProduct.class); - case "software": + case software: return o.readValue(json, Software.class); - case "datasource": + case datasource: return o.readValue(json, Datasource.class); - case "organization": + case organization: return o.readValue(json, Organization.class); - case "project": + case project: return o.readValue(json, Project.class); default: throw new IllegalArgumentException("invalid type: " + type); @@ -199,56 +184,36 @@ public class GraphJoiner implements Serializable { } } - /** - * Converts the result of grouping pairs and the entities by source id to LinkedEntity - * @param p - * @return - */ - private LinkedEntity toLinkedEntity(Tuple2> p) { - final LinkedEntity e = new LinkedEntity(); - final List links = Lists.newArrayList(); + private JoinedEntity toJoinedEntity(Tuple2> p) { + final ObjectMapper o = new ObjectMapper(); + final JoinedEntity j = new JoinedEntity(); + final Links links2 = new Links(); for(EntityRelEntity rel : p._2()) { - if (rel.hasMainEntity() & e.getEntity() == null) { - e.setEntity(rel.getSource()); + if (rel.hasMainEntity() & j.getEntity() == null) { + j.setType(rel.getSource().getType()); + j.setEntity(parseOaf(rel.getSource().getOaf(), rel.getSource().getType())); } if (rel.hasRelatedEntity()) { - links.add(new Tuple() - .setRelation(rel.getRelation()) - .setTarget(rel.getTarget())); + try { + links2.add( + new eu.dnetlib.dhp.graph.model.Tuple2() + .setRelation(o.readValue(rel.getRelation().getOaf(), Relation.class)) + .setRelatedEntity(o.readValue(rel.getTarget().getOaf(), RelatedEntity.class))); + } catch (IOException e) { + throw new IllegalArgumentException(e); + } } } - e.setLinks(links); - if (e.getEntity() == null) { + j.setLinks(links2); + if (j.getEntity() == null) { throw new IllegalStateException("missing main entity on '" + p._1() + "'"); } - return e; - } - - /** - * Converts a LinkedEntity to a JoinedEntity - * @param l - * @return - */ - private JoinedEntity toJoinedEntity(LinkedEntity l) { - return new JoinedEntity().setType(l.getEntity().getType()) - .setEntity(parseOaf(l.getEntity().getOaf(), l.getEntity().getType())) 
- .setLinks(l.getLinks() - .stream() - .map(t -> { - final ObjectMapper o = new ObjectMapper(); - try { - return new Tuple2<>( - o.readValue(t.getRelation().getOaf(), Relation.class), - o.readValue(t.getTarget().getOaf(), RelatedEntity.class)); - } catch (IOException e) { - throw new IllegalArgumentException(e); - } - }).collect(Collectors.toList())); + return j; } /** * Reads a set of eu.dnetlib.dhp.schema.oaf.OafEntity objects from a sequence file , - * extracts necessary information using json path, wraps the oaf object in a eu.dnetlib.dhp.graph.TypedRow + * extracts necessary information using json path, wraps the oaf object in a eu.dnetlib.dhp.graph.model.TypedRow * @param sc * @param inputPath * @param type @@ -270,7 +235,7 @@ public class GraphJoiner implements Serializable { /** * Reads a set of eu.dnetlib.dhp.schema.oaf.Relation objects from a sequence file , - * extracts necessary information using json path, wraps the oaf object in a eu.dnetlib.dhp.graph.TypedRow + * extracts necessary information using json path, wraps the oaf object in a eu.dnetlib.dhp.graph.model.TypedRow * @param sc * @param inputPath * @return the JavaRDD containing all the relationships diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java deleted file mode 100644 index e3622cd20..000000000 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/GraphMappingUtils.java +++ /dev/null @@ -1,160 +0,0 @@ -package eu.dnetlib.dhp.graph; - -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.Maps; -import com.jayway.jsonpath.DocumentContext; -import com.jayway.jsonpath.JsonPath; -import eu.dnetlib.dhp.schema.oaf.*; -import net.minidev.json.JSONArray; -import org.apache.commons.lang3.StringUtils; - -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.stream.Collectors; - -public class GraphMappingUtils { - - public final static Map types = Maps.newHashMap(); - - static { - types.put("datasource", Datasource.class); - types.put("organization", Organization.class); - types.put("project", Project.class); - types.put("dataset", Dataset.class); - types.put("otherresearchproduct", OtherResearchProduct.class); - types.put("software", Software.class); - types.put("publication", Publication.class); - types.put("relation", Relation.class); - } - - public static EntityRelEntity pruneModel(EntityRelEntity e) { - - final DocumentContext j = JsonPath.parse(e.getSource().getOaf()); - final RelatedEntity re = new RelatedEntity().setId(j.read("$.id")).setType(e.getSource().getType()); - - switch (e.getSource().getType()) { - case "publication": - case "dataset": - case "otherresearchproduct": - case "software": - mapTitle(j, re); - re.setDateofacceptance(j.read("$.dateofacceptance.value")); - re.setPublisher(j.read("$.publisher.value")); - - JSONArray pids = j.read("$.pid"); - re.setPid(pids.stream() - .map(p -> asStructuredProperty((LinkedHashMap) p)) - .collect(Collectors.toList())); - - re.setResulttype(asQualifier(j.read("$.resulttype"))); - - JSONArray collfrom = j.read("$.collectedfrom"); - re.setCollectedfrom(collfrom.stream() - .map(c -> asKV((LinkedHashMap)c)) - .collect(Collectors.toList())); - - //TODO still to be mapped - //re.setCodeRepositoryUrl(j.read("$.coderepositoryurl")); - - break; - 
case "datasource": - re.setOfficialname(j.read("$.officialname.value")); - re.setWebsiteurl(j.read("$.websiteurl.value")); - re.setDatasourcetype(asQualifier(j.read("$.datasourcetype"))); - re.setOpenairecompatibility(asQualifier(j.read("$.openairecompatibility"))); - - break; - case "organization": - re.setLegalname(j.read("$.legalname.value")); - re.setLegalshortname(j.read("$.legalshortname.value")); - re.setCountry(asQualifier(j.read("$.country"))); - - break; - case "project": - re.setProjectTitle(j.read("$.title.value")); - re.setCode(j.read("$.code.value")); - re.setAcronym(j.read("$.acronym.value")); - re.setContracttype(asQualifier(j.read("$.contracttype"))); - - JSONArray f = j.read("$.fundingtree"); - if (!f.isEmpty()) { - re.setFundingtree(f.stream() - .map(s -> s.toString()) - .collect(Collectors.toList())); - } - - break; - } - return new EntityRelEntity().setSource( - new TypedRow() - .setSourceId(e.getSource().getSourceId()) - .setDeleted(e.getSource().getDeleted()) - .setType(e.getSource().getType()) - .setOaf(serialize(re))); - } - - private static KeyValue asKV(LinkedHashMap j) { - final KeyValue kv = new KeyValue(); - kv.setKey((String) j.get("key")); - kv.setValue((String) j.get("value")); - return kv; - } - - private static void mapTitle(DocumentContext j, RelatedEntity re) { - final JSONArray a = j.read("$.title"); - if (!a.isEmpty()) { - final StructuredProperty sp = asStructuredProperty((LinkedHashMap) a.get(0)); - if(StringUtils.isNotBlank(sp.getValue())) { - re.setTitle(sp); - } - } - } - - private static StructuredProperty asStructuredProperty(LinkedHashMap j) { - final StructuredProperty sp = new StructuredProperty(); - final String value = (String) j.get("value"); - if (StringUtils.isNotBlank(value)) { - sp.setValue((String) j.get("value")); - sp.setQualifier(asQualifier((LinkedHashMap) j.get("qualifier"))); - } - return sp; - } - - public static Qualifier asQualifier(LinkedHashMap j) { - final Qualifier q = new Qualifier(); - - final String classid = j.get("classid"); - if (StringUtils.isNotBlank(classid)) { - q.setClassid(classid); - } - - final String classname = j.get("classname"); - if (StringUtils.isNotBlank(classname)) { - q.setClassname(classname); - } - - final String schemeid = j.get("schemeid"); - if (StringUtils.isNotBlank(schemeid)) { - q.setSchemeid(schemeid); - } - - final String schemename = j.get("schemename"); - if (StringUtils.isNotBlank(schemename)) { - q.setSchemename(schemename); - } - return q; - } - - public static String serialize(final Object o) { - try { - return new ObjectMapper() - .setSerializationInclusion(JsonInclude.Include.NON_NULL) - .writeValueAsString(o); - } catch (JsonProcessingException e) { - throw new IllegalArgumentException("unable to serialize: " + o.toString(), e); - } - } - -} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntity.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntity.java deleted file mode 100644 index 9e6fc0d38..000000000 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/LinkedEntity.java +++ /dev/null @@ -1,29 +0,0 @@ -package eu.dnetlib.dhp.graph; - -import java.io.Serializable; -import java.util.List; - -public class LinkedEntity implements Serializable { - - private TypedRow entity; - - private List links; - - public TypedRow getEntity() { - return entity; - } - - public LinkedEntity setEntity(TypedRow entity) { - this.entity = entity; - return this; - } - - public List getLinks() { - return 
links; - } - - public LinkedEntity setLinks(List links) { - this.links = links; - return this; - } -} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlIndexingJob.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlIndexingJob.java new file mode 100644 index 000000000..e13f8bbe2 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlIndexingJob.java @@ -0,0 +1,188 @@ +package eu.dnetlib.dhp.graph; + +import com.lucidworks.spark.util.SolrSupport; +import eu.dnetlib.dhp.application.ArgumentApplicationParser; +import eu.dnetlib.dhp.graph.utils.ISLookupClientFactory; +import eu.dnetlib.dhp.graph.utils.StreamingInputDocumentFactory; +import eu.dnetlib.dhp.utils.saxon.SaxonTransformerFactory; +import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpDocumentNotFoundException; +import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException; +import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.io.Text; +import org.apache.solr.common.SolrInputDocument; +import org.apache.spark.SparkConf; +import org.apache.spark.api.java.JavaSparkContext; +import org.apache.spark.rdd.RDD; +import org.apache.spark.sql.SparkSession; + +import javax.xml.transform.Transformer; +import javax.xml.transform.TransformerException; +import javax.xml.transform.stream.StreamResult; +import javax.xml.transform.stream.StreamSource; +import java.io.IOException; +import java.io.StringReader; +import java.io.StringWriter; +import java.text.SimpleDateFormat; +import java.util.Date; + +public class SparkXmlIndexingJob { + + private static final Log log = LogFactory.getLog(SparkXmlIndexingJob.class); + + private static final Integer DEFAULT_BATCH_SIZE = 1000; + + private static final String LAYOUT = "index"; + + public static void main(String[] args) throws Exception { + + final ArgumentApplicationParser parser = new ArgumentApplicationParser(IOUtils.toString(SparkXmlIndexingJob.class.getResourceAsStream("/eu/dnetlib/dhp/graph/input_params_update_index.json"))); + parser.parseArgument(args); + + final String inputPath = parser.get("sourcePath"); + final String isLookupUrl = parser.get("isLookupUrl"); + final String format = parser.get("format"); + final Integer batchSize = parser.getObjectMap().containsKey("batckSize") ? 
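/*
 * "batckSize" above is almost certainly a typo for "batchSize": since the
 * argument map will never contain the misspelled key, the ternary always
 * falls through to DEFAULT_BATCH_SIZE and the batchSize argument is silently
 * ignored. The intended guard would read:
 *
 *   parser.getObjectMap().containsKey("batchSize")
 *       ? Integer.valueOf(parser.get("batchSize"))
 *       : DEFAULT_BATCH_SIZE;
 */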
Integer.valueOf(parser.get("batchSize")) : DEFAULT_BATCH_SIZE; + + final ISLookUpService isLookup = ISLookupClientFactory.getLookUpService(isLookupUrl); + final String fields = getLayoutSource(isLookup, format); + final String xslt = getLayoutTransformer(isLookup); + + final String dsId = getDsId(format, isLookup); + final String zkHost = getZkHost(isLookup); + final String version = getRecordDatestamp(); + + final String indexRecordXslt = getLayoutTransformer(format, fields, xslt); + + log.info("indexRecordTransformer: " + indexRecordXslt); + + final String master = parser.get("master"); + final SparkConf conf = new SparkConf() + .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer"); + + try(SparkSession spark = getSession(conf, master)) { + + final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext()); + + RDD docs = sc.sequenceFile(inputPath, Text.class, Text.class) + .map(t -> t._2().toString()) + .map(s -> toIndexRecord(SaxonTransformerFactory.newInstance(indexRecordXslt), s)) + .map(s -> new StreamingInputDocumentFactory(version, dsId).parseDocument(s)) + .rdd(); + + SolrSupport.indexDocs(zkHost, format + "-" + LAYOUT + "-openaire", batchSize, docs); + } + } + + private static SparkSession getSession(SparkConf conf, String master) { + return SparkSession + .builder() + .config(conf) + .appName(SparkXmlRecordBuilderJob.class.getSimpleName()) + .master(master) + .getOrCreate(); + } + + private static String toIndexRecord(Transformer tr, final String record) { + final StreamResult res = new StreamResult(new StringWriter()); + try { + tr.transform(new StreamSource(new StringReader(record)), res); + return res.getWriter().toString(); + } catch (Throwable e) { + System.out.println("XPathException on record:\n" + record); + throw new IllegalArgumentException(e); + } + } + + /** + * Creates the XSLT responsible for building the index xml records. 
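+ * The construction is two-staged: the field layout retrieved from the information system is run
+ * through the openaireLayoutToRecordStylesheet (parameterised with the format name), and the output
+ * of that transformation is itself the XSLT that is later compiled and applied to each record.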
+ * + * @param format Metadata format name (DMF|TMF) + * @param xslt xslt for building the index record transformer + * @param fields the list of fields + * @return the javax.xml.transform.Transformer + * @throws ISLookUpException could happen + * @throws IOException could happen + * @throws TransformerException could happen + */ + private static String getLayoutTransformer(String format, String fields, String xslt) throws TransformerException { + + final Transformer layoutTransformer = SaxonTransformerFactory.newInstance(xslt); + final StreamResult layoutToXsltXslt = new StreamResult(new StringWriter()); + + layoutTransformer.setParameter("format", format); + layoutTransformer.transform(new StreamSource(new StringReader(fields)), layoutToXsltXslt); + + return layoutToXsltXslt.getWriter().toString(); + } + + /** + * method return a solr-compatible string representation of a date, used to mark all records as indexed today + * @return the parsed date + */ + public static String getRecordDatestamp() { + return new SimpleDateFormat("yyyy-MM-dd'T'hh:mm:ss'Z'").format(new Date()); + } + + /** + * Method retrieves from the information system the list of fields associated to the given MDFormat name + * + * @param isLookup the ISLookup service stub + * @param format the Metadata format name + * @return the string representation of the list of fields to be indexed + * + * @throws ISLookUpDocumentNotFoundException + * @throws ISLookUpException + */ + private static String getLayoutSource(final ISLookUpService isLookup, final String format) throws ISLookUpDocumentNotFoundException, ISLookUpException { + return doLookup(isLookup, String.format( + "collection('')//RESOURCE_PROFILE[.//RESOURCE_TYPE/@value = 'MDFormatDSResourceType' and .//NAME='%s']//LAYOUT[@name='%s']", format, LAYOUT)); + } + + /** + * Method retrieves from the information system the openaireLayoutToRecordStylesheet + * + * @param isLookup the ISLookup service stub + * @return the string representation of the XSLT contained in the transformation rule profile + * + * @throws ISLookUpDocumentNotFoundException + * @throws ISLookUpException + */ + private static String getLayoutTransformer(ISLookUpService isLookup) throws ISLookUpException { + return doLookup(isLookup, "collection('/db/DRIVER/TransformationRuleDSResources/TransformationRuleDSResourceType')" + + "//RESOURCE_PROFILE[./BODY/CONFIGURATION/SCRIPT/TITLE/text() = 'openaireLayoutToRecordStylesheet']//CODE/node()"); + } + + /** + * Method retrieves from the information system the IndexDS profile ID associated to the given MDFormat name + * @param format + * @param isLookup + * @return the IndexDS identifier + * @throws ISLookUpException + */ + private static String getDsId(String format, ISLookUpService isLookup) throws ISLookUpException { + return doLookup(isLookup, String.format("collection('/db/DRIVER/IndexDSResources/IndexDSResourceType')" + + "//RESOURCE_PROFILE[./BODY/CONFIGURATION/METADATA_FORMAT/text() = '%s']//RESOURCE_IDENTIFIER/@value/string()", format)); + } + + /** + * Method retrieves from the information system the zookeeper quorum of the Solr server + * @param isLookup + * @return the zookeeper quorum of the Solr server + * @throws ISLookUpException + */ + private static String getZkHost(ISLookUpService isLookup) throws ISLookUpException { + return doLookup(isLookup, "for $x in /RESOURCE_PROFILE[.//RESOURCE_TYPE/@value='IndexServiceResourceType'] return $x//PROTOCOL[./@name='solr']/@address/string()"); + } + + private static String doLookup(ISLookUpService isLookup, 
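/*
 * One performance note on the pipeline in main() above:
 * SaxonTransformerFactory.newInstance(indexRecordXslt) is called inside the
 * per-record map, so the XSLT is recompiled for every document. Transformer
 * instances are neither thread-safe nor serializable, so the compilation
 * cannot simply move to the driver, but it could be amortised per partition.
 * A sketch of the idea (hypothetical rewrite, same types as above):
 *
 *   sc.sequenceFile(inputPath, Text.class, Text.class)
 *     .map(t -> t._2().toString())
 *     .mapPartitions(it -> {
 *         final Transformer tr = SaxonTransformerFactory.newInstance(indexRecordXslt);
 *         final List<String> out = new ArrayList<>();
 *         while (it.hasNext()) {
 *             out.add(toIndexRecord(tr, it.next()));
 *         }
 *         return out.iterator();
 *     });
 */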
String xquery) throws ISLookUpException { + log.info(String.format("running xquery: %s", xquery)); + final String res = isLookup.getResourceProfileByQuery(xquery); + log.info(String.format("got response (100 chars): %s", StringUtils.left(res, 100) + " ...")); + return res; + } + +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlRecordBuilderJob.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlRecordBuilderJob.java index 38bc2bae2..0b2180f19 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlRecordBuilderJob.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlRecordBuilderJob.java @@ -1,6 +1,7 @@ package eu.dnetlib.dhp.graph; import eu.dnetlib.dhp.application.ArgumentApplicationParser; +import eu.dnetlib.dhp.graph.utils.ContextMapper; import org.apache.commons.io.IOUtils; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -11,30 +12,37 @@ public class SparkXmlRecordBuilderJob { public static void main(String[] args) throws Exception { - final ArgumentApplicationParser parser = new ArgumentApplicationParser(IOUtils.toString(SparkXmlRecordBuilderJob.class.getResourceAsStream("/eu/dnetlib/dhp/graph/input_graph_parameters.json"))); + final ArgumentApplicationParser parser = new ArgumentApplicationParser(IOUtils.toString(SparkXmlRecordBuilderJob.class.getResourceAsStream("/eu/dnetlib/dhp/graph/input_params_build_adjacency_lists.json"))); parser.parseArgument(args); + final String master = parser.get("master"); final SparkConf conf = new SparkConf() .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer"); - final SparkSession spark = SparkSession + try(SparkSession spark = getSession(conf, master)) { + + final String inputPath = parser.get("sourcePath"); + final String outputPath = parser.get("outputPath"); + final String isLookupUrl = parser.get("isLookupUrl"); + + final FileSystem fs = FileSystem.get(spark.sparkContext().hadoopConfiguration()); + if (fs.exists(new Path(outputPath))) { + fs.delete(new Path(outputPath), true); + fs.mkdirs(new Path(outputPath)); + } + + new GraphJoiner(spark, ContextMapper.fromIS(isLookupUrl), inputPath, outputPath) + .adjacencyLists(); + } + } + + private static SparkSession getSession(SparkConf conf, String master) { + return SparkSession .builder() .config(conf) .appName(SparkXmlRecordBuilderJob.class.getSimpleName()) - .master(parser.get("master")) + .master(master) .getOrCreate(); - - final String inputPath = parser.get("sourcePath"); - final String outputPath = parser.get("outputPath"); - - final FileSystem fs = FileSystem.get(spark.sparkContext().hadoopConfiguration()); - if (fs.exists(new Path(outputPath))) { - fs.delete(new Path(outputPath), true); - fs.mkdirs(new Path(outputPath)); - } - - new GraphJoiner(spark, inputPath, outputPath) - .adjacencyLists(); } } diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Tuple.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Tuple.java deleted file mode 100644 index 1eb0491a7..000000000 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/Tuple.java +++ /dev/null @@ -1,29 +0,0 @@ -package eu.dnetlib.dhp.graph; - -import java.io.Serializable; - -public class Tuple implements Serializable { - - private TypedRow relation; - - private TypedRow target; - - - public TypedRow getRelation() { - return relation; - } - - public Tuple setRelation(TypedRow 
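/*
 * Tuple (deleted here) and TupleWrapper (deleted just below) were
 * structurally identical relation/target holders; this patch replaces both
 * with the single eu.dnetlib.dhp.graph.model.Tuple2, which pairs the parsed
 * Relation with its RelatedEntity payload instead of two raw TypedRows.
 */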
relation) { - this.relation = relation; - return this; - } - - public TypedRow getTarget() { - return target; - } - - public Tuple setTarget(TypedRow target) { - this.target = target; - return this; - } -} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TupleWrapper.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TupleWrapper.java deleted file mode 100644 index eb60e1474..000000000 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TupleWrapper.java +++ /dev/null @@ -1,29 +0,0 @@ -package eu.dnetlib.dhp.graph; - -import java.io.Serializable; - -public class TupleWrapper implements Serializable { - - private TypedRow relation; - - private TypedRow target; - - - public TypedRow getRelation() { - return relation; - } - - public TupleWrapper setRelation(TypedRow relation) { - this.relation = relation; - return this; - } - - public TypedRow getTarget() { - return target; - } - - public TupleWrapper setTarget(TypedRow target) { - this.target = target; - return this; - } -} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityRelEntity.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/EntityRelEntity.java similarity index 96% rename from dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityRelEntity.java rename to dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/EntityRelEntity.java index 285cacbc0..8c08337e2 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/EntityRelEntity.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/EntityRelEntity.java @@ -1,4 +1,4 @@ -package eu.dnetlib.dhp.graph; +package eu.dnetlib.dhp.graph.model; import java.io.Serializable; @@ -15,7 +15,6 @@ public class EntityRelEntity implements Serializable { this.source = source; } - //helpers public Boolean hasMainEntity() { return getSource() != null & getRelation() == null & getTarget() == null; diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/JoinedEntity.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/JoinedEntity.java similarity index 65% rename from dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/JoinedEntity.java rename to dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/JoinedEntity.java index d65eb64c8..f89273a0d 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/JoinedEntity.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/JoinedEntity.java @@ -1,11 +1,8 @@ -package eu.dnetlib.dhp.graph; +package eu.dnetlib.dhp.graph.model; import eu.dnetlib.dhp.schema.oaf.OafEntity; -import eu.dnetlib.dhp.schema.oaf.Relation; -import scala.Tuple2; import java.io.Serializable; -import java.util.List; public class JoinedEntity implements Serializable { @@ -13,7 +10,7 @@ public class JoinedEntity implements Serializable { private OafEntity entity; - private List> links; + private Links links; public String getType() { return type; @@ -33,11 +30,11 @@ public class JoinedEntity implements Serializable { return this; } - public List> getLinks() { + public Links getLinks() { return links; } - public JoinedEntity setLinks(List> links) { + public JoinedEntity setLinks(Links links) { this.links = links; return this; } diff --git 
a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/Links.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/Links.java new file mode 100644 index 000000000..96ad67b0c --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/Links.java @@ -0,0 +1,6 @@ +package eu.dnetlib.dhp.graph.model; + +import java.util.ArrayList; + +public class Links extends ArrayList { +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/RelatedEntity.java similarity index 94% rename from dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java rename to dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/RelatedEntity.java index 50b97dace..baeff1c6a 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/RelatedEntity.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/RelatedEntity.java @@ -1,5 +1,6 @@ -package eu.dnetlib.dhp.graph; +package eu.dnetlib.dhp.graph.model; +import eu.dnetlib.dhp.schema.oaf.Instance; import eu.dnetlib.dhp.schema.oaf.KeyValue; import eu.dnetlib.dhp.schema.oaf.Qualifier; import eu.dnetlib.dhp.schema.oaf.StructuredProperty; @@ -25,6 +26,7 @@ public class RelatedEntity implements Serializable { private String codeRepositoryUrl; private Qualifier resulttype; private List collectedfrom; + private List instances; // datasource private String officialname; @@ -45,14 +47,6 @@ public class RelatedEntity implements Serializable { private Qualifier contracttype; private List fundingtree; - public static RelatedEntity parse(final String json) { - try { - return new ObjectMapper().readValue(json, RelatedEntity.class); - } catch (IOException e) { - throw new IllegalArgumentException("invalid RelatedEntity, cannot parse: " + json); - } - } - public String getId() { return id; } @@ -125,6 +119,15 @@ public class RelatedEntity implements Serializable { return this; } + public List getInstances() { + return instances; + } + + public RelatedEntity setInstances(List instances) { + this.instances = instances; + return this; + } + public String getOfficialname() { return officialname; } @@ -250,4 +253,5 @@ public class RelatedEntity implements Serializable { this.type = type; return this; } + } \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/Tuple2.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/Tuple2.java new file mode 100644 index 000000000..ab965808b --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/Tuple2.java @@ -0,0 +1,28 @@ +package eu.dnetlib.dhp.graph.model; + +import eu.dnetlib.dhp.schema.oaf.Relation; + +public class Tuple2 { + + private Relation relation; + + private RelatedEntity relatedEntity; + + public Relation getRelation() { + return relation; + } + + public Tuple2 setRelation(Relation relation) { + this.relation = relation; + return this; + } + + public RelatedEntity getRelatedEntity() { + return relatedEntity; + } + + public Tuple2 setRelatedEntity(RelatedEntity relatedEntity) { + this.relatedEntity = relatedEntity; + return this; + } +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TypedRow.java 
b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/TypedRow.java similarity index 96% rename from dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TypedRow.java rename to dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/TypedRow.java index 1acbbce93..3651e28c9 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/TypedRow.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/model/TypedRow.java @@ -1,4 +1,4 @@ -package eu.dnetlib.dhp.graph; +package eu.dnetlib.dhp.graph.model; import java.io.Serializable; diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/ContextDef.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/ContextDef.java new file mode 100644 index 000000000..05d9456f6 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/ContextDef.java @@ -0,0 +1,51 @@ +package eu.dnetlib.dhp.graph.utils; + +import java.io.Serializable; + +public class ContextDef implements Serializable { + + private String id; + private String label; + private String name; + private String type; + + public ContextDef(final String id, final String label, final String name, final String type) { + super(); + this.setId(id); + this.setLabel(label); + this.setName(name); + this.setType(type); + } + + public String getLabel() { + return label; + } + + public void setLabel(final String label) { + this.label = label; + } + + public String getId() { + return id; + } + + public void setId(final String id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(final String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(final String type) { + this.type = type; + } +} \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/ContextMapper.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/ContextMapper.java new file mode 100644 index 000000000..0c3a481d0 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/ContextMapper.java @@ -0,0 +1,45 @@ +package eu.dnetlib.dhp.graph.utils; + +import com.google.common.base.Joiner; +import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException; +import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService; +import org.dom4j.Document; +import org.dom4j.DocumentException; +import org.dom4j.Node; +import org.dom4j.io.SAXReader; + +import java.io.Serializable; +import java.io.StringReader; +import java.util.HashMap; + +public class ContextMapper extends HashMap implements Serializable { + + private static final long serialVersionUID = 2159682308502487305L; + + private final static String XQUERY = "for $x in //RESOURCE_PROFILE[.//RESOURCE_TYPE/@value='ContextDSResourceType']//*[name()='context' or name()='category' or name()='concept'] return "; + + public static ContextMapper fromIS(final String isLookupUrl) throws DocumentException, ISLookUpException { + ISLookUpService isLookUp = ISLookupClientFactory.getLookUpService(isLookupUrl); + StringBuilder sb = new StringBuilder(""); + Joiner.on("").appendTo(sb, isLookUp.quickSearchProfile(XQUERY)); + sb.append(""); + return fromXml(sb.toString()); + } + + public static ContextMapper fromXml(final String xml) throws DocumentException { + final ContextMapper contextMapper = new ContextMapper(); + + 
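/*
 * fromIS(...) above wraps the fragments returned by the XQuery in a
 * synthetic root element before delegating to fromXml(...), which selects
 * the "//entry" nodes here. Judging from the attributes read below, each
 * fragment is assumed to be an element of the shape
 *
 *   <entry id="..." label="..." name="..." type="..."/>
 *
 * and is keyed into the map by its id attribute.
 */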
final Document doc = new SAXReader().read(new StringReader(xml)); + for (Object o : doc.selectNodes("//entry")) { + Node node = (Node) o; + String id = node.valueOf("./@id"); + String label = node.valueOf("./@label"); + String name = node.valueOf("./@name"); + String type = node.valueOf("./@type") + ""; + + contextMapper.put(id, new ContextDef(id, label, name, type)); + } + return contextMapper; + } + +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/GraphMappingUtils.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/GraphMappingUtils.java new file mode 100644 index 000000000..0921fe105 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/GraphMappingUtils.java @@ -0,0 +1,254 @@ +package eu.dnetlib.dhp.graph.utils; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.base.Predicate; +import com.google.common.collect.BiMap; +import com.google.common.collect.HashBiMap; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; +import com.jayway.jsonpath.DocumentContext; +import com.jayway.jsonpath.JsonPath; +import eu.dnetlib.dhp.graph.model.EntityRelEntity; +import eu.dnetlib.dhp.graph.model.RelatedEntity; +import eu.dnetlib.dhp.graph.model.TypedRow; +import eu.dnetlib.dhp.schema.oaf.*; +import net.minidev.json.JSONArray; +import org.apache.commons.lang3.StringUtils; + +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.apache.commons.lang3.StringUtils.*; + +public class GraphMappingUtils { + + public enum EntityType { + publication, dataset, otherresearchproduct, software, datasource, organization, project + } + + public enum MainEntityType { + result, datasource, organization, project + } + + public static Set authorPidTypes = Sets.newHashSet("orcid", "magidentifier"); + + public static Set instanceFieldFilter = Sets.newHashSet("instancetype", "hostedby", "license", "accessright", "collectedfrom", "dateofacceptance", "distributionlocation"); + + private static BiMap relClassMapping = HashBiMap.create(); + + static { + relClassMapping.put("isAuthorInstitutionOf", "hasAuthorInstitution"); + relClassMapping.put("isMergedIn", "merges"); + relClassMapping.put("isProducedBy", "produces"); + relClassMapping.put("hasParticipant", "isParticipant"); + relClassMapping.put("isProvidedBy", "provides"); + relClassMapping.put("isRelatedTo", "isRelatedTo"); + relClassMapping.put("isAmongTopNSimilarDocuments", "hasAmongTopNSimilarDocuments"); + relClassMapping.put("isRelatedTo", "isRelatedTo"); + relClassMapping.put("isSupplementTo", "isSupplementedBy"); + } + + public static String getInverseRelClass(final String relClass) { + String res = relClassMapping.get(relClass); + if (isNotBlank(res)) { + return res; + } + res = relClassMapping.inverse().get(relClass); + + if (isNotBlank(res)) { + return res; + } + + throw new IllegalArgumentException("unable to find an inverse relationship class for term: " + relClass); + } + + private static final String schemeTemplate = "dnet:%s_%s_relations"; + + private static Map entityMapping = Maps.newHashMap(); + + static { + entityMapping.put(EntityType.publication, MainEntityType.result); + entityMapping.put(EntityType.dataset, MainEntityType.result); + entityMapping.put(EntityType.otherresearchproduct, 
MainEntityType.result); + entityMapping.put(EntityType.software, MainEntityType.result); + entityMapping.put(EntityType.datasource, MainEntityType.datasource); + entityMapping.put(EntityType.organization, MainEntityType.organization); + entityMapping.put(EntityType.project, MainEntityType.project); + } + + public static String getScheme(final String sourceType, final String targetType) { + return String.format(schemeTemplate, + entityMapping.get(EntityType.valueOf(sourceType)).name(), + entityMapping.get(EntityType.valueOf(targetType)).name()); + } + + public static String getMainType(final String type) { + return entityMapping.get(EntityType.valueOf(type)).name(); + } + + public static boolean isResult(String type) { + return MainEntityType.result.name().equals(getMainType(type)); + } + + public static Predicate instanceFilter = s -> instanceFieldFilter.contains(s); + + public static EntityRelEntity asRelatedEntity(EntityRelEntity e) { + + final DocumentContext j = JsonPath.parse(e.getSource().getOaf()); + final RelatedEntity re = new RelatedEntity().setId(j.read("$.id")).setType(e.getSource().getType()); + + switch (EntityType.valueOf(e.getSource().getType())) { + case publication: + case dataset: + case otherresearchproduct: + case software: + mapTitle(j, re); + re.setDateofacceptance(j.read("$.dateofacceptance.value")); + re.setPublisher(j.read("$.publisher.value")); + + JSONArray pids = j.read("$.pid"); + re.setPid(pids.stream() + .map(p -> asStructuredProperty((LinkedHashMap) p)) + .collect(Collectors.toList())); + + re.setResulttype(asQualifier(j.read("$.resulttype"))); + + JSONArray collfrom = j.read("$.collectedfrom"); + re.setCollectedfrom(collfrom.stream() + .map(c -> asKV((LinkedHashMap) c)) + .collect(Collectors.toList())); + + // will throw exception when the instance is not found + JSONArray instances = j.read("$.instance"); + re.setInstances(instances.stream() + .map(i -> { + final LinkedHashMap p = (LinkedHashMap) i; + final Field license = new Field(); + license.setValue((String) ((LinkedHashMap) p.get("license")).get("value")); + final Instance instance = new Instance(); + instance.setLicense(license); + instance.setAccessright(asQualifier((LinkedHashMap) p.get("accessright"))); + instance.setInstancetype(asQualifier((LinkedHashMap) p.get("instancetype"))); + instance.setHostedby(asKV((LinkedHashMap) p.get("hostedby"))); + //TODO mapping of distributionlocation + instance.setCollectedfrom(asKV((LinkedHashMap) p.get("collectedfrom"))); + + Field dateofacceptance = new Field(); + dateofacceptance.setValue((String) ((LinkedHashMap) p.get("dateofacceptance")).get("value")); + instance.setDateofacceptance(dateofacceptance); + return instance; + }).collect(Collectors.toList())); + + //TODO still to be mapped + //re.setCodeRepositoryUrl(j.read("$.coderepositoryurl")); + + break; + case datasource: + re.setOfficialname(j.read("$.officialname.value")); + re.setWebsiteurl(j.read("$.websiteurl.value")); + re.setDatasourcetype(asQualifier(j.read("$.datasourcetype"))); + re.setOpenairecompatibility(asQualifier(j.read("$.openairecompatibility"))); + + break; + case organization: + re.setLegalname(j.read("$.legalname.value")); + re.setLegalshortname(j.read("$.legalshortname.value")); + re.setCountry(asQualifier(j.read("$.country"))); + + break; + case project: + re.setProjectTitle(j.read("$.title.value")); + re.setCode(j.read("$.code.value")); + re.setAcronym(j.read("$.acronym.value")); + re.setContracttype(asQualifier(j.read("$.contracttype"))); + + JSONArray f = 
j.read("$.fundingtree"); + if (!f.isEmpty()) { + re.setFundingtree(f.stream() + .map(s -> ((LinkedHashMap) s).get("value")) + .collect(Collectors.toList())); + } + + break; + } + return new EntityRelEntity().setSource( + new TypedRow() + .setSourceId(e.getSource().getSourceId()) + .setDeleted(e.getSource().getDeleted()) + .setType(e.getSource().getType()) + .setOaf(serialize(re))); + } + + private static KeyValue asKV(LinkedHashMap j) { + final KeyValue kv = new KeyValue(); + kv.setKey((String) j.get("key")); + kv.setValue((String) j.get("value")); + return kv; + } + + private static void mapTitle(DocumentContext j, RelatedEntity re) { + final JSONArray a = j.read("$.title"); + if (!a.isEmpty()) { + final StructuredProperty sp = asStructuredProperty((LinkedHashMap) a.get(0)); + if (StringUtils.isNotBlank(sp.getValue())) { + re.setTitle(sp); + } + } + } + + private static StructuredProperty asStructuredProperty(LinkedHashMap j) { + final StructuredProperty sp = new StructuredProperty(); + final String value = (String) j.get("value"); + if (StringUtils.isNotBlank(value)) { + sp.setValue((String) j.get("value")); + sp.setQualifier(asQualifier((LinkedHashMap) j.get("qualifier"))); + } + return sp; + } + + public static Qualifier asQualifier(LinkedHashMap j) { + final Qualifier q = new Qualifier(); + + final String classid = j.get("classid"); + if (StringUtils.isNotBlank(classid)) { + q.setClassid(classid); + } + + final String classname = j.get("classname"); + if (StringUtils.isNotBlank(classname)) { + q.setClassname(classname); + } + + final String schemeid = j.get("schemeid"); + if (StringUtils.isNotBlank(schemeid)) { + q.setSchemeid(schemeid); + } + + final String schemename = j.get("schemename"); + if (StringUtils.isNotBlank(schemename)) { + q.setSchemename(schemename); + } + return q; + } + + public static String serialize(final Object o) { + try { + return new ObjectMapper() + .setSerializationInclusion(JsonInclude.Include.NON_NULL) + .writeValueAsString(o); + } catch (JsonProcessingException e) { + throw new IllegalArgumentException("unable to serialize: " + o.toString(), e); + } + } + + public static String removePrefix(final String s) { + if (s.contains("|")) return substringAfter(s, "|"); + return s; + } + + +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/ISLookupClientFactory.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/ISLookupClientFactory.java new file mode 100644 index 000000000..d87f29452 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/ISLookupClientFactory.java @@ -0,0 +1,24 @@ +package eu.dnetlib.dhp.graph.utils; + +import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.cxf.jaxws.JaxWsProxyFactoryBean; + +public class ISLookupClientFactory { + + private static final Log log = LogFactory.getLog(ISLookupClientFactory.class); + + public static ISLookUpService getLookUpService(final String isLookupUrl) { + return getServiceStub(ISLookUpService.class, isLookupUrl); + } + + @SuppressWarnings("unchecked") + private static T getServiceStub(final Class clazz, final String endpoint) { + log.info(String.format("creating %s stub from %s", clazz.getName(), endpoint)); + final JaxWsProxyFactoryBean jaxWsProxyFactory = new JaxWsProxyFactoryBean(); + jaxWsProxyFactory.setServiceClass(clazz); + jaxWsProxyFactory.setAddress(endpoint); + return (T) 
jaxWsProxyFactory.create(); + } +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/LicenseComparator.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/LicenseComparator.java new file mode 100644 index 000000000..c4cbfadea --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/LicenseComparator.java @@ -0,0 +1,49 @@ +package eu.dnetlib.dhp.graph.utils; + +import eu.dnetlib.dhp.schema.oaf.Qualifier; + +import java.util.Comparator; + +public class LicenseComparator implements Comparator { + + @Override + public int compare(Qualifier left, Qualifier right) { + + if (left == null && right == null) return 0; + if (left == null) return 1; + if (right == null) return -1; + + String lClass = left.getClassid(); + String rClass = right.getClassid(); + + if (lClass.equals(rClass)) return 0; + + if (lClass.equals("OPEN SOURCE")) return -1; + if (rClass.equals("OPEN SOURCE")) return 1; + + if (lClass.equals("OPEN")) return -1; + if (rClass.equals("OPEN")) return 1; + + if (lClass.equals("6MONTHS")) return -1; + if (rClass.equals("6MONTHS")) return 1; + + if (lClass.equals("12MONTHS")) return -1; + if (rClass.equals("12MONTHS")) return 1; + + if (lClass.equals("EMBARGO")) return -1; + if (rClass.equals("EMBARGO")) return 1; + + if (lClass.equals("RESTRICTED")) return -1; + if (rClass.equals("RESTRICTED")) return 1; + + if (lClass.equals("CLOSED")) return -1; + if (rClass.equals("CLOSED")) return 1; + + if (lClass.equals("UNKNOWN")) return -1; + if (rClass.equals("UNKNOWN")) return 1; + + // Else (but unlikely), lexicographical ordering will do. + return lClass.compareTo(rClass); + } + +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/StreamingInputDocumentFactory.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/StreamingInputDocumentFactory.java new file mode 100644 index 000000000..736c9fc28 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/StreamingInputDocumentFactory.java @@ -0,0 +1,253 @@ +package eu.dnetlib.dhp.graph.utils; + +import java.io.StringReader; +import java.io.StringWriter; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import javax.xml.stream.*; +import javax.xml.stream.events.Namespace; +import javax.xml.stream.events.StartElement; +import javax.xml.stream.events.XMLEvent; + +import com.google.common.collect.Lists; +import org.apache.solr.common.SolrInputDocument; + +/** + * Optimized version of the document parser, drop in replacement of InputDocumentFactory. + * + *

+ * <p>
+ * Faster because:
+ * </p>
+ * <ul>
+ * <li>Doesn't create a DOM for the full document</li>
+ * <li>Doesn't execute XPaths against the DOM</li>
+ * <li>Quickly serializes the 'result' element directly into a string</li>
+ * <li>Uses less memory: less pressure on GC and allows more threads to process documents in parallel</li>
+ * </ul>
+ *
+ * <p>
+ * This class is fully reentrant and can be invoked in parallel.
+ * </p>
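+ *
+ * <p>
+ * Intended usage, as a sketch (dsVersion, dsId and xmlIndexRecord are placeholders for caller-provided values):
+ * </p>
+ * <pre>
+ * StreamingInputDocumentFactory factory = new StreamingInputDocumentFactory(dsVersion, dsId);
+ * SolrInputDocument solrDocument = factory.parseDocument(xmlIndexRecord);
+ * </pre>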

+ * + * @author claudio + * + */ +public class StreamingInputDocumentFactory { + + private static final String INDEX_FIELD_PREFIX = "__"; + + private static final String DS_VERSION = INDEX_FIELD_PREFIX + "dsversion"; + + private static final String DS_ID = INDEX_FIELD_PREFIX + "dsid"; + + private static final String RESULT = "result"; + + private static final String INDEX_RESULT = INDEX_FIELD_PREFIX + RESULT; + + private static final String INDEX_RECORD_ID = INDEX_FIELD_PREFIX + "indexrecordidentifier"; + + private static final String outFormat = new String("yyyy-MM-dd'T'hh:mm:ss'Z'"); + + private final static List dateFormats = Arrays.asList("yyyy-MM-dd'T'hh:mm:ss", "yyyy-MM-dd", "dd-MM-yyyy", "dd/MM/yyyy", "yyyy"); + + private static final String DEFAULTDNETRESULT = "dnetResult"; + + private static final String TARGETFIELDS = "targetFields"; + + private static final String INDEX_RECORD_ID_ELEMENT = "indexRecordIdentifier"; + + private static final String ROOT_ELEMENT = "indexRecord"; + + private static final int MAX_FIELD_LENGTH = 25000; + + private ThreadLocal inputFactory = ThreadLocal.withInitial(() -> XMLInputFactory.newInstance()); + + private ThreadLocal outputFactory = ThreadLocal.withInitial(() -> XMLOutputFactory.newInstance()); + + private ThreadLocal eventFactory = ThreadLocal.withInitial(() -> XMLEventFactory.newInstance()); + + private String version; + + private String dsId; + + private String resultName = DEFAULTDNETRESULT; + + public StreamingInputDocumentFactory(final String version, final String dsId) { + this(version, dsId, DEFAULTDNETRESULT); + } + + public StreamingInputDocumentFactory(final String version, final String dsId, final String resultName) { + this.version = version; + this.dsId = dsId; + this.resultName = resultName; + } + + public SolrInputDocument parseDocument(final String inputDocument) { + + final StringWriter results = new StringWriter(); + final List nsList = Lists.newLinkedList(); + try { + + XMLEventReader parser = inputFactory.get().createXMLEventReader(new StringReader(inputDocument)); + + final SolrInputDocument indexDocument = new SolrInputDocument(new HashMap<>()); + + while (parser.hasNext()) { + final XMLEvent event = parser.nextEvent(); + if ((event != null) && event.isStartElement()) { + final String localName = event.asStartElement().getName().getLocalPart(); + + if (ROOT_ELEMENT.equals(localName)) { + nsList.addAll(getNamespaces(event)); + } else if (INDEX_RECORD_ID_ELEMENT.equals(localName)) { + final XMLEvent text = parser.nextEvent(); + String recordId = getText(text); + indexDocument.addField(INDEX_RECORD_ID, recordId); + } else if (TARGETFIELDS.equals(localName)) { + parseTargetFields(indexDocument, parser); + } else if (resultName.equals(localName)) { + copyResult(indexDocument, results, parser, nsList, resultName); + } + } + } + + if (version != null) { + indexDocument.addField(DS_VERSION, version); + } + + if (dsId != null) { + indexDocument.addField(DS_ID, dsId); + } + + if (!indexDocument.containsKey(INDEX_RECORD_ID)) { + indexDocument.clear(); + System.err.println("missing indexrecord id:\n" + inputDocument); + } + + return indexDocument; + } catch (XMLStreamException e) { + return new SolrInputDocument(); + } + } + + private List getNamespaces(final XMLEvent event) { + final List res = Lists.newLinkedList(); + @SuppressWarnings("unchecked") + Iterator nsIter = event.asStartElement().getNamespaces(); + while (nsIter.hasNext()) { + Namespace ns = nsIter.next(); + res.add(ns); + } + return res; + } + + /** + * Parse the 
targetFields block and add fields to the Solr document.
+ *
+ * @param indexDocument
+ * @param parser
+ * @throws XMLStreamException
+ */
+ protected void parseTargetFields(final SolrInputDocument indexDocument, final XMLEventReader parser) throws XMLStreamException {
+
+ boolean hasFields = false;
+
+ while (parser.hasNext()) {
+ final XMLEvent targetEvent = parser.nextEvent();
+ if (targetEvent.isEndElement() && targetEvent.asEndElement().getName().getLocalPart().equals(TARGETFIELDS)) {
+ break;
+ }
+
+ if (targetEvent.isStartElement()) {
+ final String fieldName = targetEvent.asStartElement().getName().getLocalPart();
+ final XMLEvent text = parser.nextEvent();
+
+ String data = getText(text);
+
+ addField(indexDocument, fieldName, data);
+ hasFields = true;
+ }
+ }
+
+ if (!hasFields) {
+ indexDocument.clear();
+ }
+ }
+
+ /**
+ * Copy the /indexRecord/result element and children, preserving namespace declarations etc.
+ *
+ * @param indexDocument
+ * @param results
+ * @param parser
+ * @param nsList
+ * @param dnetResult
+ * @throws XMLStreamException
+ */
+ protected void copyResult(final SolrInputDocument indexDocument,
+ final StringWriter results,
+ final XMLEventReader parser,
+ final List<Namespace> nsList,
+ final String dnetResult) throws XMLStreamException {
+ final XMLEventWriter writer = outputFactory.get().createXMLEventWriter(results);
+
+ for (Namespace ns : nsList) {
+ eventFactory.get().createNamespace(ns.getPrefix(), ns.getNamespaceURI());
+ }
+
+ StartElement newRecord = eventFactory.get().createStartElement("", null, RESULT, null, nsList.iterator());
+
+ // new root record
+ writer.add(newRecord);
+
+ // copy the rest as it is
+ while (parser.hasNext()) {
+ final XMLEvent resultEvent = parser.nextEvent();
+
+ // TODO: replace with depth tracking instead of close tag tracking.
+ if (resultEvent.isEndElement() && resultEvent.asEndElement().getName().getLocalPart().equals(dnetResult)) {
+ writer.add(eventFactory.get().createEndElement("", null, RESULT));
+ break;
+ }
+
+ writer.add(resultEvent);
+ }
+ writer.close();
+ indexDocument.addField(INDEX_RESULT, results.toString());
+ }
+
+ /**
+ * Helper used to add a field to a Solr doc; it avoids adding empty fields.
+ *
+ * @param indexDocument
+ * @param field
+ * @param value
+ */
+ private final void addField(final SolrInputDocument indexDocument, final String field, final String value) {
+ String cleaned = value.trim();
+ if (!cleaned.isEmpty()) {
+ // log.info("\n\n adding field " + field.toLowerCase() + " value: " + cleaned + "\n");
+ indexDocument.addField(field.toLowerCase(), cleaned);
+ }
+ }
+
+ /**
+ * Helper used to get the string from a text element.
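+ * Values longer than MAX_FIELD_LENGTH are truncated, so oversized text nodes cannot bloat the resulting document.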
+ * + * @param text + * @return the + */ + protected final String getText(final XMLEvent text) { + if (text.isEndElement()) // log.warn("skipping because isEndOfElement " + text.asEndElement().getName().getLocalPart()); + return ""; + + final String data = text.asCharacters().getData(); + if (data != null && data.length() > MAX_FIELD_LENGTH) { + return data.substring(0, MAX_FIELD_LENGTH); + } + + return data; + } + +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/TemplateFactory.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/TemplateFactory.java new file mode 100644 index 000000000..27c55fab7 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/TemplateFactory.java @@ -0,0 +1,107 @@ +package eu.dnetlib.dhp.graph.utils; + +import eu.dnetlib.dhp.schema.oaf.DataInfo; +import eu.dnetlib.dhp.schema.oaf.OafEntity; +import org.apache.commons.lang3.StringUtils; +import org.stringtemplate.v4.ST; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.stream.Collectors; + +import static eu.dnetlib.dhp.graph.utils.GraphMappingUtils.removePrefix; +import static eu.dnetlib.dhp.graph.utils.XmlSerializationUtils.escapeXml; + +public class TemplateFactory { + + private TemplateResources resources; + + private final static char DELIMITER = '$'; + + public TemplateFactory() { + try { + resources = new TemplateResources(); + } catch (IOException e) { + throw new IllegalStateException(e); + } + } + + public String buildBody(final String type, final List metadata, final List rels, final List children, final List extraInfo) { + ST body = getTemplate(resources.getEntity()); + + body.add("name", type); + body.add("metadata", metadata); + body.add("rels", rels); + body.add("children", children); + body.add("extrainfo", extraInfo); + + return body.render(); + } + + public String getChild(final String name, final String id, final List metadata) { + return getTemplate(resources.getChild()) + .add("name", name) + .add("hasId", !(id == null)) + .add("id", id != null ? escapeXml(removePrefix(id)) : "") + .add("metadata", metadata) + .render(); + } + + public String buildRecord( + final OafEntity entity, + final String schemaLocation, + final String body) { + return getTemplate(resources.getRecord()) + .add("id", escapeXml(removePrefix(entity.getId()))) + .add("dateofcollection", entity.getDateofcollection()) + .add("dateoftransformation", entity.getDateoftransformation()) + .add("schemaLocation", schemaLocation) + .add("it", body) + .render(); + } + + public String getRel(final String type, + final String objIdentifier, + final Collection fields, + final String semanticclass, + final String semantischeme, + final DataInfo info) { + return getTemplate(resources.getRel()) + .add("type", type) + .add("objIdentifier", escapeXml(removePrefix(objIdentifier))) + .add("class", semanticclass) + .add("scheme", semantischeme) + .add("metadata", fields) + .add("inferred", info.getInferred()) + .add("trust", info.getTrust()) + .add("inferenceprovenance", info.getInferenceprovenance()) + .add("provenanceaction", info.getProvenanceaction() != null ? 
info.getProvenanceaction().getClassid() : "") + .render(); + } + + public String getInstance(final String resultId, final List instancemetadata, final List webresources) { + return getTemplate(resources.getInstance()) + .add("instanceId", escapeXml(removePrefix(resultId))) + .add("metadata", instancemetadata) + .add("webresources", webresources + .stream() + .filter(StringUtils::isNotBlank) + .map(w -> getWebResource(w)) + .collect(Collectors.toList())) + .render(); + } + + private String getWebResource(final String identifier) { + return getTemplate(resources.getWebresource()) + .add("identifier", escapeXml(identifier)) + .render(); + } + + // HELPERS + + private ST getTemplate(final String res) { + return new ST(res, DELIMITER, DELIMITER); + } + +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/TemplateResources.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/TemplateResources.java new file mode 100644 index 000000000..92aaedfd3 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/TemplateResources.java @@ -0,0 +1,54 @@ +package eu.dnetlib.dhp.graph.utils; + +import com.google.common.io.Resources; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +public class TemplateResources { + + private String record = read("eu/dnetlib/dhp/graph/template/record.st"); + + private String instance = read("eu/dnetlib/dhp/graph/template/instance.st"); + + private String rel = read("eu/dnetlib/dhp/graph/template/rel.st"); + + private String webresource = read("eu/dnetlib/dhp/graph/template/webresource.st"); + + private String child = read("eu/dnetlib/dhp/graph/template/child.st"); + + private String entity = read("eu/dnetlib/dhp/graph/template/entity.st"); + + private static String read(final String classpathResource) throws IOException { + return Resources.toString(Resources.getResource(classpathResource), StandardCharsets.UTF_8); + } + + public TemplateResources() throws IOException { + + } + + public String getEntity() { + return entity; + } + + public String getRecord() { + return record; + } + + public String getInstance() { + return instance; + } + + public String getRel() { + return rel; + } + + public String getWebresource() { + return webresource; + } + + public String getChild() { + return child; + } + +} diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/XmlRecordFactory.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/XmlRecordFactory.java new file mode 100644 index 000000000..bd4f8ec6c --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/XmlRecordFactory.java @@ -0,0 +1,962 @@ +package eu.dnetlib.dhp.graph.utils; + +import com.google.common.base.Joiner; +import com.google.common.base.Splitter; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import com.mycila.xmltool.XMLDoc; +import com.mycila.xmltool.XMLTag; +import eu.dnetlib.dhp.graph.model.JoinedEntity; +import eu.dnetlib.dhp.graph.model.RelatedEntity; +import eu.dnetlib.dhp.graph.model.Tuple2; +import eu.dnetlib.dhp.schema.oaf.Result; +import eu.dnetlib.dhp.schema.oaf.*; +import org.dom4j.Document; +import org.dom4j.DocumentException; +import org.dom4j.Element; +import org.dom4j.Node; +import org.dom4j.io.OutputFormat; +import org.dom4j.io.SAXReader; +import org.dom4j.io.XMLWriter; + +import javax.xml.transform.*; +import 
javax.xml.transform.dom.DOMSource; +import javax.xml.transform.stream.StreamResult; +import java.io.IOException; +import java.io.Serializable; +import java.io.StringReader; +import java.io.StringWriter; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import static eu.dnetlib.dhp.graph.utils.GraphMappingUtils.*; +import static eu.dnetlib.dhp.graph.utils.XmlSerializationUtils.*; +import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.apache.commons.lang3.StringUtils.substringBefore; + +public class XmlRecordFactory implements Serializable { + + private Set specialDatasourceTypes; + + private ContextMapper contextMapper; + + private String schemaLocation; + + private Set contextes = Sets.newHashSet(); + + private boolean indent = false; + + public XmlRecordFactory( + final ContextMapper contextMapper, final boolean indent, + final String schemaLocation, final Set otherDatasourceTypesUForUI) { + + this.contextMapper = contextMapper; + this.schemaLocation = schemaLocation; + this.specialDatasourceTypes = otherDatasourceTypesUForUI; + + this.indent = indent; + } + + public String build(final JoinedEntity je) { + final OafEntity entity = je.getEntity(); + TemplateFactory templateFactory = new TemplateFactory(); + try { + final List metadata = metadata(je.getType(), entity); + + // rels has to be processed before the contexts because they enrich the contextMap with the funding info. + final List relations = listRelations(je, templateFactory); + + metadata.addAll(buildContexts(getMainType(je.getType()))); + metadata.add(parseDataInfo(entity.getDataInfo())); + + final String body = templateFactory.buildBody( + getMainType(je.getType()), + metadata, + relations, + listChildren(je, templateFactory), listExtraInfo(je)); + + return printXML(templateFactory.buildRecord(entity, schemaLocation, body), indent); + } catch (final Throwable e) { + throw new RuntimeException(String.format("error building record '%s'", entity.getId()), e); + } + } + + private String printXML(String xml, boolean indent) { + try { + final Document doc = new SAXReader().read(new StringReader(xml)); + OutputFormat format = indent ? OutputFormat.createPrettyPrint() : OutputFormat.createCompactFormat(); + format.setExpandEmptyElements(false); + format.setSuppressDeclaration(true); + StringWriter sw = new StringWriter(); + XMLWriter writer = new XMLWriter(sw, format); + writer.write(doc); + return sw.toString(); + } catch (IOException | DocumentException e) { + throw new IllegalArgumentException("Unable to indent XML. 
Invalid record:\n" + xml, e); + } + } + + private List metadata(final String type, final OafEntity entity) { + + final List metadata = Lists.newArrayList(); + + if (entity.getCollectedfrom() != null) { + metadata.addAll(entity.getCollectedfrom() + .stream() + .map(kv -> mapKeyValue("collectedfrom", kv)) + .collect(Collectors.toList())); + } + if (entity.getOriginalId() != null) { + metadata.addAll(entity.getOriginalId() + .stream() + .map(s -> asXmlElement("originalId", s)) + .collect(Collectors.toList())); + } + if (entity.getPid() != null) { + metadata.addAll(entity.getPid() + .stream() + .map(p -> mapStructuredProperty("pid", p)) + .collect(Collectors.toList())); + } + + if (GraphMappingUtils.isResult(type)) { + final Result r = (Result) entity; + + if (r.getTitle() != null) { + metadata.addAll(r.getTitle() + .stream() + .map(t -> mapStructuredProperty("title", t)) + .collect(Collectors.toList())); + } + if (r.getAuthor() != null) { + metadata.addAll(r.getAuthor() + .stream() + .map(a -> { + final StringBuilder sb = new StringBuilder(" isNotBlank(sp.getQualifier().getClassid()) && isNotBlank(sp.getValue())) + .forEach(sp -> { + String pidType = escapeXml(sp.getQualifier().getClassid()).replaceAll("\\W", ""); + String pidValue = escapeXml(sp.getValue()); + + // ugly hack: some records provide swapped pidtype and pidvalue + if (authorPidTypes.contains(pidValue.toLowerCase().trim())) { + sb.append(String.format(" %s=\"%s\"", pidValue, pidType)); + } else { + pidType = pidType.replaceAll("\\W", "").replaceAll("\\d", ""); + if (isNotBlank(pidType)) { + sb.append(String.format(" %s=\"%s\"", + pidType, + pidValue.toLowerCase().replaceAll("orcid", ""))); + } + } + }); + } + sb.append(">" + escapeXml(a.getFullname()) + ""); + return sb.toString(); + }).collect(Collectors.toList())); + } + if (r.getContributor() != null) { + metadata.addAll(r.getContributor() + .stream() + .map(c -> asXmlElement("contributor", c.getValue())) + .collect(Collectors.toList())); + } + if (r.getCountry() != null) { + metadata.addAll(r.getCountry() + .stream() + .map(c -> mapQualifier("country", c)) + .collect(Collectors.toList())); + } + if (r.getCoverage() != null) { + metadata.addAll(r.getCoverage() + .stream() + .map(c -> asXmlElement("coverage", c.getValue())) + .collect(Collectors.toList())); + } + if (r.getDateofacceptance() != null) { + metadata.add(asXmlElement("dateofacceptance", r.getDateofacceptance().getValue())); + } + if (r.getDescription() != null) { + metadata.addAll(r.getDescription() + .stream() + .map(c -> asXmlElement("description", c.getValue())) + .collect(Collectors.toList())); + } + if (r.getEmbargoenddate() != null) { + metadata.add(asXmlElement("embargoenddate", r.getEmbargoenddate().getValue())); + } + if (r.getSubject() != null) { + metadata.addAll(r.getSubject() + .stream() + .map(s -> mapStructuredProperty("subject", s)) + .collect(Collectors.toList())); + } + if (r.getLanguage() != null) { + metadata.add(mapQualifier("language", r.getLanguage())); + } + if (r.getRelevantdate() != null) { + metadata.addAll(r.getRelevantdate() + .stream() + .map(s -> mapStructuredProperty("relevantdate", s)) + .collect(Collectors.toList())); + } + if (r.getPublisher() != null) { + metadata.add(asXmlElement("publisher", r.getPublisher().getValue())); + } + if (r.getSource() != null) { + metadata.addAll(r.getSource() + .stream() + .map(c -> asXmlElement("source", c.getValue())) + .collect(Collectors.toList())); + } + if (r.getFormat() != null) { + metadata.addAll(r.getFormat() + .stream() + .map(c -> 
asXmlElement("format", c.getValue())) + .collect(Collectors.toList())); + } + if (r.getResulttype() != null) { + metadata.add(mapQualifier("resulttype", r.getResulttype())); + } + if (r.getResourcetype() != null) { + metadata.add(mapQualifier("resourcetype", r.getResourcetype())); + } + if (r.getRefereed() != null) { + metadata.add(asXmlElement("refereed", r.getRefereed().getValue())); + } + if (r.getProcessingchargeamount() != null) { + metadata.add(asXmlElement("processingchargeamount", r.getProcessingchargeamount().getValue())); + } + if (r.getProcessingchargecurrency() != null) { + metadata.add(asXmlElement("processingchargecurrency", r.getProcessingchargecurrency().getValue())); + } + + metadata.add(mapQualifier("bestaccessright", getBestAccessright(r))); + + if (r.getContext() != null) { + contextes.addAll(r.getContext() + .stream() + .map(c -> c.getId()) + .collect(Collectors.toList())); + if (contextes.contains("dh-ch::subcommunity::2")) { + contextes.add("clarin"); + } + } + } + + switch (EntityType.valueOf(type)) { + case publication: + final Publication pub = (Publication) entity; + + if (pub.getJournal() != null) { + final Journal j = pub.getJournal(); + metadata.add(mapJournal(j)); + } + + break; + case dataset: + final Dataset d = (Dataset) entity; + if (d.getDevice() != null) { + metadata.add(asXmlElement("device", d.getDevice().getValue())); + } + if (d.getLastmetadataupdate() != null) { + metadata.add(asXmlElement("lastmetadataupdate", d.getLastmetadataupdate().getValue())); + } + if (d.getMetadataversionnumber() != null) { + metadata.add(asXmlElement("metadataversionnumber", d.getMetadataversionnumber().getValue())); + } + if (d.getSize() != null) { + metadata.add(asXmlElement("size", d.getSize().getValue())); + } + if (d.getStoragedate() != null) { + metadata.add(asXmlElement("storagedate", d.getStoragedate().getValue())); + } + if (d.getVersion() != null) { + metadata.add(asXmlElement("version", d.getVersion().getValue())); + } + //TODO d.getGeolocation() + + break; + case otherresearchproduct: + final OtherResearchProduct orp = (OtherResearchProduct) entity; + + if (orp.getContactperson() != null) { + metadata.addAll(orp.getContactperson() + .stream() + .map(c -> asXmlElement("contactperson", c.getValue())) + .collect(Collectors.toList())); + } + + if (orp.getContactgroup() != null) { + metadata.addAll(orp.getContactgroup() + .stream() + .map(c -> asXmlElement("contactgroup", c.getValue())) + .collect(Collectors.toList())); + } + if (orp.getTool() != null) { + metadata.addAll(orp.getTool() + .stream() + .map(c -> asXmlElement("tool", c.getValue())) + .collect(Collectors.toList())); + } + break; + case software: + final Software s = (Software) entity; + + if (s.getDocumentationUrl() != null) { + metadata.addAll(s.getDocumentationUrl() + .stream() + .map(c -> asXmlElement("documentationUrl", c.getValue())) + .collect(Collectors.toList())); + } + if (s.getLicense() != null) { + metadata.addAll(s.getLicense() + .stream() + .map(l -> mapStructuredProperty("license", l)) + .collect(Collectors.toList())); + } + if (s.getCodeRepositoryUrl() != null) { + metadata.add(asXmlElement("codeRepositoryUrl", s.getCodeRepositoryUrl().getValue())); + } + if (s.getProgrammingLanguage() != null) { + metadata.add(mapQualifier("programmingLanguage", s.getProgrammingLanguage())); + } + break; + case datasource: + final Datasource ds = (Datasource) entity; + + if (ds.getDatasourcetype() != null) { + mapDatasourceType(metadata, ds.getDatasourcetype()); + } + if (ds.getOpenairecompatibility() != 
null) {
+ metadata.add(mapQualifier("openairecompatibility", ds.getOpenairecompatibility()));
+ }
+ if (ds.getOfficialname() != null) {
+ metadata.add(asXmlElement("officialname", ds.getOfficialname().getValue()));
+ }
+ if (ds.getEnglishname() != null) {
+ metadata.add(asXmlElement("englishname", ds.getEnglishname().getValue()));
+ }
+ if (ds.getWebsiteurl() != null) {
+ metadata.add(asXmlElement("websiteurl", ds.getWebsiteurl().getValue()));
+ }
+ if (ds.getLogourl() != null) {
+ metadata.add(asXmlElement("logourl", ds.getLogourl().getValue()));
+ }
+ if (ds.getContactemail() != null) {
+ metadata.add(asXmlElement("contactemail", ds.getContactemail().getValue()));
+ }
+ if (ds.getNamespaceprefix() != null) {
+ metadata.add(asXmlElement("namespaceprefix", ds.getNamespaceprefix().getValue()));
+ }
+ if (ds.getLatitude() != null) {
+ metadata.add(asXmlElement("latitude", ds.getLatitude().getValue()));
+ }
+ if (ds.getLongitude() != null) {
+ metadata.add(asXmlElement("longitude", ds.getLongitude().getValue()));
+ }
+ if (ds.getDateofvalidation() != null) {
+ metadata.add(asXmlElement("dateofvalidation", ds.getDateofvalidation().getValue()));
+ }
+ if (ds.getDescription() != null) {
+ metadata.add(asXmlElement("description", ds.getDescription().getValue()));
+ }
+ if (ds.getOdnumberofitems() != null) {
+ metadata.add(asXmlElement("odnumberofitems", ds.getOdnumberofitems().getValue()));
+ }
+ if (ds.getOdnumberofitemsdate() != null) {
+ metadata.add(asXmlElement("odnumberofitemsdate", ds.getOdnumberofitemsdate().getValue()));
+ }
+ if (ds.getOdpolicies() != null) {
+ metadata.add(asXmlElement("odpolicies", ds.getOdpolicies().getValue()));
+ }
+ if (ds.getOdlanguages() != null) {
+ metadata.addAll(ds.getOdlanguages()
+ .stream()
+ .map(c -> asXmlElement("odlanguages", c.getValue()))
+ .collect(Collectors.toList()));
+ }
+ if (ds.getOdcontenttypes() != null) {
+ metadata.addAll(ds.getOdcontenttypes()
+ .stream()
+ .map(c -> asXmlElement("odcontenttypes", c.getValue()))
+ .collect(Collectors.toList()));
+ }
+ if (ds.getAccessinfopackage() != null) {
+ metadata.addAll(ds.getAccessinfopackage()
+ .stream()
+ .map(c -> asXmlElement("accessinfopackage", c.getValue()))
+ .collect(Collectors.toList()));
+ }
+ if (ds.getReleasestartdate() != null) {
+ metadata.add(asXmlElement("releasestartdate", ds.getReleasestartdate().getValue()));
+ }
+ if (ds.getReleaseenddate() != null) {
+ metadata.add(asXmlElement("releaseenddate", ds.getReleaseenddate().getValue()));
+ }
+ if (ds.getMissionstatementurl() != null) {
+ metadata.add(asXmlElement("missionstatementurl", ds.getMissionstatementurl().getValue()));
+ }
+ if (ds.getDataprovider() != null) {
+ metadata.add(asXmlElement("dataprovider", ds.getDataprovider().getValue().toString()));
+ }
+ if (ds.getServiceprovider() != null) {
+ metadata.add(asXmlElement("serviceprovider", ds.getServiceprovider().getValue().toString()));
+ }
+ if (ds.getDatabaseaccesstype() != null) {
+ metadata.add(asXmlElement("databaseaccesstype", ds.getDatabaseaccesstype().getValue()));
+ }
+ if (ds.getDatauploadtype() != null) {
+ metadata.add(asXmlElement("datauploadtype", ds.getDatauploadtype().getValue()));
+ }
+ if (ds.getDatabaseaccessrestriction() != null) {
+ metadata.add(asXmlElement("databaseaccessrestriction", ds.getDatabaseaccessrestriction().getValue()));
+ }
+ if (ds.getDatauploadrestriction() != null) {
+ metadata.add(asXmlElement("datauploadrestriction", ds.getDatauploadrestriction().getValue()));
+ }
+ if (ds.getVersioning() != null) {
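+ // versioning, like dataprovider and serviceprovider above, wraps a non-String value,
+ // hence the explicit toString() on the unwrapped value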
metadata.add(asXmlElement("versioning", ds.getVersioning().getValue().toString())); + } + if (ds.getCitationguidelineurl() != null) { + metadata.add(asXmlElement("citationguidelineurl", ds.getCitationguidelineurl().getValue())); + } + if (ds.getQualitymanagementkind() != null) { + metadata.add(asXmlElement("qualitymanagementkind", ds.getQualitymanagementkind().getValue())); + } + if (ds.getPidsystems() != null) { + metadata.add(asXmlElement("pidsystems", ds.getPidsystems().getValue())); + } + if (ds.getCertificates() != null) { + metadata.add(asXmlElement("certificates", ds.getCertificates().getValue())); + } + if (ds.getPolicies() != null) { + metadata.addAll(ds.getPolicies() + .stream() + .map(kv -> mapKeyValue("policies", kv)) + .collect(Collectors.toList())); + } + if (ds.getJournal() != null) { + metadata.add(mapJournal(ds.getJournal())); + } + if (ds.getSubjects() != null) { + metadata.addAll(ds.getSubjects() + .stream() + .map(sp -> mapStructuredProperty("subject", sp)) + .collect(Collectors.toList())); + } + + break; + case organization: + final Organization o = (Organization) entity; + + if (o.getLegalshortname() != null) { + metadata.add(asXmlElement("legalshortname", o.getLegalshortname().getValue())); + } + if (o.getLegalname() != null) { + metadata.add(asXmlElement("legalname", o.getLegalname().getValue())); + } + if (o.getAlternativeNames() != null) { + metadata.addAll(o.getAlternativeNames() + .stream() + .map(c -> asXmlElement("alternativeNames", c.getValue())) + .collect(Collectors.toList())); + } + if (o.getWebsiteurl() != null) { + metadata.add(asXmlElement("websiteurl", o.getWebsiteurl().getValue())); + } + if (o.getLogourl() != null) { + metadata.add(asXmlElement("websiteurl", o.getLogourl().getValue())); + } + + if (o.getEclegalbody() != null) { + metadata.add(asXmlElement("eclegalbody", o.getEclegalbody().getValue())); + } + if (o.getEclegalperson() != null) { + metadata.add(asXmlElement("eclegalperson", o.getEclegalperson().getValue())); + } + if (o.getEcnonprofit() != null) { + metadata.add(asXmlElement("ecnonprofit", o.getEcnonprofit().getValue())); + } + if (o.getEcresearchorganization() != null) { + metadata.add(asXmlElement("ecresearchorganization", o.getEcresearchorganization().getValue())); + } + if (o.getEchighereducation() != null) { + metadata.add(asXmlElement("echighereducation", o.getEchighereducation().getValue())); + } + if (o.getEcinternationalorganization() != null) { + metadata.add(asXmlElement("ecinternationalorganizationeurinterests", o.getEcinternationalorganization().getValue())); + } + if (o.getEcinternationalorganization() != null) { + metadata.add(asXmlElement("ecinternationalorganization", o.getEcinternationalorganization().getValue())); + } + if (o.getEcenterprise() != null) { + metadata.add(asXmlElement("ecenterprise", o.getEcenterprise().getValue())); + } + if (o.getEcsmevalidated() != null) { + metadata.add(asXmlElement("ecsmevalidated", o.getEcsmevalidated().getValue())); + } + if (o.getEcnutscode() != null) { + metadata.add(asXmlElement("ecnutscode", o.getEcnutscode().getValue())); + } + if (o.getCountry() != null) { + metadata.add(mapQualifier("country", o.getCountry())); + } + + break; + case project: + + final Project p = (Project) entity; + + if (p.getWebsiteurl() != null) { + metadata.add(asXmlElement("websiteurl", p.getWebsiteurl().getValue())); + } + if (p.getCode() != null) { + metadata.add(asXmlElement("code", p.getCode().getValue())); + } + if (p.getAcronym() != null) { + metadata.add(asXmlElement("acronym", 
p.getAcronym().getValue())); + } + if (p.getTitle() != null) { + metadata.add(asXmlElement("title", p.getTitle().getValue())); + } + if (p.getStartdate() != null) { + metadata.add(asXmlElement("startdate", p.getStartdate().getValue())); + } + if (p.getEnddate() != null) { + metadata.add(asXmlElement("enddate", p.getEnddate().getValue())); + } + if (p.getCallidentifier() != null) { + metadata.add(asXmlElement("callidentifier", p.getCallidentifier().getValue())); + } + if (p.getKeywords() != null) { + metadata.add(asXmlElement("keywords", p.getKeywords().getValue())); + } + if (p.getDuration() != null) { + metadata.add(asXmlElement("duration", p.getDuration().getValue())); + } + if (p.getEcsc39() != null) { + metadata.add(asXmlElement("ecsc39", p.getEcsc39().getValue())); + } + if (p.getEcarticle29_3() != null) { + metadata.add(asXmlElement("ecarticle29_3", p.getEcarticle29_3().getValue())); + } + if (p.getSubjects() != null) { + metadata.addAll(p.getSubjects() + .stream() + .map(sp -> mapStructuredProperty("subject", sp)) + .collect(Collectors.toList())); + } + if (p.getContracttype() != null) { + metadata.add(mapQualifier("contracttype", p.getContracttype())); + } + if (p.getEcsc39() != null) { + metadata.add(asXmlElement("ecsc39", p.getEcsc39().getValue())); + } + if (p.getContactfullname() != null) { + metadata.add(asXmlElement("contactfullname", p.getContactfullname().getValue())); + } + if (p.getContactfax() != null) { + metadata.add(asXmlElement("contactfax", p.getContactfax().getValue())); + } + if (p.getContactphone() != null) { + metadata.add(asXmlElement("contactphone", p.getContactphone().getValue())); + } + if (p.getContactemail() != null) { + metadata.add(asXmlElement("contactemail", p.getContactemail().getValue())); + } + if (p.getSummary() != null) { + metadata.add(asXmlElement("summary", p.getSummary().getValue())); + } + if (p.getCurrency() != null) { + metadata.add(asXmlElement("currency", p.getCurrency().getValue())); + } + if (p.getTotalcost() != null) { + metadata.add(asXmlElement("totalcost", p.getTotalcost().toString())); + } + if (p.getFundedamount() != null) { + metadata.add(asXmlElement("fundedamount", p.getFundedamount().toString())); + } + if (p.getFundingtree() != null) { + metadata.addAll(p.getFundingtree() + .stream() + .map(ft -> asXmlElement("fundingtree", ft.getValue())) + .collect(Collectors.toList())); + } + + break; + default: + throw new IllegalArgumentException("invalid entity type: " + type); + } + + return metadata; + } + + private void mapDatasourceType(List metadata, final Qualifier dsType) { + metadata.add(mapQualifier("datasourcetype", dsType)); + + if (specialDatasourceTypes.contains(dsType.getClassid())) { + dsType.setClassid("other"); + dsType.setClassname("other"); + } + metadata.add(mapQualifier("datasourcetypeui", dsType)); + } + + private Qualifier getBestAccessright(final Result r) { + Qualifier bestAccessRight = new Qualifier(); + bestAccessRight.setClassid("UNKNOWN"); + bestAccessRight.setClassname("not available"); + bestAccessRight.setSchemeid("dnet:access_modes"); + bestAccessRight.setSchemename("dnet:access_modes"); + + final LicenseComparator lc = new LicenseComparator(); + for (final Instance instance : r.getInstance()) { + if (lc.compare(bestAccessRight, instance.getAccessright()) > 0) { + bestAccessRight = instance.getAccessright(); + } + } + return bestAccessRight; + } + + private List listRelations(final JoinedEntity je, TemplateFactory templateFactory) { + final List rels = Lists.newArrayList(); + + for (final Tuple2 link : 
je.getLinks()) {
+
+ final Relation rel = link.getRelation();
+ final RelatedEntity re = link.getRelatedEntity();
+ final String targetType = link.getRelatedEntity().getType();
+
+ final List<String> metadata = Lists.newArrayList();
+ switch (EntityType.valueOf(targetType)) {
+ case publication:
+ case dataset:
+ case otherresearchproduct:
+ case software:
+ if (re.getTitle() != null && isNotBlank(re.getTitle().getValue())) {
+ metadata.add(mapStructuredProperty("title", re.getTitle()));
+ }
+ if (isNotBlank(re.getDateofacceptance())) {
+ metadata.add(asXmlElement("dateofacceptance", re.getDateofacceptance()));
+ }
+ if (isNotBlank(re.getPublisher())) {
+ metadata.add(asXmlElement("publisher", re.getPublisher()));
+ }
+ if (isNotBlank(re.getCodeRepositoryUrl())) {
+ metadata.add(asXmlElement("coderepositoryurl", re.getCodeRepositoryUrl()));
+ }
+ if (re.getResulttype() != null && !re.getResulttype().isBlank()) {
+ metadata.add(mapQualifier("resulttype", re.getResulttype()));
+ }
+ if (re.getCollectedfrom() != null) {
+ metadata.addAll(re.getCollectedfrom()
+ .stream()
+ .map(kv -> mapKeyValue("collectedfrom", kv))
+ .collect(Collectors.toList()));
+ }
+ if (re.getPid() != null) {
+ metadata.addAll(re.getPid()
+ .stream()
+ .map(p -> mapStructuredProperty("pid", p))
+ .collect(Collectors.toList()));
+ }
+ break;
+ case datasource:
+ if (isNotBlank(re.getOfficialname())) {
+ metadata.add(asXmlElement("officialname", re.getOfficialname()));
+ }
+ if (re.getDatasourcetype() != null && !re.getDatasourcetype().isBlank()) {
+ mapDatasourceType(metadata, re.getDatasourcetype());
+ }
+ if (re.getOpenairecompatibility() != null && !re.getOpenairecompatibility().isBlank()) {
+ metadata.add(mapQualifier("openairecompatibility", re.getOpenairecompatibility()));
+ }
+ break;
+ case organization:
+ if (isNotBlank(re.getLegalname())) {
+ metadata.add(asXmlElement("legalname", re.getLegalname()));
+ }
+ if (isNotBlank(re.getLegalshortname())) {
+ metadata.add(asXmlElement("legalshortname", re.getLegalshortname()));
+ }
+ if (re.getCountry() != null && !re.getCountry().isBlank()) {
+ metadata.add(mapQualifier("country", re.getCountry()));
+ }
+ break;
+ case project:
+ if (isNotBlank(re.getProjectTitle())) {
+ metadata.add(asXmlElement("title", re.getProjectTitle()));
+ }
+ if (isNotBlank(re.getCode())) {
+ metadata.add(asXmlElement("code", re.getCode()));
+ }
+ if (isNotBlank(re.getAcronym())) {
+ metadata.add(asXmlElement("acronym", re.getAcronym()));
+ }
+ if (re.getContracttype() != null && !re.getContracttype().isBlank()) {
+ metadata.add(mapQualifier("contracttype", re.getContracttype()));
+ }
+ if (re.getFundingtree() != null) {
+ metadata.addAll(re.getFundingtree()
+ .stream()
+ .peek(ft -> fillContextMap(ft))
+ .map(ft -> getRelFundingTree(ft))
+ .collect(Collectors.toList()));
+ }
+ break;
+ default:
+ throw new IllegalArgumentException("invalid target type: " + targetType);
+
+ }
+ final DataInfo info = rel.getDataInfo();
+
+ rels.add(templateFactory.getRel(
+ targetType,
+ rel.getTarget(),
+ Sets.newHashSet(metadata),
+ getInverseRelClass(rel.getRelClass()),
+ getScheme(targetType, re.getType()),
+ info));
+ }
+ return rels;
+ }
+
+ private List<String> listChildren(final JoinedEntity je, TemplateFactory templateFactory) {
+
+ final List<String> children = Lists.newArrayList();
+
+ if (MainEntityType.result.toString().equals(getMainType(je.getType()))) {
+ final List<Instance> instances = ((Result) je.getEntity()).getInstance();
+ if (instances != null) {
+ for (final Instance instance : ((Result) je.getEntity()).getInstance()) {
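+ // each instance becomes a child element rendered through the instance.st template, carrying
+ // the access rights, provenance and hosting datasource of one copy of the result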
+ final List<String> fields = Lists.newArrayList();
+
+ if (instance.getAccessright() != null && !instance.getAccessright().isBlank()) {
+ fields.add(mapQualifier("accessright", instance.getAccessright()));
+ }
+ if (instance.getCollectedfrom() != null) {
+ fields.add(mapKeyValue("collectedfrom", instance.getCollectedfrom()));
+ }
+ if (instance.getHostedby() != null) {
+ fields.add(mapKeyValue("hostedby", instance.getHostedby()));
+ }
+ if (instance.getDateofacceptance() != null && isNotBlank(instance.getDateofacceptance().getValue())) {
+ fields.add(asXmlElement("dateofacceptance", instance.getDateofacceptance().getValue()));
+ }
+ if (instance.getInstancetype() != null && !instance.getInstancetype().isBlank()) {
+ fields.add(mapQualifier("instancetype", instance.getInstancetype()));
+ }
+ if (isNotBlank(instance.getDistributionlocation())) {
+ fields.add(asXmlElement("distributionlocation", instance.getDistributionlocation()));
+ }
+
+ children.add(templateFactory.getInstance(instance.getHostedby().getKey(), fields, instance.getUrl()));
+ }
+ }
+ final List<ExternalReference> ext = ((Result) je.getEntity()).getExternalReference();
+ if (ext != null) {
+ for (final ExternalReference er : ((Result) je.getEntity()).getExternalReference()) {
+
+ final List<String> fields = Lists.newArrayList();
+
+ if (isNotBlank(er.getSitename())) {
+ fields.add(asXmlElement("sitename", er.getSitename()));
+ }
+ if (isNotBlank(er.getLabel())) {
+ fields.add(asXmlElement("label", er.getLabel()));
+ }
+ if (isNotBlank(er.getUrl())) {
+ fields.add(asXmlElement("url", er.getUrl()));
+ }
+ if (isNotBlank(er.getDescription())) {
+ fields.add(asXmlElement("description", er.getDescription()));
+ }
+ if (er.getQualifier() != null && !er.getQualifier().isBlank()) {
+ fields.add(mapQualifier("qualifier", er.getQualifier()));
+ }
+ if (isNotBlank(er.getRefidentifier())) {
+ fields.add(asXmlElement("refidentifier", er.getRefidentifier()));
+ }
+ if (isNotBlank(er.getQuery())) {
+ fields.add(asXmlElement("query", er.getQuery()));
+ }
+
+ children.add(templateFactory.getChild("externalreference", null, fields));
+ }
+ }
+ }
+
+ return children;
+ }
+
+ private List<String> listExtraInfo(JoinedEntity je) {
+ final List<ExtraInfo> extraInfo = je.getEntity().getExtraInfo();
+ return extraInfo != null ?
extraInfo + .stream() + .map(e -> mapExtraInfo(e)) + .collect(Collectors.toList()) : Lists.newArrayList(); + } + + private List buildContexts(final String type) { + final List res = Lists.newArrayList(); + + if ((contextMapper != null) && !contextMapper.isEmpty() && MainEntityType.result.toString().equals(type)) { + + XMLTag document = XMLDoc.newDocument(true).addRoot("contextRoot"); + + for (final String context : contextes) { + + String id = ""; + for (final String token : Splitter.on("::").split(context)) { + id += token; + + final ContextDef def = contextMapper.get(id); + + if (def == null) { + continue; + // throw new IllegalStateException(String.format("cannot find context for id '%s'", id)); + } + + if (def.getName().equals("context")) { + final String xpath = "//context/@id='" + def.getId() + "'"; + if (!document.gotoRoot().rawXpathBoolean(xpath, new Object())) { + document = addContextDef(document.gotoRoot(), def); + } + } + + if (def.getName().equals("category")) { + final String rootId = substringBefore(def.getId(), "::"); + document = addContextDef(document.gotoRoot().gotoTag("//context[./@id='" + rootId + "']", new Object()), def); + } + + if (def.getName().equals("concept")) { + document = addContextDef(document, def).gotoParent(); + } + id += "::"; + } + } + final Transformer transformer = getTransformer(); + for (final org.w3c.dom.Element x : document.gotoRoot().getChildElement()) { + try { + res.add(asStringElement(x, transformer)); + } catch (final TransformerException e) { + throw new RuntimeException(e); + } + } + } + + return res; + } + + private Transformer getTransformer() { + try { + Transformer transformer = TransformerFactory.newInstance().newTransformer(); + transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes"); + return transformer; + } catch (TransformerConfigurationException e) { + throw new IllegalStateException("unable to create javax.xml.transform.Transformer", e); + } + } + + private XMLTag addContextDef(final XMLTag tag, final ContextDef def) { + tag.addTag(def.getName()).addAttribute("id", def.getId()).addAttribute("label", def.getLabel()); + if ((def.getType() != null) && !def.getType().isEmpty()) { + tag.addAttribute("type", def.getType()); + } + return tag; + } + + private String asStringElement(final org.w3c.dom.Element element, final Transformer transformer) throws TransformerException { + final StringWriter buffer = new StringWriter(); + transformer.transform(new DOMSource(element), new StreamResult(buffer)); + return buffer.toString(); + } + + private void fillContextMap(final String xmlTree) { + + Document fundingPath; + try { + fundingPath = new SAXReader().read(new StringReader(xmlTree)); + } catch (final DocumentException e) { + throw new RuntimeException(e); + } + try { + final Node funder = fundingPath.selectSingleNode("//funder"); + + if (funder != null) { + + final String funderShortName = funder.valueOf("./shortname"); + contextes.add(funderShortName); + + contextMapper.put(funderShortName, new ContextDef(funderShortName, funder.valueOf("./name"), "context", "funding")); + final Node level0 = fundingPath.selectSingleNode("//funding_level_0"); + if (level0 != null) { + final String level0Id = Joiner.on("::").join(funderShortName, level0.valueOf("./name")); + contextMapper.put(level0Id, new ContextDef(level0Id, level0.valueOf("./description"), "category", "")); + final Node level1 = fundingPath.selectSingleNode("//funding_level_1"); + if (level1 == null) { + contextes.add(level0Id); + } else { + final String level1Id = 
Joiner.on("::").join(level0Id, level1.valueOf("./name")); + contextMapper.put(level1Id, new ContextDef(level1Id, level1.valueOf("./description"), "concept", "")); + final Node level2 = fundingPath.selectSingleNode("//funding_level_2"); + if (level2 == null) { + contextes.add(level1Id); + } else { + final String level2Id = Joiner.on("::").join(level1Id, level2.valueOf("./name")); + contextMapper.put(level2Id, new ContextDef(level2Id, level2.valueOf("./description"), "concept", "")); + contextes.add(level2Id); + } + } + } + } + } catch (final NullPointerException e) { + throw new IllegalArgumentException("malformed funding path: " + xmlTree, e); + } + } + + + + @SuppressWarnings("unchecked") + private String getRelFundingTree(final String xmlTree) { + String funding = ""; + try { + final Document ftree = new SAXReader().read(new StringReader(xmlTree)); + funding = ""; + + funding += getFunderElement(ftree); + + for (final Object o : Lists.reverse(ftree.selectNodes("//fundingtree//*[starts-with(local-name(),'funding_level_')]"))) { + final Element e = (Element) o; + final String _id = e.valueOf("./id"); + funding += "<" + e.getName() + " name=\"" + escapeXml(e.valueOf("./name")) + "\">" + escapeXml(_id) + ""; + } + } catch (final DocumentException e) { + throw new IllegalArgumentException("unable to parse funding tree: " + xmlTree + "\n" + e.getMessage()); + } finally { + funding += ""; + } + return funding; + } + + private String getFunderElement(final Document ftree) { + final String funderId = ftree.valueOf("//fundingtree/funder/id/text()"); + final String funderShortName = ftree.valueOf("//fundingtree/funder/shortname/text()"); + final String funderName = ftree.valueOf("//fundingtree/funder/name/text()"); + final String funderJurisdiction = ftree.valueOf("//fundingtree/funder/jurisdiction/text()"); + + return ""; + } + +} \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/XmlSerializationUtils.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/XmlSerializationUtils.java new file mode 100644 index 000000000..3088828ab --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/XmlSerializationUtils.java @@ -0,0 +1,151 @@ +package eu.dnetlib.dhp.graph.utils; + +import eu.dnetlib.dhp.schema.oaf.*; + +import static eu.dnetlib.dhp.graph.utils.GraphMappingUtils.removePrefix; +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.apache.commons.lang3.StringUtils.isNotBlank; + +public class XmlSerializationUtils { + + // XML 1.0 + // #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF] + private final static String xml10pattern = "[^" + + "\u0009\r\n" + + "\u0020-\uD7FF" + + "\uE000-\uFFFD" + + "\ud800\udc00-\udbff\udfff" + + "]"; + + public static String mapJournal(Journal j) { + final String attrs = new StringBuilder() + .append(attr("issn", j.getIssnPrinted())) + .append(attr("eissn", j.getIssnOnline())) + .append(attr("lissn", j.getIssnLinking())) + .append(attr("ep", j.getEp())) + .append(attr("iss", j.getIss())) + .append(attr("sp", j.getSp())) + .append(attr("vol", j.getVol())) + .toString() + .trim(); + + return new StringBuilder() + .append("") + .append(escapeXml(j.getName())) + .append("") + .toString(); + } + + private static String attr(final String name, final String value) { + return isNotBlank(value) ? 
+        return isNotBlank(value) ? name + "=\"" + escapeXml(value) + "\" " : "";
+    }
+
+    public static String mapStructuredProperty(String name, StructuredProperty t) {
+        return asXmlElement(name, t.getValue(), t.getQualifier(), t.getDataInfo() != null ? t.getDataInfo() : null);
+    }
+
+    public static String mapQualifier(String name, Qualifier q) {
+        return asXmlElement(name, "", q, null);
+    }
+
+    public static String escapeXml(final String value) {
+        return value
+                .replaceAll("&", "&amp;")
+                .replaceAll("<", "&lt;")
+                .replaceAll(">", "&gt;")
+                .replaceAll("\"", "&quot;")
+                .replaceAll("'", "&apos;")
+                .replaceAll(xml10pattern, "");
+    }
+
+    public static String parseDataInfo(final DataInfo dataInfo) {
+        return new StringBuilder()
+                .append("<datainfo>")
+                .append(asXmlElement("inferred", dataInfo.getInferred() + ""))
+                .append(asXmlElement("deletedbyinference", dataInfo.getDeletedbyinference() + ""))
+                .append(asXmlElement("trust", dataInfo.getTrust() + ""))
+                .append(asXmlElement("inferenceprovenance", dataInfo.getInferenceprovenance() + ""))
+                .append(asXmlElement("provenanceaction", null, dataInfo.getProvenanceaction(), null))
+                .append("</datainfo>")
+                .toString();
+    }
+
+    private static StringBuilder dataInfoAsAttributes(final StringBuilder sb, final DataInfo info) {
+        return sb
+                .append(attr("inferred", info.getInferred() != null ? info.getInferred().toString() : ""))
+                .append(attr("inferenceprovenance", info.getInferenceprovenance()))
+                .append(attr("provenanceaction", info.getProvenanceaction() != null ? info.getProvenanceaction().getClassid() : ""))
+                .append(attr("trust", info.getTrust()));
+    }
+
+    public static String mapKeyValue(final String name, final KeyValue kv) {
+        return new StringBuilder()
+                .append("<")
+                .append(name)
+                .append(" name=\"")
+                .append(escapeXml(kv.getValue()))
+                .append("\" id=\"")
+                .append(escapeXml(removePrefix(kv.getKey())))
+                .append("\"/>")
+                .toString();
+    }
+
+    public static String mapExtraInfo(final ExtraInfo e) {
+        return new StringBuilder("<extraInfo ")
+                .append("name=\"" + e.getName() + "\" ")
+                .append("typology=\"" + e.getTypology() + "\" ")
+                .append("provenance=\"" + e.getProvenance() + "\" ")
+                .append("trust=\"" + e.getTrust() + "\"")
+                .append(">")
+                .append(e.getValue())
+                .append("</extraInfo>")
+                .toString();
+    }
+
+    public static String asXmlElement(final String name, final String value) {
+        return asXmlElement(name, value, null, null);
+    }
+
+    public static String asXmlElement(final String name, final String value, final Qualifier q, final DataInfo info) {
+        StringBuilder sb = new StringBuilder();
+        sb.append("<");
+        sb.append(name);
+        if (q != null) {
+            sb.append(getAttributes(q));
+        }
+        if (info != null) {
+            sb
+                .append(" ")
+                .append(attr("inferred", info.getInferred() != null ? info.getInferred().toString() : ""))
+                .append(attr("inferenceprovenance", info.getInferenceprovenance()))
+                .append(attr("provenanceaction", info.getProvenanceaction() != null ? info.getProvenanceaction().getClassid() : ""))
+                .append(attr("trust", info.getTrust()));
+        }
+        if (isBlank(value)) {
+            sb.append("/>");
+            return sb.toString();
+        }
+
+        sb.append(">");
+        sb.append(escapeXml(value));
+        sb.append("</" + name + ">");
+
+        return sb.toString();
+    }
+
+    public static String getAttributes(final Qualifier q) {
+        if (q == null || q.isBlank()) return "";
+
+        return new StringBuilder(" ")
+                .append(attr("classid", q.getClassid()))
+                .append(attr("classname", q.getClassname()))
+                .append(attr("schemeid", q.getSchemeid()))
+                .append(attr("schemename", q.getSchemename()))
+                .toString();
+    }
+
+}
diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/META-INF/services/javax.xml.transform.TransformerFactory b/dhp-workflows/dhp-graph-provision/src/main/resources/META-INF/services/javax.xml.transform.TransformerFactory
new file mode 100644
index 000000000..b53ca855f
--- /dev/null
+++ b/dhp-workflows/dhp-graph-provision/src/main/resources/META-INF/services/javax.xml.transform.TransformerFactory
@@ -0,0 +1 @@
+net.sf.saxon.TransformerFactoryImpl
\ No newline at end of file
diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_graph_parameters.json b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_params_build_adjacency_lists.json
similarity index 65%
rename from dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_graph_parameters.json
rename to dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_params_build_adjacency_lists.json
index cbd4285bf..e63322028 100644
--- a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_graph_parameters.json
+++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_params_build_adjacency_lists.json
@@ -1,5 +1,6 @@
 [
   {"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true},
+  {"paramName":"is", "paramLongName":"isLookupUrl", "paramDescription": "URL of the isLookUp Service", "paramRequired": true},
   {"paramName":"o", "paramLongName":"outputPath", "paramDescription": "the path used to store temporary output files", "paramRequired": true},
-  {"paramName":"s", "paramLongName":"sourcePath", "paramDescription": "the path of the sequencial file to read", "paramRequired": true}
+  {"paramName":"s", "paramLongName":"sourcePath", "paramDescription": "the path of the sequence file to read", "paramRequired": true}
 ]
\ No newline at end of file
diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_params_update_index.json b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_params_update_index.json
new file mode 100644
index 000000000..0d45e9e29
--- /dev/null
+++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/input_params_update_index.json
@@ -0,0 +1,7 @@
+[
+  {"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true},
+  {"paramName":"is", "paramLongName":"isLookupUrl", "paramDescription": "URL of the isLookUp Service", "paramRequired": true},
+  {"paramName":"s", "paramLongName":"sourcePath", "paramDescription": "the path of the sequence file to read the XML records", "paramRequired": true},
+  {"paramName":"f", "paramLongName":"format", "paramDescription": "MDFormat name found in the IS profile", "paramRequired": true},
+  {"paramName":"b", "paramLongName":"batchSize", 
"paramDescription": "size of the batch of documents sent to solr", "paramRequired": false} +] diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml index 4b4d2c7bf..fee463868 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml +++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml @@ -26,7 +26,15 @@ - + + + + + ${wf:conf('reuseRecords') eq false} + ${wf:conf('reuseRecords') eq true} + + + Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}] @@ -36,7 +44,7 @@ ${jobTracker} ${nameNode} - yarn-cluster + yarn cluster build_adjacency_lists eu.dnetlib.dhp.graph.SparkXmlRecordBuilderJob @@ -47,14 +55,43 @@ --driver-memory=${sparkDriverMemory} --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" - --conf spark.sql.warehouse.dir="/user/hive/warehouse" --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} - -mt yarn-cluster + -mt yarn + -is ${isLookupUrl} --sourcePath${sourcePath} --outputPath${outputPath} + + + + + + + ${jobTracker} + ${nameNode} + yarn + cluster + to_solr_index + eu.dnetlib.dhp.graph.SparkXmlIndexingJob + dhp-graph-provision-${projectVersion}.jar + + --executor-memory ${sparkExecutorMemory} + --executor-cores ${sparkExecutorCoresForIndexing} + --driver-memory=${sparkDriverMemory} + --conf spark.executor.instances=${sparkExecutorInstances} + --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener" + --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener" + --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} + --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} + + -mt yarn + -is ${isLookupUrl} + --sourcePath${outputPath}/xml + --format${format} + --batchSize${batchSize} + diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/child.st b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/child.st new file mode 100644 index 000000000..89f81e16b --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/child.st @@ -0,0 +1,3 @@ +> + $metadata:{ it | $it$ }$ + \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/entity.st b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/entity.st new file mode 100644 index 000000000..d16f3c3e0 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/entity.st @@ -0,0 +1,10 @@ + + $metadata:{ it | $it$ }$ + + $rels:{ it | $it$ }$ + + + $children:{ it | $it$ }$ + + +$extrainfo:{ it | $it$ }$ \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/instance.st b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/instance.st new file mode 100644 index 000000000..64bed05b4 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/instance.st @@ -0,0 +1,4 @@ + + $metadata:{ it | $it$ }$ + $webresources:{ 
it | $it$ }$ + \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/record.st b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/record.st new file mode 100644 index 000000000..dea68eab8 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/record.st @@ -0,0 +1,17 @@ + + + +
+ $id$ + $dateofcollection$ + $dateoftransformation$ +
+ + + $it$ + + +
+
\ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/rel.st b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/rel.st new file mode 100644 index 000000000..af19ba497 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/rel.st @@ -0,0 +1,4 @@ + + $objIdentifier$ + $metadata:{ it | $it$ }$ + \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/webresource.st b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/webresource.st new file mode 100644 index 000000000..7ff6c5d7f --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/template/webresource.st @@ -0,0 +1,3 @@ + + $identifier$ + \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java b/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java index fdff4d984..a9d696bea 100644 --- a/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java +++ b/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java @@ -1,5 +1,8 @@ package eu.dnetlib.dhp.graph; +import eu.dnetlib.dhp.graph.model.EntityRelEntity; +import eu.dnetlib.dhp.graph.model.RelatedEntity; +import eu.dnetlib.dhp.graph.utils.GraphMappingUtils; import org.codehaus.jackson.map.ObjectMapper; import org.junit.Before; import org.junit.Test; @@ -23,23 +26,34 @@ public class MappingUtilsTest { final EntityRelEntity e = new ObjectMapper().readValue(in, EntityRelEntity.class); e.getSource().setType("datasource"); - final EntityRelEntity out = utils.pruneModel(e); + final EntityRelEntity out = utils.asRelatedEntity(e); + System.out.println(out); + + } + + //@Test + public void testOafMappingResult() throws IOException { + + final InputStreamReader in = new InputStreamReader(getClass().getResourceAsStream("result.json")); + final EntityRelEntity e = new ObjectMapper().readValue(in, EntityRelEntity.class); + + final EntityRelEntity out = utils.asRelatedEntity(e); System.out.println(out); } @Test - public void testOafMappinResult() throws IOException { + public void testOafMappingSoftware() throws IOException { - final InputStreamReader in = new InputStreamReader(getClass().getResourceAsStream("result.json")); + final InputStreamReader in = new InputStreamReader(getClass().getResourceAsStream("software.json")); final EntityRelEntity e = new ObjectMapper().readValue(in, EntityRelEntity.class); - e.getSource().setType("otherresearchproduct"); - final EntityRelEntity out = utils.pruneModel(e); + final EntityRelEntity out = utils.asRelatedEntity(e); System.out.println(out); } + @Test public void testParseRelatedEntity() throws IOException { diff --git a/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/XmlRecordFactoryTest.java b/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/XmlRecordFactoryTest.java new file mode 100644 index 000000000..2a3c343ec --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/XmlRecordFactoryTest.java @@ -0,0 +1,55 @@ +package eu.dnetlib.dhp.graph; + +import eu.dnetlib.dhp.graph.utils.ContextMapper; +import org.apache.commons.io.FileUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import 
org.apache.spark.sql.SparkSession; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; + +public class XmlRecordFactoryTest { + + private static final Log log = LogFactory.getLog(XmlRecordFactoryTest.class); + + private Path testDir; + + @Before + public void setup() throws IOException { + testDir = Files.createTempDirectory(getClass().getSimpleName()); + log.info("created test directory " + testDir.toString()); + } + + @After + public void tearDown() throws IOException { + FileUtils.deleteDirectory(testDir.toFile()); + log.info("deleted test directory " + testDir.toString()); + } + + @Test + public void testXmlSerialization() throws Exception { + + final SparkSession spark = SparkSession + .builder() + .appName(SparkXmlRecordBuilderJob.class.getSimpleName()) + .master("local[*]") + .getOrCreate(); + + final String inputDir = testDir.toString() + "/3_joined_entities"; + FileUtils.forceMkdir(new File(inputDir)); + FileUtils.copyFile(new File("/Users/claudio/Downloads/joined_entities-part-00000"), new File(inputDir + "/joined_entities-part-00000")); + + final ContextMapper ctx = ContextMapper.fromIS("https://dev-openaire.d4science.org:443/is/services/isLookUp"); + + final GraphJoiner g = new GraphJoiner(spark, ctx, inputDir, testDir.toString()); + + g.asXML(); + } + +} diff --git a/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/software.json b/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/software.json new file mode 100644 index 000000000..0065b6799 --- /dev/null +++ b/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/software.json @@ -0,0 +1 @@ 
+{"type":"software","entity":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"id":"50|od______2659::05817f64c43a918a07483340b5726f77","originalId":["oai:zenodo.org:204139"],"collectedfrom":[{"key":"10|opendoar____::358aee4cc897452c00244351e4d91f69","value":"ZENODO","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}}],"pid":[],"extraInfo":[],"author":[],"resulttype":{"classid":"software","classname":"software","schemeid":"dnet:result_typologies","schemename":"dnet:result_typologies"},"language":{"classid":"und","classname":"Undetermined","schemeid":"dnet:languages","schemename":"dnet:languages"},"country":[],"subject":[],"title":[],"relevantdate":[],"description":[],"dateofacceptance":{"value":"2016-01-01","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"publisher":{"value":"","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"embargoenddate":{"value":"","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"source":[],"fulltext":[],"format":[],"contributor":[],"resourcetype":{"classid":"","classname":"","schemeid":"","schemename":""},"coverage":[],"refereed":{"value":"","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"context":[],"instance":[{"license":{"value":"","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"accessright":{"classid":"OPEN","classname":"Open 
Access","schemeid":"dnet:access_modes","schemename":"dnet:access_modes"},"instancetype":{"classid":"0029","classname":"Software","schemeid":"dnet:publication_resource","schemename":"dnet:publication_resource"},"hostedby":{"key":"10|opendoar____::358aee4cc897452c00244351e4d91f69","value":"ZENODO","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"url":[],"distributionlocation":"","collectedfrom":{"key":"10|opendoar____::358aee4cc897452c00244351e4d91f69","value":"ZENODO","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"dateofacceptance":{"value":"2016-01-01","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}}}],"documentationUrl":[],"license":[],"codeRepositoryUrl":{"value":"","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"programmingLanguage":{"classid":"","classname":"","schemeid":"","schemename":""}},"links":[{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda__h2020::e2a38892773e6541ec7c07aa605ad581"},"relatedEntity":{"id":"40|corda__h2020::e2a38892773e6541ec7c07aa605ad581","type":"project","projectTitle":"Engaging the EGI Community towards an Open Science Commons","code":"654142","acronym":"EGI-Engage","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::H2020::RIAResearch and Innovation actionRIAec:h2020toasec__________::EC::H2020H2020Horizon 2020 Framework Programmeec:h2020fundings, dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}},{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda_______::4d31ccb13726266f9098129756e03f43"},"relatedEntity":{"id":"40|corda_______::4d31ccb13726266f9098129756e03f43","type":"project","projectTitle":"Common Operations of Environmental Research Infrastructures","code":"283465","acronym":"ENVRI","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::FP7::SP4::INFRAResearch InfrastructuresINFRAec:programec__________::EC::FP7::SP4SP4-CapacitiesSP4ec:specificprogramec__________::EC::FP7SEVENTH FRAMEWORK PROGRAMMEFP7ec:frameworkprogram, 
dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}},{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda_______::5af7655a8e0e871cf16072b4b6ab9b41"},"relatedEntity":{"id":"40|corda_______::5af7655a8e0e871cf16072b4b6ab9b41","type":"project","projectTitle":"Data e-Infrastructure Initiative for Fisheries Management and Conservation of Marine Living Resources","code":"283644","acronym":"IMARINE","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::FP7::SP4::INFRAResearch InfrastructuresINFRAec:programec__________::EC::FP7::SP4SP4-CapacitiesSP4ec:specificprogramec__________::EC::FP7SEVENTH FRAMEWORK PROGRAMMEFP7ec:frameworkprogram, dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}},{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda_______::e8da2e3e130ad3b1a650487d9ff126e4"},"relatedEntity":{"id":"40|corda_______::e8da2e3e130ad3b1a650487d9ff126e4","type":"project","projectTitle":"EU-Brazil Open Data and Cloud Computing e-Infrastructure for Biodiversity","code":"288754","acronym":"EUBRAZILOPENBIO","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::FP7::SP1::ICTInformation and Communication TechnologiesICTec:programec__________::EC::FP7::SP1SP1-CooperationSP1ec:specificprogramec__________::EC::FP7SEVENTH FRAMEWORK PROGRAMMEFP7ec:frameworkprogram, dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}},{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda_______::15463ed3cba51f042181197cfabb2ff5"},"relatedEntity":{"id":"40|corda_______::15463ed3cba51f042181197cfabb2ff5","type":"project","projectTitle":"Data Infrastructure Ecosystem for Science","code":"239019","acronym":"D4SCIENCE-II","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::FP7::SP4::INFRAResearch 
InfrastructuresINFRAec:programec__________::EC::FP7::SP4SP4-CapacitiesSP4ec:specificprogramec__________::EC::FP7SEVENTH FRAMEWORK PROGRAMMEFP7ec:frameworkprogram, dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}},{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda__h2020::4d46893df18bb77f5d817b8ce98ac56c"},"relatedEntity":{"id":"40|corda__h2020::4d46893df18bb77f5d817b8ce98ac56c","type":"project","projectTitle":"Pooling Activities, Resources and Tools for Heritage E-research Networking, Optimization and Synergies","code":"654119","acronym":"PARTHENOS","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::H2020::RIAResearch and Innovation actionRIAec:h2020toasec__________::EC::H2020H2020Horizon 2020 Framework Programmeec:h2020fundings, dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}},{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda_______::7f18b83690e3a18134b9a3db66d882d3"},"relatedEntity":{"id":"40|corda_______::7f18b83690e3a18134b9a3db66d882d3","type":"project","projectTitle":"DIstributed colLaboratories Infrastructure on Grid ENabled Technology 4 Science","code":"212488","acronym":"D4SCIENCE","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::FP7::SP4::INFRAResearch InfrastructuresINFRAec:programec__________::EC::FP7::SP4SP4-CapacitiesSP4ec:specificprogramec__________::EC::FP7SEVENTH FRAMEWORK PROGRAMMEFP7ec:frameworkprogram, dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}},{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda__h2020::6729c0ee95de7724deb60454bb4179de"},"relatedEntity":{"id":"40|corda__h2020::6729c0ee95de7724deb60454bb4179de","type":"project","projectTitle":"Building Research environments for fostering Innovation, Decision making, Governance and Education to support Blue 
growth","code":"675680","acronym":"BlueBRIDGE","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::H2020::RIAResearch and Innovation actionRIAec:h2020toasec__________::EC::H2020H2020Horizon 2020 Framework Programmeec:h2020fundings, dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}},{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda__h2020::0da81b3ad78047f577dd405e8a2d7f07"},"relatedEntity":{"id":"40|corda__h2020::0da81b3ad78047f577dd405e8a2d7f07","type":"project","projectTitle":"Environmental Research Infrastructures Providing Shared Solutions for Science and Society","code":"654182","acronym":"ENVRI PLUS","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::H2020::RIAResearch and Innovation actionRIAec:h2020toasec__________::EC::H2020H2020Horizon 2020 Framework Programmeec:h2020fundings, dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}},{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda__h2020::e7f5e7755409fc74eea9d168ab795634"},"relatedEntity":{"id":"40|corda__h2020::e7f5e7755409fc74eea9d168ab795634","type":"project","projectTitle":"SoBigData Research Infrastructure","code":"654024","acronym":"SoBigData","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::H2020::RIAResearch and Innovation actionRIAec:h2020toasec__________::EC::H2020H2020Horizon 2020 Framework Programmeec:h2020fundings, dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}}]} \ No newline at end of file diff --git a/pom.xml b/pom.xml index aedf5ebff..f14877500 100644 --- a/pom.xml +++ b/pom.xml @@ -96,6 +96,12 @@ ${dhp.hadoop.version} provided
+ + org.apache.hadoop + hadoop-common + ${dhp.hadoop.version} + provided + org.apache.hadoop hadoop-client @@ -149,7 +155,7 @@ net.sf.saxon Saxon-HE - 9.5.1-5 + 9.9.1-6 @@ -170,6 +176,51 @@ 1.1.6 + + com.mycila.xmltool + xmltool + 3.3 + + + + org.apache.solr + solr-solrj + 7.5.0 + + + * + * + + + + + com.lucidworks.spark + spark-solr + 3.6.0 + + + * + * + + + + + + org.apache.httpcomponents + httpclient + 4.5.3 + + + org.noggit + noggit + 0.8 + + + org.apache.zookeeper + zookeeper + 3.4.11 + + net.schmizz sshj @@ -202,8 +253,17 @@ dnet-pace-core 4.0.0-SNAPSHOT + + eu.dnetlib + cnr-rmi-api + [2.0.0,3.0.0) + - + + org.apache.cxf + cxf-rt-transports-http + 3.1.5 + javax.persistence javax.persistence-api @@ -231,6 +291,11 @@ secondstring 1.0.0 + + org.antlr + stringtemplate + 4.0 + org.apache.oozie From 56d1810a66063b886a59e501790e064e6ac26750 Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Fri, 14 Feb 2020 12:28:52 +0100 Subject: [PATCH 17/21] working procedure for records indexing using Spark, via lib com.lucidworks.spark:spark-solr --- .../eu/dnetlib/dhp/utils/saxon/PickFirst.java | 19 ++++++++++----- .../job-override.properties | 7 +++--- dhp-workflows/dhp-graph-provision/pom.xml | 7 +++++- .../dhp/graph/SparkXmlIndexingJob.java | 2 +- .../dhp/graph/utils/XmlRecordFactory.java | 24 +++++++++---------- .../dnetlib/dhp/graph/oozie_app/workflow.xml | 3 +-- dhp-workflows/pom.xml | 1 + pom.xml | 5 ++++ 8 files changed, 42 insertions(+), 26 deletions(-) diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/PickFirst.java b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/PickFirst.java index 1f209bed0..a221e37c6 100644 --- a/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/PickFirst.java +++ b/dhp-common/src/main/java/eu/dnetlib/dhp/utils/saxon/PickFirst.java @@ -1,6 +1,7 @@ package eu.dnetlib.dhp.utils.saxon; import net.sf.saxon.expr.XPathContext; +import net.sf.saxon.om.Item; import net.sf.saxon.om.Sequence; import net.sf.saxon.trans.XPathException; import net.sf.saxon.value.SequenceType; @@ -19,15 +20,21 @@ public class PickFirst extends AbstractExtensionFunction { if (arguments == null | arguments.length == 0) { return new StringValue(""); } - String s1 = arguments[0].head().getStringValue(); - if (arguments.length > 1) { - String s2 = arguments[1].head().getStringValue(); + final String s1 = getValue(arguments[0]); + final String s2 = getValue(arguments[1]); - return new StringValue(StringUtils.isNotBlank(s1) ? s1 : StringUtils.isNotBlank(s2) ? s2 : ""); - } else { - return new StringValue(StringUtils.isNotBlank(s1) ? s1 : ""); + return new StringValue(StringUtils.isNotBlank(s1) ? s1 : StringUtils.isNotBlank(s2) ? 
s2 : ""); + } + + private String getValue(final Sequence arg) throws XPathException { + if (arg != null) { + final Item item = arg.head(); + if (item != null) { + return item.getStringValue(); + } } + return ""; } @Override diff --git a/dhp-workflows/dhp-graph-provision/job-override.properties b/dhp-workflows/dhp-graph-provision/job-override.properties index c7b173a14..b5ab07982 100644 --- a/dhp-workflows/dhp-graph-provision/job-override.properties +++ b/dhp-workflows/dhp-graph-provision/job-override.properties @@ -5,7 +5,6 @@ isLookupUrl=http://beta.services.openaire.eu:8280/is/services/isLookUp?wsdl sourcePath=/tmp/db_openaireplus_services.export_dhp.2020.02.03 outputPath=/tmp/openaire_provision format=TMF -batchSize=1000 -sparkExecutorCoresForIndexing=1 -sparkExecutorInstances=10 -reuseRecords=false \ No newline at end of file +batchSize=2000 +sparkExecutorCoresForIndexing=64 +reuseRecords=true \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/pom.xml b/dhp-workflows/dhp-graph-provision/pom.xml index 5e6beb249..f74c9b666 100644 --- a/dhp-workflows/dhp-graph-provision/pom.xml +++ b/dhp-workflows/dhp-graph-provision/pom.xml @@ -3,7 +3,7 @@ dhp-workflows eu.dnetlib.dhp - 1.0.5-SNAPSHOT + 1.1.6-SNAPSHOT 4.0.0 @@ -52,6 +52,11 @@ org.apache.httpcomponents httpclient + + org.apache.httpcomponents + httpmime + + org.noggit noggit diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlIndexingJob.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlIndexingJob.java index e13f8bbe2..2775d93b4 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlIndexingJob.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/SparkXmlIndexingJob.java @@ -45,7 +45,7 @@ public class SparkXmlIndexingJob { final String inputPath = parser.get("sourcePath"); final String isLookupUrl = parser.get("isLookupUrl"); final String format = parser.get("format"); - final Integer batchSize = parser.getObjectMap().containsKey("batckSize") ? Integer.valueOf(parser.get("batchSize")) : DEFAULT_BATCH_SIZE; + final Integer batchSize = parser.getObjectMap().containsKey("batchSize") ? 
Integer.valueOf(parser.get("batchSize")) : DEFAULT_BATCH_SIZE; final ISLookUpService isLookup = ISLookupClientFactory.getLookUpService(isLookupUrl); final String fields = getLayoutSource(isLookup, format); diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/XmlRecordFactory.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/XmlRecordFactory.java index bd4f8ec6c..abcf2a7ec 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/XmlRecordFactory.java +++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/graph/utils/XmlRecordFactory.java @@ -129,6 +129,9 @@ public class XmlRecordFactory implements Serializable { .map(t -> mapStructuredProperty("title", t)) .collect(Collectors.toList())); } + if (r.getBestaccessright() != null) { + metadata.add(mapQualifier("bestaccessright", r.getBestaccessright())); + } if (r.getAuthor() != null) { metadata.addAll(r.getAuthor() .stream() @@ -230,15 +233,6 @@ public class XmlRecordFactory implements Serializable { if (r.getResourcetype() != null) { metadata.add(mapQualifier("resourcetype", r.getResourcetype())); } - if (r.getRefereed() != null) { - metadata.add(asXmlElement("refereed", r.getRefereed().getValue())); - } - if (r.getProcessingchargeamount() != null) { - metadata.add(asXmlElement("processingchargeamount", r.getProcessingchargeamount().getValue())); - } - if (r.getProcessingchargecurrency() != null) { - metadata.add(asXmlElement("processingchargecurrency", r.getProcessingchargecurrency().getValue())); - } metadata.add(mapQualifier("bestaccessright", getBestAccessright(r))); @@ -544,9 +538,6 @@ public class XmlRecordFactory implements Serializable { if (p.getDuration() != null) { metadata.add(asXmlElement("duration", p.getDuration().getValue())); } - if (p.getEcsc39() != null) { - metadata.add(asXmlElement("ecsc39", p.getEcsc39().getValue())); - } if (p.getEcarticle29_3() != null) { metadata.add(asXmlElement("ecarticle29_3", p.getEcarticle29_3().getValue())); } @@ -759,6 +750,15 @@ public class XmlRecordFactory implements Serializable { if (isNotBlank(instance.getDistributionlocation())) { fields.add(asXmlElement("distributionlocation", instance.getDistributionlocation())); } + if (instance.getRefereed() != null && isNotBlank(instance.getRefereed().getValue())) { + fields.add(asXmlElement("refereed", instance.getRefereed().getValue())); + } + if (instance.getProcessingchargeamount() != null && isNotBlank(instance.getProcessingchargeamount().getValue())) { + fields.add(asXmlElement("processingchargeamount", instance.getProcessingchargeamount().getValue())); + } + if (instance.getProcessingchargecurrency() != null && isNotBlank(instance.getProcessingchargecurrency().getValue())) { + fields.add(asXmlElement("processingchargecurrency", instance.getProcessingchargecurrency().getValue())); + } children.add(templateFactory.getInstance(instance.getHostedby().getKey(), fields, instance.getUrl())); } diff --git a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml index fee463868..350358944 100644 --- a/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml +++ b/dhp-workflows/dhp-graph-provision/src/main/resources/eu/dnetlib/dhp/graph/oozie_app/workflow.xml @@ -78,9 +78,8 @@ dhp-graph-provision-${projectVersion}.jar --executor-memory ${sparkExecutorMemory} - 
--executor-cores ${sparkExecutorCoresForIndexing}
                            --driver-memory=${sparkDriverMemory}
-                           --conf spark.executor.instances=${sparkExecutorInstances}
+                           --conf spark.dynamicAllocation.maxExecutors=${sparkExecutorCoresForIndexing}
                            --conf spark.extraListeners="com.cloudera.spark.lineage.NavigatorAppListener"
                            --conf spark.sql.queryExecutionListeners="com.cloudera.spark.lineage.NavigatorQueryListener"
                            --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
diff --git a/dhp-workflows/pom.xml b/dhp-workflows/pom.xml
index 59f06bdc3..05bfe677d 100644
--- a/dhp-workflows/pom.xml
+++ b/dhp-workflows/pom.xml
@@ -18,6 +18,7 @@
         dhp-distcp
         dhp-graph-mapper
         dhp-dedup
+        dhp-graph-provision
diff --git a/pom.xml b/pom.xml
index 4e12ba1a9..8b01741d6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -210,6 +210,11 @@
                 httpclient
                 4.5.3
 
+            
+                org.apache.httpcomponents
+                httpmime
+                4.5.3
+            
 
                 org.noggit
                 noggit
 
From 5bae30f399453d04bff8c48bd4a549748fcdda24 Mon Sep 17 00:00:00 2001
From: Claudio Atzori 
Date: Mon, 17 Feb 2020 13:38:33 +0100
Subject: [PATCH 18/21] adding readme for dhp-schemas
---
 dhp-schemas/README.md | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/dhp-schemas/README.md b/dhp-schemas/README.md
index 473ad4cf1..7431cda42 100644
--- a/dhp-schemas/README.md
+++ b/dhp-schemas/README.md
@@ -1,3 +1,11 @@
 Description of the project
 --------------------------
-This project defines **serialization schemas** of Avro data store files that are used to pass data between workflow nodes in the system.
+This project defines the **object schemas** of the OpenAIRE main entities and of the relationships that occur among them.
+Namely, it defines the model for:
+
+- **research product (result)** which subclasses into publication, dataset, other research product, software
+- **data source** object describing the data provider (institutional repository, aggregators, CRIS systems)
+- **organization** research bodies managing a data source or participating in a research project
+- **project** a research project
+
+The serialization of such objects (data store files) is used to pass data between workflow nodes in the processing pipeline.
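
The README above describes the round trip of these schema objects between workflow nodes only in prose. The following minimal sketch (not part of the patch series) illustrates it, assuming Jackson Databind on the classpath next to the dhp-schemas classes; the example package, class name and identifier are made up for illustration:

package eu.dnetlib.dhp.schema.example; // hypothetical package, illustration only

import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.schema.oaf.Publication;

public class SchemaSerializationExample {

    public static void main(String[] args) throws Exception {
        final ObjectMapper mapper = new ObjectMapper();

        // a producer node serializes one entity per line of a text/sequence file...
        final Publication p = new Publication();
        p.setId("50|example_____::0000000000000000000000000000dead"); // made-up identifier
        final String json = mapper.writeValueAsString(p);

        // ...and a downstream node parses it back into the typed model
        final Publication parsed = mapper.readValue(json, Publication.class);
        System.out.println(parsed.getId());
    }
}
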
From 6a288625e58eb7252ed7dbfd16aa0ae709168438 Mon Sep 17 00:00:00 2001
From: Claudio Atzori 
Date: Mon, 17 Feb 2020 15:04:33 +0100
Subject: [PATCH 19/21] fixed workflow outgoing node
---
 .../resources/eu/dnetlib/dhp/migration/oozie_app/workflow.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/oozie_app/workflow.xml b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/oozie_app/workflow.xml
index 309a6d90f..dd6998db0 100644
--- a/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-aggregation/src/main/resources/eu/dnetlib/dhp/migration/oozie_app/workflow.xml
@@ -113,7 +113,7 @@
             -pguser${postgresUser}
             -pgpasswd${postgresPassword}
 
- 
+ 
 
 
From 0f364605ffa63941e622225eca3050a5b2e287e0 Mon Sep 17 00:00:00 2001
From: Claudio Atzori 
Date: Tue, 18 Feb 2020 11:48:19 +0100
Subject: [PATCH 20/21] removed stale tests, need to reimplement them anyway
---
 .../dnetlib/dhp/graph/MappingUtilsTest.java   | 66 -------------------
 .../dhp/graph/XmlRecordFactoryTest.java       | 55 ----------------
 2 files changed, 121 deletions(-)
 delete mode 100644 dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java
 delete mode 100644 dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/XmlRecordFactoryTest.java

diff --git a/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java b/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java
deleted file mode 100644
index a9d696bea..000000000
--- a/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/MappingUtilsTest.java
+++ /dev/null
@@ -1,66 +0,0 @@
-package eu.dnetlib.dhp.graph;
-
-import eu.dnetlib.dhp.graph.model.EntityRelEntity;
-import eu.dnetlib.dhp.graph.model.RelatedEntity;
-import eu.dnetlib.dhp.graph.utils.GraphMappingUtils;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.io.InputStreamReader;
-
-public class MappingUtilsTest {
-
-    private GraphMappingUtils utils;
-
-    @Before
-    public void setUp() {
-        utils = new GraphMappingUtils();
-    }
-
-    @Test
-    public void testOafMappingDatasource() throws IOException {
-
-        final InputStreamReader in = new InputStreamReader(getClass().getResourceAsStream("datasource.json"));
-        final EntityRelEntity e = new ObjectMapper().readValue(in, EntityRelEntity.class);
-        e.getSource().setType("datasource");
-
-        final EntityRelEntity out = utils.asRelatedEntity(e);
-        System.out.println(out);
-
-    }
-
-    //@Test
-    public void testOafMappingResult() throws IOException {
-
-        final InputStreamReader in = new InputStreamReader(getClass().getResourceAsStream("result.json"));
-        final EntityRelEntity e = new ObjectMapper().readValue(in, EntityRelEntity.class);
-
-        final EntityRelEntity out = utils.asRelatedEntity(e);
-        System.out.println(out);
-
-    }
-
-    @Test
-    public void testOafMappingSoftware() throws IOException {
-
-        final InputStreamReader in = new InputStreamReader(getClass().getResourceAsStream("software.json"));
-        final EntityRelEntity e = new ObjectMapper().readValue(in, EntityRelEntity.class);
-
-        final EntityRelEntity out = utils.asRelatedEntity(e);
-        System.out.println(out);
-
-    }
-
-
-    @Test
-    public void testParseRelatedEntity() throws IOException {
-
-        final InputStreamReader in = new 
InputStreamReader(getClass().getResourceAsStream("related_entity.json")); - final RelatedEntity e = new ObjectMapper().readValue(in, RelatedEntity.class); - - System.out.println(e); - - } -} diff --git a/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/XmlRecordFactoryTest.java b/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/XmlRecordFactoryTest.java deleted file mode 100644 index 2a3c343ec..000000000 --- a/dhp-workflows/dhp-graph-provision/src/test/java/eu/dnetlib/dhp/graph/XmlRecordFactoryTest.java +++ /dev/null @@ -1,55 +0,0 @@ -package eu.dnetlib.dhp.graph; - -import eu.dnetlib.dhp.graph.utils.ContextMapper; -import org.apache.commons.io.FileUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.spark.sql.SparkSession; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; - -public class XmlRecordFactoryTest { - - private static final Log log = LogFactory.getLog(XmlRecordFactoryTest.class); - - private Path testDir; - - @Before - public void setup() throws IOException { - testDir = Files.createTempDirectory(getClass().getSimpleName()); - log.info("created test directory " + testDir.toString()); - } - - @After - public void tearDown() throws IOException { - FileUtils.deleteDirectory(testDir.toFile()); - log.info("deleted test directory " + testDir.toString()); - } - - @Test - public void testXmlSerialization() throws Exception { - - final SparkSession spark = SparkSession - .builder() - .appName(SparkXmlRecordBuilderJob.class.getSimpleName()) - .master("local[*]") - .getOrCreate(); - - final String inputDir = testDir.toString() + "/3_joined_entities"; - FileUtils.forceMkdir(new File(inputDir)); - FileUtils.copyFile(new File("/Users/claudio/Downloads/joined_entities-part-00000"), new File(inputDir + "/joined_entities-part-00000")); - - final ContextMapper ctx = ContextMapper.fromIS("https://dev-openaire.d4science.org:443/is/services/isLookUp"); - - final GraphJoiner g = new GraphJoiner(spark, ctx, inputDir, testDir.toString()); - - g.asXML(); - } - -} From ed76521d9b0d5c761262997aa064d980bda46dd5 Mon Sep 17 00:00:00 2001 From: Claudio Atzori Date: Tue, 18 Feb 2020 11:50:39 +0100 Subject: [PATCH 21/21] removed stale test resources, will be re-added later on --- .../src/test/resources/eu/dnetlib/dhp/graph/datasource.json | 1 - .../test/resources/eu/dnetlib/dhp/graph/related_entity.json | 5 ----- .../src/test/resources/eu/dnetlib/dhp/graph/result.json | 1 - .../src/test/resources/eu/dnetlib/dhp/graph/software.json | 1 - 4 files changed, 8 deletions(-) delete mode 100644 dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/datasource.json delete mode 100644 dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/related_entity.json delete mode 100644 dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/result.json delete mode 100644 dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/software.json diff --git a/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/datasource.json b/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/datasource.json deleted file mode 100644 index c26154c1e..000000000 --- a/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/datasource.json +++ /dev/null @@ -1 +0,0 @@ 
-{"source":{"sourceId":"10|CRIS_UNS____::f66f1bd369679b5b077dcdf006089556","targetId":null,"deleted":false,"oaf":"{\"datasourcetype\":{\"classid\":\"crissystem\",\"classname\":\"CRIS System\",\"schemeid\":\"dnet:datasource_typologies\",\"schemename\":\"dnet:datasource_typologies\"},\"openairecompatibility\":{\"classid\":\"openaire-cris_1.1\",\"classname\":\"OpenAIRE CRIS v1.1\",\"schemeid\":\"dnet:datasourceCompatibilityLevel\",\"schemename\":\"dnet:datasourceCompatibilityLevel\"},\"officialname\":{\"value\":\"CRIS UNS (Current Research Information System University of Novi Sad)\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"englishname\":{\"value\":\"CRIS UNS (Current Research Information System University of Novi Sad)\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"websiteurl\":{\"value\":\"https://cris.uns.ac.rs/\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"logourl\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"contactemail\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"namespaceprefix\":{\"value\":\"CrisUnsNoviS\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"latitude\":{\"value\":\"0.0\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"longitude\":{\"value\":\"0.0\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"dateofvalidation\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"description\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"subjects\":[],\"odnumberofitems\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"odnumberofitemsdate\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\
",\"schemeid\":\"\",\"schemename\":\"\"}}},\"odpolicies\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"odlanguages\":[],\"odcontenttypes\":[],\"accessinfopackage\":[{\"value\":\"https://cris.uns.ac.rs/OAIHandlerOpenAIRECRIS\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}}],\"releasestartdate\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"releaseenddate\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"missionstatementurl\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"dataprovider\":{\"value\":false,\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"serviceprovider\":{\"value\":false,\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"databaseaccesstype\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"datauploadtype\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"databaseaccessrestriction\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"datauploadrestriction\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"versioning\":{\"value\":false,\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"citationguidelineurl\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"qualitymanagementkind\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\
"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"pidsystems\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"certificates\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"policies\":[],\"journal\":{\"name\":\"\",\"issnPrinted\":\"\",\"issnOnline\":\"\",\"issnLinking\":\"\",\"ep\":\"\",\"iss\":\"\",\"sp\":\"\",\"vol\":\"\",\"edition\":\"\",\"conferenceplace\":\"\",\"conferencedate\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"id\":\"10|CRIS_UNS____::f66f1bd369679b5b077dcdf006089556\",\"originalId\":[\"CRIS_UNS____::openaire\"],\"collectedfrom\":[{\"key\":\"\",\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}}],\"pid\":[],\"extraInfo\":[],\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"sysimport:crosswalk:entityregistry\",\"classname\":\"sysimport:crosswalk:entityregistry\",\"schemeid\":\"dnet:provenance_actions\",\"schemename\":\"dnet:provenance_actions\"}},\"lastupdatetimestamp\":0}"},"relation":null,"target":null} \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/related_entity.json b/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/related_entity.json deleted file mode 100644 index 25c92baa3..000000000 --- a/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/related_entity.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "id": "20|nih_________::6b8108b6d6399f7163a6a7ccdd0efc2d", - "type": "organization", - "legalname": "MCGILL UNIVERSITY" -} \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/result.json b/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/result.json deleted file mode 100644 index 5d6c3f29b..000000000 --- a/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/result.json +++ /dev/null @@ -1 +0,0 @@ -{"source":{"sourceId":"50|od_______165::5642f79c597bac8029fde12a80f75412","targetId":null,"deleted":true,"type":"otherresearchproduct","oaf":"{\"contactperson\":[],\"contactgroup\":[],\"tool\":[],\"author\":[{\"fullname\":\"Cartier, Adrien\",\"name\":\"Adrien\",\"surname\":\"Cartier\",\"rank\":1,\"pid\":[],\"affiliation\":[]},{\"fullname\":\"Larroudé, Philippe\",\"name\":\"Philippe\",\"surname\":\"Larroudé\",\"rank\":2,\"pid\":[],\"affiliation\":[]},{\"fullname\":\"Héquette, 
Arnaud\",\"name\":\"Arnaud\",\"surname\":\"Héquette\",\"rank\":3,\"pid\":[],\"affiliation\":[]}],\"resulttype\":{\"classid\":\"other\",\"classname\":\"other\",\"schemeid\":\"dnet:result_typologies\",\"schemename\":\"dnet:result_typologies\"},\"language\":{\"classid\":\"eng\",\"classname\":\"English\",\"schemeid\":\"dnet:languages\",\"schemename\":\"dnet:languages\"},\"country\":[],\"subject\":[{\"value\":\"[SDU.STU.OC] Sciences of the Universe/Earth Sciences/Oceanography\",\"qualifier\":{\"classid\":\"keyword\",\"classname\":\"keyword\",\"schemeid\":\"dnet:subject_classification_typologies\",\"schemename\":\"dnet:subject_classification_typologies\"},\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}}],\"title\":[{\"value\":\"Longshore Sediment Transport Measurements on Sandy Macrotidal Beaches Compared with Sediment Transport Formulae\",\"qualifier\":{\"classid\":\"main title\",\"classname\":\"main title\",\"schemeid\":\"dnet:dataCite_title\",\"schemename\":\"dnet:dataCite_title\"},\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}}],\"relevantdate\":[],\"description\":[{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}}],\"dateofacceptance\":{\"value\":\"2013-03-13\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"publisher\":{\"value\":\"intech\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"embargoenddate\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"source\":[{\"value\":\"Sediment Transport Processes and Their Modelling Applications\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},{\"value\":\"https://hal.archives-ouvertes.fr/hal-00824453\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},{\"value\":\"Sediment Transport Processes and Their Modelling Applications, intech, chapitre 2, 2013, 978-953-51-1039-2,. 
\\u0026lt;10.5772/51023\\u0026gt;\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}}],\"fulltext\":[],\"format\":[],\"contributor\":[{\"value\":\"Equipe Morphodynamique des littoraux (Dunkerque) ; Laboratoire d\\u0027Océanologie et de Géosciences (LOG) ; Université du Littoral Côte d\\u0027Opale - Université Lille I - Sciences et technologies - CNRS - Université du Littoral Côte d\\u0027Opale - Université Lille I - Sciences et technologies - CNRS\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},{\"value\":\"Laboratoire des écoulements géophysiques et industriels (LEGI) ; Université Joseph Fourier - Grenoble I - Institut polytechnique de Grenoble (Grenoble INP) - CNRS\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},{\"value\":\"Laboratoire d\\u0027Océanologie et de Géosciences (LOG) ; Université du Littoral Côte d\\u0027Opale - Université Lille I - Sciences et technologies - CNRS\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}}],\"resourcetype\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"},\"coverage\":[],\"refereed\":{\"value\":\"\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}},\"context\":[],\"id\":\"50|od_______165::5642f79c597bac8029fde12a80f75412\",\"originalId\":[\"oai:HAL:hal-00824453v1\"],\"collectedfrom\":[{\"key\":\"10|opendoar____::9766527f2b5d3e95d4a733fcfb77bd7e\",\"value\":\"INRIA a CCSD electronic archive server\",\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}}],\"pid\":[{\"value\":\"10.5772/51023\",\"qualifier\":{\"classid\":\"doi\",\"classname\":\"doi\",\"schemeid\":\"dnet:pid_types\",\"schemename\":\"dnet:pid_types\"},\"dataInfo\":{\"invisible\":false,\"inferred\":false,\"deletedbyinference\":false,\"inferenceprovenance\":\"\",\"provenanceaction\":{\"classid\":\"\",\"classname\":\"\",\"schemeid\":\"\",\"schemename\":\"\"}}}],\"extraInfo\":[],\"dataInfo\":{\"invisible\":false,\"inferred\":true,\"deletedbyinference\":true,\"inferenceprovenance\":\"dedup-similarity-result-levenstein\",\"provenanceaction\":{\"classid\":\"sysimport:crosswalk:repository\",\"classname\":\"sysimport:crosswalk:repository\",\"schemeid\":\"dnet:provenanceActions\",\"schemename\":\"dnet:provenanceActions\"}},\"lastupdatetimestamp\":0}"},"relation":null,"target":null} \ No newline at end of file diff --git a/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/software.json b/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/software.json deleted file mode 100644 index 0065b6799..000000000 --- 
a/dhp-workflows/dhp-graph-provision/src/test/resources/eu/dnetlib/dhp/graph/software.json +++ /dev/null @@ -1 +0,0 @@ -{"type":"software","entity":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"id":"50|od______2659::05817f64c43a918a07483340b5726f77","originalId":["oai:zenodo.org:204139"],"collectedfrom":[{"key":"10|opendoar____::358aee4cc897452c00244351e4d91f69","value":"ZENODO","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}}],"pid":[],"extraInfo":[],"author":[],"resulttype":{"classid":"software","classname":"software","schemeid":"dnet:result_typologies","schemename":"dnet:result_typologies"},"language":{"classid":"und","classname":"Undetermined","schemeid":"dnet:languages","schemename":"dnet:languages"},"country":[],"subject":[],"title":[],"relevantdate":[],"description":[],"dateofacceptance":{"value":"2016-01-01","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"publisher":{"value":"","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"embargoenddate":{"value":"","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"source":[],"fulltext":[],"format":[],"contributor":[],"resourcetype":{"classid":"","classname":"","schemeid":"","schemename":""},"coverage":[],"refereed":{"value":"","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"context":[],"instance":[{"license":{"value":"","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"accessright":{"classid":"OPEN","classname":"Open 
Access","schemeid":"dnet:access_modes","schemename":"dnet:access_modes"},"instancetype":{"classid":"0029","classname":"Software","schemeid":"dnet:publication_resource","schemename":"dnet:publication_resource"},"hostedby":{"key":"10|opendoar____::358aee4cc897452c00244351e4d91f69","value":"ZENODO","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"url":[],"distributionlocation":"","collectedfrom":{"key":"10|opendoar____::358aee4cc897452c00244351e4d91f69","value":"ZENODO","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"dateofacceptance":{"value":"2016-01-01","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}}}],"documentationUrl":[],"license":[],"codeRepositoryUrl":{"value":"","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"programmingLanguage":{"classid":"","classname":"","schemeid":"","schemename":""}},"links":[{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda__h2020::e2a38892773e6541ec7c07aa605ad581"},"relatedEntity":{"id":"40|corda__h2020::e2a38892773e6541ec7c07aa605ad581","type":"project","projectTitle":"Engaging the EGI Community towards an Open Science Commons","code":"654142","acronym":"EGI-Engage","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::H2020::RIAResearch and Innovation actionRIAec:h2020toasec__________::EC::H2020H2020Horizon 2020 Framework Programmeec:h2020fundings, dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}},{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda_______::4d31ccb13726266f9098129756e03f43"},"relatedEntity":{"id":"40|corda_______::4d31ccb13726266f9098129756e03f43","type":"project","projectTitle":"Common Operations of Environmental Research Infrastructures","code":"283465","acronym":"ENVRI","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::FP7::SP4::INFRAResearch InfrastructuresINFRAec:programec__________::EC::FP7::SP4SP4-CapacitiesSP4ec:specificprogramec__________::EC::FP7SEVENTH FRAMEWORK PROGRAMMEFP7ec:frameworkprogram, 
dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}},{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda_______::5af7655a8e0e871cf16072b4b6ab9b41"},"relatedEntity":{"id":"40|corda_______::5af7655a8e0e871cf16072b4b6ab9b41","type":"project","projectTitle":"Data e-Infrastructure Initiative for Fisheries Management and Conservation of Marine Living Resources","code":"283644","acronym":"IMARINE","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::FP7::SP4::INFRAResearch InfrastructuresINFRAec:programec__________::EC::FP7::SP4SP4-CapacitiesSP4ec:specificprogramec__________::EC::FP7SEVENTH FRAMEWORK PROGRAMMEFP7ec:frameworkprogram, dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}},{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda_______::e8da2e3e130ad3b1a650487d9ff126e4"},"relatedEntity":{"id":"40|corda_______::e8da2e3e130ad3b1a650487d9ff126e4","type":"project","projectTitle":"EU-Brazil Open Data and Cloud Computing e-Infrastructure for Biodiversity","code":"288754","acronym":"EUBRAZILOPENBIO","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::FP7::SP1::ICTInformation and Communication TechnologiesICTec:programec__________::EC::FP7::SP1SP1-CooperationSP1ec:specificprogramec__________::EC::FP7SEVENTH FRAMEWORK PROGRAMMEFP7ec:frameworkprogram, dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}},{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda_______::15463ed3cba51f042181197cfabb2ff5"},"relatedEntity":{"id":"40|corda_______::15463ed3cba51f042181197cfabb2ff5","type":"project","projectTitle":"Data Infrastructure Ecosystem for Science","code":"239019","acronym":"D4SCIENCE-II","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::FP7::SP4::INFRAResearch 
InfrastructuresINFRAec:programec__________::EC::FP7::SP4SP4-CapacitiesSP4ec:specificprogramec__________::EC::FP7SEVENTH FRAMEWORK PROGRAMMEFP7ec:frameworkprogram, dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}},{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda__h2020::4d46893df18bb77f5d817b8ce98ac56c"},"relatedEntity":{"id":"40|corda__h2020::4d46893df18bb77f5d817b8ce98ac56c","type":"project","projectTitle":"Pooling Activities, Resources and Tools for Heritage E-research Networking, Optimization and Synergies","code":"654119","acronym":"PARTHENOS","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::H2020::RIAResearch and Innovation actionRIAec:h2020toasec__________::EC::H2020H2020Horizon 2020 Framework Programmeec:h2020fundings, dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}},{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda_______::7f18b83690e3a18134b9a3db66d882d3"},"relatedEntity":{"id":"40|corda_______::7f18b83690e3a18134b9a3db66d882d3","type":"project","projectTitle":"DIstributed colLaboratories Infrastructure on Grid ENabled Technology 4 Science","code":"212488","acronym":"D4SCIENCE","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::FP7::SP4::INFRAResearch InfrastructuresINFRAec:programec__________::EC::FP7::SP4SP4-CapacitiesSP4ec:specificprogramec__________::EC::FP7SEVENTH FRAMEWORK PROGRAMMEFP7ec:frameworkprogram, dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}},{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda__h2020::6729c0ee95de7724deb60454bb4179de"},"relatedEntity":{"id":"40|corda__h2020::6729c0ee95de7724deb60454bb4179de","type":"project","projectTitle":"Building Research environments for fostering Innovation, Decision making, Governance and Education to support Blue 
growth","code":"675680","acronym":"BlueBRIDGE","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::H2020::RIAResearch and Innovation actionRIAec:h2020toasec__________::EC::H2020H2020Horizon 2020 Framework Programmeec:h2020fundings, dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}},{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda__h2020::0da81b3ad78047f577dd405e8a2d7f07"},"relatedEntity":{"id":"40|corda__h2020::0da81b3ad78047f577dd405e8a2d7f07","type":"project","projectTitle":"Environmental Research Infrastructures Providing Shared Solutions for Science and Society","code":"654182","acronym":"ENVRI PLUS","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::H2020::RIAResearch and Innovation actionRIAec:h2020toasec__________::EC::H2020H2020Horizon 2020 Framework Programmeec:h2020fundings, dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}},{"relation":{"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":"","provenanceaction":{"classid":"sysimport:crosswalk:repository","classname":"sysimport:crosswalk:repository","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}},"lastupdatetimestamp":0,"relType":"resultProject","subRelType":"outcome","relClass":"isProducedBy","source":"50|od______2659::05817f64c43a918a07483340b5726f77","target":"40|corda__h2020::e7f5e7755409fc74eea9d168ab795634"},"relatedEntity":{"id":"40|corda__h2020::e7f5e7755409fc74eea9d168ab795634","type":"project","projectTitle":"SoBigData Research Infrastructure","code":"654024","acronym":"SoBigData","contracttype":{},"fundingtree":["{value=ec__________::ECECEuropean CommissionEUec__________::EC::H2020::RIAResearch and Innovation actionRIAec:h2020toasec__________::EC::H2020H2020Horizon 2020 Framework Programmeec:h2020fundings, dataInfo={invisible=false, inferred=false, deletedbyinference=false, trust=, inferenceprovenance=, provenanceaction={classid=, classname=, schemeid=, schemename=}}}"]}}]} \ No newline at end of file