forked from D-Net/dnet-hadoop

commit b021b8a2e1 ("Added index wf")
parent 2b8675462f
DHPUtils.java

@@ -65,7 +65,7 @@ public class DHPUtils {
             return (String) o;
         if (o instanceof JSONArray && ((JSONArray) o).size() > 0)
             return (String) ((JSONArray) o).get(0);
-        return "";
+        return o.toString();
     } catch (Exception e) {
         return "";
     }
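For context: the method touched above is DHPUtils.getJPathString, which resolves a JSONPath expression against a JSON record and is used throughout this commit (e.g. to extract $.id, $.source, $.target). The change makes non-String, non-array results fall back to their string form instead of an empty string. A minimal sketch of the whole method, assuming the signature and the JsonPath scaffolding that sit outside the hunk:

    import com.jayway.jsonpath.JsonPath;
    import net.minidev.json.JSONArray;

    // Sketch only: the lines outside the hunk are assumptions, not part of the diff.
    public static String getJPathString(final String jsonPath, final String json) {
        try {
            Object o = JsonPath.read(json, jsonPath);
            if (o instanceof String)
                return (String) o;
            if (o instanceof JSONArray && ((JSONArray) o).size() > 0)
                return (String) ((JSONArray) o).get(0);
            return o.toString(); // new fallback: stringify numbers, booleans, objects
        } catch (Exception e) {
            return "";
        }
    }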
SparkPropagateRelationsJob.java

@@ -27,7 +27,6 @@ public class SparkPropagateRelationsJob {
         SOURCE,
         TARGET
     }
-    final static String IDJSONPATH = "$.id";
     final static String SOURCEJSONPATH = "$.source";
     final static String TARGETJSONPATH = "$.target";

SparkUpdateEntityJob.java

@@ -44,6 +44,7 @@ public class SparkUpdateEntityJob {
         final String mergeRelPath = parser.get("mergeRelPath");
         final String dedupRecordPath = parser.get("dedupRecordPath");
         final String entity = parser.get("entity");
+        final String destination = parser.get("targetPath");

         final Dataset<Relation> df = spark.read().load(mergeRelPath).as(Encoders.bean(Relation.class));
         final JavaPairRDD<String, String> mergedIds = df
@@ -63,7 +64,7 @@ public class SparkUpdateEntityJob {
                     .mapToPair((PairFunction<String, String, String>) s -> new Tuple2<>(DHPUtils.getJPathString(TARGETJSONPATH, s), s))
                     .leftOuterJoin(mergedIds)
                     .map(k -> k._2()._2().isPresent() ? updateDeletedByInference(k._2()._1(), Relation.class) : k._2()._1())
-                    .saveAsTextFile(entityPath + "_new", GzipCodec.class);
+                    .saveAsTextFile(destination, GzipCodec.class);
         } else {
             final JavaRDD<String> dedupEntity = sc.textFile(dedupRecordPath);
             JavaPairRDD<String, String> entitiesWithId = sourceEntity.mapToPair((PairFunction<String, String, String>) s -> new Tuple2<>(DHPUtils.getJPathString(IDJSONPATH, s), s));
@@ -86,7 +87,7 @@ public class SparkUpdateEntityJob {
             JavaRDD<String> map = entitiesWithId.leftOuterJoin(mergedIds).map(k -> k._2()._2().isPresent() ? updateDeletedByInference(k._2()._1(), mainClass) : k._2()._1());


-            map.union(dedupEntity).saveAsTextFile(entityPath + "_new", GzipCodec.class);
+            map.union(dedupEntity).saveAsTextFile(destination, GzipCodec.class);
         }

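Both branches above follow the same pattern: key every JSON record by its id (or by source/target for relations), left-outer-join against the ids collected from the mergeRel dataset, and rewrite matching records through updateDeletedByInference, which the diff calls but never shows. A plausible sketch of that helper, assuming the Oaf base class in dhp-schemas exposes its dataInfo and that DataInfo carries the deletedbyinference flag (both are assumptions here, suggested by the record.json fixture later in this commit):

    import java.io.IOException;
    import com.fasterxml.jackson.databind.DeserializationFeature;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import eu.dnetlib.dhp.schema.oaf.DataInfo;
    import eu.dnetlib.dhp.schema.oaf.Oaf;

    // Hypothetical: re-parse the record, raise the flag, serialize it back.
    // Merged duplicates are kept but marked, not physically removed.
    private static <T extends Oaf> String updateDeletedByInference(final String json, final Class<T> clazz) {
        final ObjectMapper mapper = new ObjectMapper();
        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        try {
            final T entity = mapper.readValue(json, clazz);
            if (entity.getDataInfo() == null)
                entity.setDataInfo(new DataInfo());
            entity.getDataInfo().setDeletedbyinference(true);
            return mapper.writeValueAsString(entity);
        } catch (IOException e) {
            throw new RuntimeException("unable to convert json", e);
        }
    }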
@@ -22,10 +22,17 @@
     "paramLongName": "dedupRecordPath",
     "paramDescription": "the inputPath of dedup record",
     "paramRequired": true
-  }, {
+  },
+  {
     "paramName": "e",
     "paramLongName": "entity",
     "paramDescription": "the type of entity",
     "paramRequired": true
-  }
+  },
+  {
+    "paramName": "t",
+    "paramLongName": "targetPath",
+    "paramDescription": "the targetPath",
+    "paramRequired": true
+  }
 ]
@@ -26,7 +26,7 @@
         </property>
     </parameters>

-    <start to="DeleteWorkingPath"/>
+    <start to="updateDeletedByInferenceRelation"/>


    <kill name="Kill">
@@ -55,8 +55,7 @@
            <spark-opts>
                --executor-memory ${sparkExecutorMemory}
                --driver-memory=${sparkDriverMemory}
-                --num-executors 100
-                --conf spark.yarn.jars="hdfs://hadoop-rm1.garr-pa1.d4science.org:8020/user/oozie/share/lib/lib_20180405103059/spark2"
+                ${sparkExtraOPT}
            </spark-opts>
            <arg>-mt</arg><arg>yarn-cluster</arg>
            <arg>--sourcePath</arg><arg>${sourcePath}</arg>
@@ -80,8 +79,7 @@
            <spark-opts>
                --executor-memory ${sparkExecutorMemory}
                --driver-memory=${sparkDriverMemory}
-                --num-executors 100
-                --conf spark.yarn.jars="hdfs://hadoop-rm1.garr-pa1.d4science.org:8020/user/oozie/share/lib/lib_20180405103059/spark2"
+                ${sparkExtraOPT}
            </spark-opts>
            <arg>-mt</arg><arg>yarn-cluster</arg>
            <arg>--sourcePath</arg><arg>${sourcePath}</arg>
@@ -105,8 +103,7 @@
            <spark-opts>
                --executor-memory ${sparkExecutorMemory}
                --driver-memory=${sparkDriverMemory}
-                --num-executors 100
-                --conf spark.yarn.jars="hdfs://hadoop-rm1.garr-pa1.d4science.org:8020/user/oozie/share/lib/lib_20180405103059/spark2"
+                ${sparkExtraOPT}
            </spark-opts>
            <arg>-mt</arg><arg>yarn-cluster</arg>
            <arg>--sourcePath</arg><arg>${sourcePath}</arg>
@@ -130,14 +127,76 @@
            <spark-opts>
                --executor-memory ${sparkExecutorMemory}
                --driver-memory=${sparkDriverMemory}
-                --num-executors 100
-                --conf spark.yarn.jars="hdfs://hadoop-rm1.garr-pa1.d4science.org:8020/user/oozie/share/lib/lib_20180405103059/spark2"
+                ${sparkExtraOPT}
            </spark-opts>
            <arg>-mt</arg><arg>yarn-cluster</arg>
            <arg>--mergeRelPath</arg><arg>${targetPath}/${entity}/mergeRel</arg>
            <arg>--relationPath</arg><arg>${sourcePath}/relation</arg>
-            <arg>--targetRelPath</arg><arg>${targetPath}/${entity}/relation_updated</arg>
+            <arg>--targetRelPath</arg><arg>${targetPath}/${entity}/relation_propagated</arg>
        </spark>
+        <ok to="updateDeletedByInferenceEntity"/>
+        <error to="Kill"/>
+    </action>
+
+
+    <action name="updateDeletedByInferenceEntity">
+        <spark xmlns="uri:oozie:spark-action:0.2">
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <master>yarn-cluster</master>
+            <mode>cluster</mode>
+            <name>Update ${entity} and add DedupRecord</name>
+            <class>eu.dnetlib.dedup.SparkUpdateEntityJob</class>
+            <jar>dhp-dedup-${projectVersion}.jar</jar>
+            <spark-opts>
+                --executor-memory ${sparkExecutorMemory}
+                --driver-memory=${sparkDriverMemory}
+                ${sparkExtraOPT}
+            </spark-opts>
+            <arg>-mt</arg><arg>yarn-cluster</arg>
+            <arg>--entityPath</arg><arg>${sourcePath}/${entity}</arg>
+            <arg>--mergeRelPath</arg><arg>${targetPath}/${entity}/mergeRel</arg>
+            <arg>--entity</arg><arg>${entity}</arg>
+            <arg>--dedupRecordPath</arg><arg>${targetPath}/${entity}/dedup_records</arg>
+            <arg>--targetPath</arg><arg>${targetPath}/${entity}/updated_record</arg>
+        </spark>
+        <ok to="updateDeletedByInferenceRelation"/>
+        <error to="Kill"/>
+    </action>
+
+    <action name="updateDeletedByInferenceRelation">
+        <spark xmlns="uri:oozie:spark-action:0.2">
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <master>yarn-cluster</master>
+            <mode>cluster</mode>
+            <name>Update ${entity} set deleted by Inference</name>
+            <class>eu.dnetlib.dedup.SparkUpdateEntityJob</class>
+            <jar>dhp-dedup-${projectVersion}.jar</jar>
+            <spark-opts>
+                --executor-memory ${sparkExecutorMemory}
+                --driver-memory=${sparkDriverMemory}
+                ${sparkExtraOPT}
+            </spark-opts>
+            <arg>-mt</arg><arg>yarn-cluster</arg>
+            <arg>--entityPath</arg><arg>${targetPath}/${entity}/relation_propagated</arg>
+            <arg>--mergeRelPath</arg><arg>${targetPath}/${entity}/mergeRel</arg>
+            <arg>--entity</arg><arg>relation</arg>
+            <arg>--dedupRecordPath</arg><arg>${targetPath}/${entity}/dedup_records</arg>
+            <arg>--targetPath</arg><arg>${targetPath}/${entity}/updated_relation</arg>
+        </spark>
+        <ok to="replaceEntity"/>
+        <error to="Kill"/>
+    </action>
+
+
+    <action name="replaceEntity">
+        <fs>
+            <delete path='${sourcePath}/${entity}'/>
+            <delete path='${sourcePath}/relation'/>
+            <move source="${targetPath}/${entity}/updated_relation" target="${sourcePath}/relation" />
+            <move source="${targetPath}/${entity}/updated_record" target="${sourcePath}/${entity}" />
+        </fs>
        <ok to="End"/>
        <error to="Kill"/>
    </action>
dhp-graph-provision/pom.xml (new file)

@@ -0,0 +1,45 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>dhp-workflows</artifactId>
+        <groupId>eu.dnetlib.dhp</groupId>
+        <version>1.0.5-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>dhp-graph-provision</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.spark</groupId>
+            <artifactId>spark-core_2.11</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.spark</groupId>
+            <artifactId>spark-sql_2.11</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>eu.dnetlib.dhp</groupId>
+            <artifactId>dhp-common</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>eu.dnetlib.dhp</groupId>
+            <artifactId>dhp-schemas</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.elasticsearch</groupId>
+            <artifactId>elasticsearch-hadoop</artifactId>
+        </dependency>
+
+    </dependencies>
+
+</project>
ProvisionUtil.java (new file)

@@ -0,0 +1,47 @@
+package eu.dnetlib.dhp.provision;
+
+import eu.dnetlib.dhp.provision.scholix.Typology;
+import eu.dnetlib.dhp.utils.DHPUtils;
+import org.apache.commons.lang3.StringUtils;
+
+public class ProvisionUtil {
+
+    public final static String deletedByInferenceJPATH = "$.dataInfo.deletedbyinference";
+    public final static String TARGETJSONPATH = "$.target";
+    public final static String SOURCEJSONPATH = "$.source";
+
+    public static RelatedItemInfo getItemType(final String item, final String idPath) {
+        String targetId = DHPUtils.getJPathString(idPath, item);
+        switch (StringUtils.substringBefore(targetId, "|")) {
+            case "50":
+                return new RelatedItemInfo().setRelatedPublication(1);
+            case "60":
+                return new RelatedItemInfo().setRelatedDataset(1);
+            case "70":
+                return new RelatedItemInfo().setRelatedUnknown(1);
+            default:
+                throw new RuntimeException("Unknown target ID");
+        }
+    }
+
+    public static Boolean isNotDeleted(final String item) {
+        return !"true".equalsIgnoreCase(DHPUtils.getJPathString(deletedByInferenceJPATH, item));
+    }
+
+    public static Typology getItemTypeFromId(String id) {
+        switch (StringUtils.substringBefore(id, "|")) {
+            case "50":
+                return Typology.publication;
+            case "60":
+                return Typology.dataset;
+            case "70":
+                return Typology.unknown;
+            default:
+                throw new RuntimeException("Unknown ID type");
+        }
+    }
+}
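The switch cases encode the identifier convention this module relies on: the two characters before the first "|" select the entity space, 50 for publications, 60 for datasets, 70 for unknown objects (the relation fixture at the end of this commit uses exactly such ids). A quick illustration with made-up identifiers:

    // Made-up ids; only the prefix before '|' matters to these helpers.
    String publicationId = "50|aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
    String datasetId = "60|bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb";

    Typology p = ProvisionUtil.getItemTypeFromId(publicationId); // Typology.publication
    Typology d = ProvisionUtil.getItemTypeFromId(datasetId);     // Typology.dataset

    // getItemType applies the same mapping to the $.target of a relation JSON,
    // producing a RelatedItemInfo with the matching counter set to 1.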
RelatedItemInfo.java (new file)

@@ -0,0 +1,64 @@
+package eu.dnetlib.dhp.provision;
+
+import java.io.Serializable;
+
+/**
+ * This class models the information of related items
+ */
+public class RelatedItemInfo implements Serializable {
+
+    private String id;
+
+    private int relatedDataset = 0;
+
+    private int relatedPublication = 0;
+
+    private int relatedUnknown = 0;
+
+    public String getId() { return id; }
+
+    public RelatedItemInfo setId(String id) {
+        this.id = id;
+        return this;
+    }
+
+    public RelatedItemInfo add(RelatedItemInfo other) {
+        if (other != null) {
+            relatedDataset += other.getRelatedDataset();
+            relatedPublication += other.getRelatedPublication();
+            relatedUnknown += other.getRelatedUnknown();
+        }
+        return this;
+    }
+
+    public int getRelatedDataset() { return relatedDataset; }
+
+    public RelatedItemInfo setRelatedDataset(int relatedDataset) {
+        this.relatedDataset = relatedDataset;
+        return this;
+    }
+
+    public int getRelatedPublication() { return relatedPublication; }
+
+    public RelatedItemInfo setRelatedPublication(int relatedPublication) {
+        this.relatedPublication = relatedPublication;
+        return this;
+    }
+
+    public int getRelatedUnknown() { return relatedUnknown; }
+
+    public RelatedItemInfo setRelatedUnknown(int relatedUnknown) {
+        this.relatedUnknown = relatedUnknown;
+        return this;
+    }
+}
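The fluent setters and the null-tolerant add() exist so that SparkExtractRelationCount (next file) can fold one RelatedItemInfo per relation into a single counter object per source id. The intended combine step, in isolation:

    // Two partial counters, as produced by ProvisionUtil.getItemType for two
    // relations of the same source; reduceByKey merges them pairwise.
    RelatedItemInfo fromFirstRel = new RelatedItemInfo().setRelatedDataset(1);
    RelatedItemInfo fromSecondRel = new RelatedItemInfo().setRelatedPublication(1);

    RelatedItemInfo total = fromFirstRel.add(fromSecondRel).setId("50|some-source-id"); // hypothetical id
    // total: relatedDataset=1, relatedPublication=1, relatedUnknown=0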
SparkExtractRelationCount.java (new file)

@@ -0,0 +1,74 @@
+package eu.dnetlib.dhp.provision;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.utils.DHPUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.io.compress.GzipCodec;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.Function2;
+import org.apache.spark.api.java.function.PairFunction;
+import org.apache.spark.sql.SparkSession;
+import scala.Tuple2;
+
+/**
+ * SparkExtractRelationCount is a Spark job that takes the relation RDD as input
+ * and computes, for each item appearing in a relation, the number of
+ * - related Datasets
+ * - related Publications
+ * - related Unknowns
+ */
+public class SparkExtractRelationCount {
+
+    public static void main(String[] args) throws Exception {
+        final ArgumentApplicationParser parser = new ArgumentApplicationParser(IOUtils.toString(SparkExtractRelationCount.class.getResourceAsStream("/eu/dnetlib/dhp/provision/input_related_entities_parameters.json")));
+        parser.parseArgument(args);
+        final SparkSession spark = SparkSession
+                .builder()
+                .appName(SparkExtractRelationCount.class.getSimpleName())
+                .master(parser.get("master"))
+                .getOrCreate();
+
+        final String workingDirPath = parser.get("workingDirPath");
+
+        final String relationPath = parser.get("relationPath");
+
+        final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
+
+        sc.textFile(relationPath)
+                // start by filtering out the relations deleted by inference
+                .filter(ProvisionUtil::isNotDeleted)
+                // then build a PairRDD<sourceId, RelatedItemInfo>
+                .mapToPair((PairFunction<String, String, RelatedItemInfo>) f
+                        -> new Tuple2<>(DHPUtils.getJPathString(ProvisionUtil.SOURCEJSONPATH, f), ProvisionUtil.getItemType(f, ProvisionUtil.TARGETJSONPATH)))
+                // reduce by key, summing the relation counters
+                .reduceByKey((Function2<RelatedItemInfo, RelatedItemInfo, RelatedItemInfo>) (v1, v2) -> {
+                    if (v1 == null && v2 == null)
+                        return new RelatedItemInfo();
+                    return v1 != null ? v1.add(v2) : v2;
+                })
+                // set the source id on each RelatedItemInfo
+                .map(k -> k._2().setId(k._1()))
+                // convert to JSON and save as a text file
+                .map(k -> {
+                    ObjectMapper mapper = new ObjectMapper();
+                    return mapper.writeValueAsString(k);
+                }).saveAsTextFile(workingDirPath + "/relatedItemCount", GzipCodec.class);
+    }
+}
SparkGenerateSummary.java (new file)

@@ -0,0 +1,57 @@
+package eu.dnetlib.dhp.provision;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.provision.scholix.ScholixSummary;
+import eu.dnetlib.dhp.utils.DHPUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.io.compress.GzipCodec;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.Function;
+import org.apache.spark.api.java.function.PairFunction;
+import org.apache.spark.sql.SparkSession;
+import scala.Tuple2;
+
+public class SparkGenerateSummary {
+
+    private static final String jsonIDPath = "$.id";
+
+    public static void main(String[] args) throws Exception {
+        final ArgumentApplicationParser parser = new ArgumentApplicationParser(IOUtils.toString(SparkGenerateSummary.class.getResourceAsStream("/eu/dnetlib/dhp/provision/input_generate_summary_parameters.json")));
+        parser.parseArgument(args);
+        final SparkSession spark = SparkSession
+                .builder()
+                .appName(SparkGenerateSummary.class.getSimpleName())
+                .master(parser.get("master"))
+                .getOrCreate();
+
+        final String graphPath = parser.get("graphPath");
+        final String workingDirPath = parser.get("workingDirPath");
+
+        final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
+        JavaPairRDD<String, String> relationCount = sc.textFile(workingDirPath + "/relatedItemCount").mapToPair((PairFunction<String, String, String>) i -> new Tuple2<>(DHPUtils.getJPathString(jsonIDPath, i), i));
+
+        JavaPairRDD<String, String> entities =
+                sc.textFile(graphPath + "/publication")
+                        .filter(ProvisionUtil::isNotDeleted)
+                        .mapToPair((PairFunction<String, String, String>) i -> new Tuple2<>(DHPUtils.getJPathString(jsonIDPath, i), i))
+                        .union(
+                                sc.textFile(graphPath + "/dataset")
+                                        .filter(ProvisionUtil::isNotDeleted)
+                                        .mapToPair((PairFunction<String, String, String>) i -> new Tuple2<>(DHPUtils.getJPathString(jsonIDPath, i), i))
+                        )
+                        .union(
+                                sc.textFile(graphPath + "/unknown")
+                                        .filter(ProvisionUtil::isNotDeleted)
+                                        .mapToPair((PairFunction<String, String, String>) i -> new Tuple2<>(DHPUtils.getJPathString(jsonIDPath, i), i))
+                        );
+        entities.join(relationCount).map((Function<Tuple2<String, Tuple2<String, String>>, String>) k ->
+                ScholixSummary.fromJsonOAF(ProvisionUtil.getItemTypeFromId(k._1()), k._2()._1(), k._2()._2())).saveAsTextFile(workingDirPath + "/summary", GzipCodec.class);
+    }
+}
SparkIndexCollectionOnES.java (new file)

@@ -0,0 +1,49 @@
+package eu.dnetlib.dhp.provision;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import org.apache.commons.io.IOUtils;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.sql.SparkSession;
+import org.elasticsearch.spark.rdd.api.java.JavaEsSpark;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class SparkIndexCollectionOnES {
+
+    public static void main(String[] args) throws Exception {
+
+        final ArgumentApplicationParser parser = new ArgumentApplicationParser(IOUtils.toString(SparkIndexCollectionOnES.class.getResourceAsStream("/eu/dnetlib/dhp/provision/index_on_es.json")));
+        parser.parseArgument(args);
+
+        SparkConf conf = new SparkConf().setAppName(SparkIndexCollectionOnES.class.getSimpleName())
+                .setMaster(parser.get("master"));
+
+        final String sourcePath = parser.get("sourcePath");
+        final String index = parser.get("index");
+
+        final SparkSession spark = SparkSession.builder().config(conf).getOrCreate();
+
+        final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
+
+        JavaRDD<String> inputRdd = sc.textFile(sourcePath);
+
+        Map<String, String> esCfg = new HashMap<>();
+        esCfg.put("es.nodes", "10.19.65.51, 10.19.65.52, 10.19.65.53, 10.19.65.54");
+        esCfg.put("es.mapping.id", "id");
+        esCfg.put("es.batch.write.retry.count", "8");
+        esCfg.put("es.batch.write.retry.wait", "60s");
+        esCfg.put("es.batch.size.entries", "200");
+        esCfg.put("es.nodes.wan.only", "true");
+
+        JavaEsSpark.saveJsonToEs(inputRdd, index, esCfg);
+    }
+}
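The job bulk-loads newline-delimited JSON into Elasticsearch through the elasticsearch-hadoop connector: es.mapping.id makes the connector reuse each document's own "id" field as the ES _id (so re-runs upsert instead of duplicating), and the batch/retry settings throttle the bulk writes. The @Ignore'd test at the end of this commit runs it locally; a similar sketch, with placeholder path and index name:

    // Hypothetical local invocation; "-s" points at a directory of JSON lines.
    SparkIndexCollectionOnES.main(new String[] {
            "-mt", "local[*]",    // Spark master
            "-s", "/tmp/summary", // sourcePath, e.g. output of SparkGenerateSummary
            "-i", "dli_object"    // target Elasticsearch index
    });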
CollectedFromType.java (new file)

@@ -0,0 +1,44 @@
+package eu.dnetlib.dhp.provision.scholix;
+
+import java.io.Serializable;
+
+public class CollectedFromType implements Serializable {
+
+    private String datasourceName;
+    private String datasourceId;
+    private String completionStatus;
+
+    public CollectedFromType() {
+    }
+
+    public CollectedFromType(String datasourceName, String datasourceId, String completionStatus) {
+        this.datasourceName = datasourceName;
+        this.datasourceId = datasourceId;
+        this.completionStatus = completionStatus;
+    }
+
+    public String getDatasourceName() { return datasourceName; }
+    public void setDatasourceName(String datasourceName) { this.datasourceName = datasourceName; }
+
+    public String getDatasourceId() { return datasourceId; }
+    public void setDatasourceId(String datasourceId) { this.datasourceId = datasourceId; }
+
+    public String getCompletionStatus() { return completionStatus; }
+    public void setCompletionStatus(String completionStatus) { this.completionStatus = completionStatus; }
+}
SchemeValue.java (new file)

@@ -0,0 +1,33 @@
+package eu.dnetlib.dhp.provision.scholix;
+
+import java.io.Serializable;
+
+public class SchemeValue implements Serializable {
+
+    private String scheme;
+    private String value;
+
+    public SchemeValue() {
+    }
+
+    public SchemeValue(String scheme, String value) {
+        this.scheme = scheme;
+        this.value = value;
+    }
+
+    public String getScheme() { return scheme; }
+    public void setScheme(String scheme) { this.scheme = scheme; }
+
+    public String getValue() { return value; }
+    public void setValue(String value) { this.value = value; }
+}
ScholixSummary.java (new file)

@@ -0,0 +1,289 @@
+package eu.dnetlib.dhp.provision.scholix;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import eu.dnetlib.dhp.provision.RelatedItemInfo;
+import eu.dnetlib.dhp.schema.oaf.Author;
+import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
+import eu.dnetlib.dhp.schema.scholexplorer.DLIDataset;
+import eu.dnetlib.dhp.schema.scholexplorer.DLIPublication;
+import eu.dnetlib.dhp.schema.scholexplorer.DLIUnknown;
+
+import java.io.Serializable;
+import java.util.List;
+import java.util.stream.Collectors;
+
+public class ScholixSummary implements Serializable {
+    private String id;
+    private List<TypedIdentifier> localIdentifier;
+    private Typology typology;
+    private List<String> title;
+    private List<String> author;
+    private List<String> date;
+    private String description;
+    private List<SchemeValue> subject;
+    private List<String> publisher;
+    private int relatedPublications;
+    private int relatedDatasets;
+    private int relatedUnknown;
+    private List<CollectedFromType> datasources;
+
+    public String getId() { return id; }
+    public void setId(String id) { this.id = id; }
+
+    public List<TypedIdentifier> getLocalIdentifier() { return localIdentifier; }
+    public void setLocalIdentifier(List<TypedIdentifier> localIdentifier) { this.localIdentifier = localIdentifier; }
+
+    public Typology getTypology() { return typology; }
+    public void setTypology(Typology typology) { this.typology = typology; }
+
+    public List<String> getTitle() { return title; }
+    public void setTitle(List<String> title) { this.title = title; }
+
+    public List<String> getAuthor() { return author; }
+    public void setAuthor(List<String> author) { this.author = author; }
+
+    public List<String> getDate() { return date; }
+    public void setDate(List<String> date) { this.date = date; }
+
+    @JsonProperty("abstract")
+    public String getDescription() { return description; }
+
+    @JsonProperty("abstract")
+    public void setDescription(String description) { this.description = description; }
+
+    public List<SchemeValue> getSubject() { return subject; }
+    public void setSubject(List<SchemeValue> subject) { this.subject = subject; }
+
+    public List<String> getPublisher() { return publisher; }
+    public void setPublisher(List<String> publisher) { this.publisher = publisher; }
+
+    public int getRelatedPublications() { return relatedPublications; }
+    public void setRelatedPublications(int relatedPublications) { this.relatedPublications = relatedPublications; }
+
+    public int getRelatedDatasets() { return relatedDatasets; }
+    public void setRelatedDatasets(int relatedDatasets) { this.relatedDatasets = relatedDatasets; }
+
+    public int getRelatedUnknown() { return relatedUnknown; }
+    public void setRelatedUnknown(int relatedUnknown) { this.relatedUnknown = relatedUnknown; }
+
+    public List<CollectedFromType> getDatasources() { return datasources; }
+    public void setDatasources(List<CollectedFromType> datasources) { this.datasources = datasources; }
+
+    public static String fromJsonOAF(final Typology oafType, final String oafJson, final String relEntityJson) {
+        try {
+            final ObjectMapper mapper = new ObjectMapper();
+            mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+
+            RelatedItemInfo relatedItemInfo = mapper.readValue(relEntityJson, RelatedItemInfo.class);
+
+            switch (oafType) {
+                case dataset:
+                    return mapper.writeValueAsString(summaryFromDataset(mapper.readValue(oafJson, DLIDataset.class), relatedItemInfo));
+                case publication:
+                    return mapper.writeValueAsString(summaryFromPublication(mapper.readValue(oafJson, DLIPublication.class), relatedItemInfo));
+                case unknown:
+                    return mapper.writeValueAsString(summaryFromUnknown(mapper.readValue(oafJson, DLIUnknown.class), relatedItemInfo));
+            }
+        } catch (Throwable e) {
+            throw new RuntimeException(e);
+        }
+        return null;
+    }
+
+    private static ScholixSummary summaryFromDataset(final DLIDataset item, final RelatedItemInfo relatedItemInfo) {
+        ScholixSummary summary = new ScholixSummary();
+        summary.setId(item.getId());
+
+        if (item.getPid() != null)
+            summary.setLocalIdentifier(item.getPid().stream()
+                    .map(p -> new TypedIdentifier(p.getValue(), p.getQualifier().getClassid()))
+                    .collect(Collectors.toList())
+            );
+
+        summary.setTypology(Typology.dataset);
+        if (item.getTitle() != null)
+            summary.setTitle(item.getTitle().stream().map(StructuredProperty::getValue).collect(Collectors.toList()));
+
+        if (item.getAuthor() != null) {
+            summary.setAuthor(item.getAuthor().stream().map(Author::getFullname).collect(Collectors.toList()));
+        }
+
+        if (item.getRelevantdate() != null)
+            summary.setDate(
+                    item.getRelevantdate().stream()
+                            .filter(d -> "date".equalsIgnoreCase(d.getQualifier().getClassname()))
+                            .map(StructuredProperty::getValue)
+                            .collect(Collectors.toList())
+            );
+
+        if (item.getDescription() != null && item.getDescription().size() > 0)
+            summary.setDescription(item.getDescription().get(0).getValue());
+
+        if (item.getSubject() != null) {
+            summary.setSubject(item.getSubject().stream()
+                    .map(s -> new SchemeValue(s.getQualifier().getClassid(), s.getValue()))
+                    .collect(Collectors.toList())
+            );
+        }
+
+        summary.setRelatedDatasets(relatedItemInfo.getRelatedDataset());
+        summary.setRelatedPublications(relatedItemInfo.getRelatedPublication());
+        summary.setRelatedUnknown(relatedItemInfo.getRelatedUnknown());
+
+        if (item.getDlicollectedfrom() != null)
+            summary.setDatasources(item.getDlicollectedfrom().stream()
+                    .map(
+                            c -> new CollectedFromType(c.getName(), c.getId(), c.getCompletionStatus())
+                    ).collect(Collectors.toList()));
+
+        return summary;
+    }
+
+    private static ScholixSummary summaryFromPublication(final DLIPublication item, final RelatedItemInfo relatedItemInfo) {
+        ScholixSummary summary = new ScholixSummary();
+        summary.setId(item.getId());
+
+        if (item.getPid() != null)
+            summary.setLocalIdentifier(item.getPid().stream()
+                    .map(p -> new TypedIdentifier(p.getValue(), p.getQualifier().getClassid()))
+                    .collect(Collectors.toList())
+            );
+
+        summary.setTypology(Typology.publication);
+        if (item.getTitle() != null)
+            summary.setTitle(item.getTitle().stream().map(StructuredProperty::getValue).collect(Collectors.toList()));
+
+        if (item.getAuthor() != null) {
+            summary.setAuthor(item.getAuthor().stream().map(Author::getFullname).collect(Collectors.toList()));
+        }
+
+        if (item.getRelevantdate() != null)
+            summary.setDate(
+                    item.getRelevantdate().stream()
+                            .filter(d -> "date".equalsIgnoreCase(d.getQualifier().getClassname()))
+                            .map(StructuredProperty::getValue)
+                            .collect(Collectors.toList())
+            );
+
+        if (item.getDescription() != null && item.getDescription().size() > 0)
+            summary.setDescription(item.getDescription().get(0).getValue());
+
+        if (item.getSubject() != null) {
+            summary.setSubject(item.getSubject().stream()
+                    .map(s -> new SchemeValue(s.getQualifier().getClassid(), s.getValue()))
+                    .collect(Collectors.toList())
+            );
+        }
+
+        summary.setRelatedDatasets(relatedItemInfo.getRelatedDataset());
+        summary.setRelatedPublications(relatedItemInfo.getRelatedPublication());
+        summary.setRelatedUnknown(relatedItemInfo.getRelatedUnknown());
+
+        if (item.getDlicollectedfrom() != null)
+            summary.setDatasources(item.getDlicollectedfrom().stream()
+                    .map(
+                            c -> new CollectedFromType(c.getName(), c.getId(), c.getCompletionStatus())
+                    ).collect(Collectors.toList()));
+
+        return summary;
+    }
+
+    private static ScholixSummary summaryFromUnknown(final DLIUnknown item, final RelatedItemInfo relatedItemInfo) {
+        ScholixSummary summary = new ScholixSummary();
+        summary.setId(item.getId());
+        if (item.getPid() != null)
+            summary.setLocalIdentifier(item.getPid().stream()
+                    .map(p -> new TypedIdentifier(p.getValue(), p.getQualifier().getClassid()))
+                    .collect(Collectors.toList())
+            );
+
+        summary.setRelatedDatasets(relatedItemInfo.getRelatedDataset());
+        summary.setRelatedPublications(relatedItemInfo.getRelatedPublication());
+        summary.setRelatedUnknown(relatedItemInfo.getRelatedUnknown());
+
+        if (item.getDlicollectedfrom() != null)
+            summary.setDatasources(item.getDlicollectedfrom().stream()
+                    .map(
+                            c -> new CollectedFromType(c.getName(), c.getId(), c.getCompletionStatus())
+                    ).collect(Collectors.toList()));
+
+        return summary;
+    }
+}
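One detail worth noting above: the @JsonProperty("abstract") annotations on the description accessors rename that field on (de)serialization, so the JSON documents shipped to the index carry "abstract" rather than "description". In short:

    ObjectMapper mapper = new ObjectMapper();
    ScholixSummary s = new ScholixSummary();
    s.setDescription("a short abstract");
    String json = mapper.writeValueAsString(s);
    // json contains "abstract":"a short abstract"; there is no "description" key.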
TypedIdentifier.java (new file)

@@ -0,0 +1,32 @@
+package eu.dnetlib.dhp.provision.scholix;
+
+import java.io.Serializable;
+
+public class TypedIdentifier implements Serializable {
+
+    private String id;
+    private String type;
+
+    public TypedIdentifier() {
+    }
+
+    public TypedIdentifier(String id, String type) {
+        this.id = id;
+        this.type = type;
+    }
+
+    public String getId() { return id; }
+    public void setId(String id) { this.id = id; }
+
+    public String getType() { return type; }
+    public void setType(String type) { this.type = type; }
+}
Typology.java (new file)

@@ -0,0 +1,9 @@
+package eu.dnetlib.dhp.provision.scholix;
+
+import java.io.Serializable;
+
+public enum Typology implements Serializable {
+    dataset,
+    publication,
+    unknown
+}
config-default.xml (new file)

@@ -0,0 +1,10 @@
+<configuration>
+    <property>
+        <name>oozie.use.system.libpath</name>
+        <value>true</value>
+    </property>
+    <property>
+        <name>oozie.action.sharelib.for.spark</name>
+        <value>spark2</value>
+    </property>
+</configuration>
workflow.xml (new file)

@@ -0,0 +1,100 @@
+<workflow-app name="import_infospace_graph" xmlns="uri:oozie:workflow:0.5">
+    <parameters>
+        <property>
+            <name>workingDirPath</name>
+            <description>the working dir path</description>
+        </property>
+        <property>
+            <name>graphPath</name>
+            <description>the graph path</description>
+        </property>
+        <property>
+            <name>index</name>
+            <description>index name</description>
+        </property>
+        <property>
+            <name>sparkDriverMemory</name>
+            <description>memory for driver process</description>
+        </property>
+        <property>
+            <name>sparkExecutorMemory</name>
+            <description>memory for individual executor</description>
+        </property>
+        <property>
+            <name>sparkExecutorCores</name>
+            <description>number of cores used by single executor</description>
+        </property>
+    </parameters>
+
+    <start to="indexSummary"/>
+
+    <kill name="Kill">
+        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+    <action name="DeleteTargetPath">
+        <fs>
+            <delete path='${workingDirPath}'/>
+            <mkdir path='${workingDirPath}'/>
+        </fs>
+        <ok to="CalculateRelatedItem"/>
+        <error to="Kill"/>
+    </action>
+
+    <action name="CalculateRelatedItem">
+        <spark xmlns="uri:oozie:spark-action:0.2">
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <master>yarn-cluster</master>
+            <mode>cluster</mode>
+            <name>calculate for each ID the number of related Dataset, Publication and Unknown</name>
+            <class>eu.dnetlib.dhp.provision.SparkExtractRelationCount</class>
+            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
+            <spark-opts>--executor-memory ${sparkExecutorMemory} --driver-memory=${sparkDriverMemory} ${sparkExtraOPT}</spark-opts>
+            <arg>-mt</arg> <arg>yarn-cluster</arg>
+            <arg>--workingDirPath</arg><arg>${workingDirPath}</arg>
+            <arg>--relationPath</arg><arg>${graphPath}/relation</arg>
+        </spark>
+        <ok to="generateSummary"/>
+        <error to="Kill"/>
+    </action>
+
+    <action name="generateSummary">
+        <spark xmlns="uri:oozie:spark-action:0.2">
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <master>yarn-cluster</master>
+            <mode>cluster</mode>
+            <name>generate Summary</name>
+            <class>eu.dnetlib.dhp.provision.SparkGenerateSummary</class>
+            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
+            <spark-opts>--executor-memory ${sparkExecutorMemory} --driver-memory=${sparkDriverMemory} ${sparkExtraOPT}</spark-opts>
+            <arg>-mt</arg> <arg>yarn-cluster</arg>
+            <arg>--workingDirPath</arg><arg>${workingDirPath}</arg>
+            <arg>--graphPath</arg><arg>${graphPath}</arg>
+        </spark>
+        <ok to="indexSummary"/>
+        <error to="Kill"/>
+    </action>
+
+    <action name="indexSummary">
+        <spark xmlns="uri:oozie:spark-action:0.2">
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <master>yarn-cluster</master>
+            <mode>cluster</mode>
+            <name>index Summary</name>
+            <class>eu.dnetlib.dhp.provision.SparkIndexCollectionOnES</class>
+            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
+            <spark-opts>--executor-memory ${sparkExecutorMemory} --num-executors 20 --driver-memory=${sparkDriverMemory} ${sparkExtraOPT}</spark-opts>
+            <arg>-mt</arg> <arg>yarn-cluster</arg>
+            <arg>--sourcePath</arg><arg>${workingDirPath}/summary</arg>
+            <arg>--index</arg><arg>${index}_object</arg>
+        </spark>
+        <ok to="End"/>
+        <error to="Kill"/>
+    </action>
+    <end name="End"/>
+
+</workflow-app>
index_on_es.json (new file)

@@ -0,0 +1,20 @@
+[
+  {
+    "paramName": "mt",
+    "paramLongName": "master",
+    "paramDescription": "should be local or yarn",
+    "paramRequired": true
+  },
+  {
+    "paramName": "s",
+    "paramLongName": "sourcePath",
+    "paramDescription": "the path of the generated files to index",
+    "paramRequired": true
+  },
+  {
+    "paramName": "i",
+    "paramLongName": "index",
+    "paramDescription": "the index name",
+    "paramRequired": true
+  }
+]
input_generate_summary_parameters.json (new file)

@@ -0,0 +1,20 @@
+[
+  {
+    "paramName": "mt",
+    "paramLongName": "master",
+    "paramDescription": "should be local or yarn",
+    "paramRequired": true
+  },
+  {
+    "paramName": "w",
+    "paramLongName": "workingDirPath",
+    "paramDescription": "the working path where files are generated",
+    "paramRequired": true
+  },
+  {
+    "paramName": "g",
+    "paramLongName": "graphPath",
+    "paramDescription": "the graph path",
+    "paramRequired": true
+  }
+]
input_related_entities_parameters.json (new file)

@@ -0,0 +1,20 @@
+[
+  {
+    "paramName": "mt",
+    "paramLongName": "master",
+    "paramDescription": "should be local or yarn",
+    "paramRequired": true
+  },
+  {
+    "paramName": "w",
+    "paramLongName": "workingDirPath",
+    "paramDescription": "the working path where files are generated",
+    "paramRequired": true
+  },
+  {
+    "paramName": "r",
+    "paramLongName": "relationPath",
+    "paramDescription": "the relation path",
+    "paramRequired": true
+  }
+]
ExtractInfoTest.java (new file)

@@ -0,0 +1,48 @@
+package eu.dnetlib.dhp.provision;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import eu.dnetlib.dhp.provision.scholix.ScholixSummary;
+import org.apache.commons.io.IOUtils;
+import org.junit.Ignore;
+import org.junit.Test;
+
+public class ExtractInfoTest {
+
+    @Test
+    public void test() throws Exception {
+        final String json = IOUtils.toString(getClass().getResourceAsStream("record.json"));
+        ProvisionUtil.getItemType(json, ProvisionUtil.TARGETJSONPATH);
+    }
+
+    @Test
+    public void testSerialization() throws Exception {
+        ScholixSummary summary = new ScholixSummary();
+        summary.setDescription("description");
+        ObjectMapper mapper = new ObjectMapper();
+        String json = mapper.writeValueAsString(summary);
+        System.out.println(json);
+        System.out.println(mapper.readValue(json, ScholixSummary.class).getDescription());
+    }
+
+    @Test
+    @Ignore
+    public void testIndex() throws Exception {
+        SparkIndexCollectionOnES.main(
+                new String[] {
+                        "-mt", "local[*]",
+                        "-s", "/home/sandro/dli",
+                        "-i", "dli_object"
+                }
+        );
+    }
+}
record.json (new file)

@@ -0,0 +1 @@
+{"dataInfo":{"invisible":false,"inferred":null,"deletedbyinference":false,"trust":"0.9","inferenceprovenance":null,"provenanceaction":null},"lastupdatetimestamp":null,"relType":"references","subRelType":null,"relClass":"datacite","source":"50|f2123fce7e56c73dc8f1bf64ec59b477","target":"50|b618cbe39ba940a29993ac324e5f9621","collectedFrom":[{"key":"dli_________::datacite","value":"Datasets in Datacite","dataInfo":null}]}
dhp-workflows/pom.xml

@@ -18,6 +18,7 @@
         <module>dhp-distcp</module>
         <module>dhp-graph-mapper</module>
         <module>dhp-dedup</module>
+        <module>dhp-graph-provision</module>
     </modules>

     <pluginRepositories>
pom.xml

@@ -243,6 +243,14 @@
             <version>${vtd.version}</version>
         </dependency>

+        <dependency>
+            <groupId>org.elasticsearch</groupId>
+            <artifactId>elasticsearch-hadoop</artifactId>
+            <version>7.6.0</version>
+        </dependency>
+
+
+
         <dependency>
             <groupId>org.apache.oozie</groupId>
             <artifactId>oozie-client</artifactId>