package eu.dnetlib.dhp.oa.dedup;

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.schema.oaf.DataInfo;
import eu.dnetlib.dhp.schema.oaf.Oaf;
import eu.dnetlib.dhp.schema.oaf.Relation;
import eu.dnetlib.dhp.utils.ISLookupClientFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
import eu.dnetlib.pace.util.MapDocumentUtil;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.Optional;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import scala.Tuple2;

import java.io.IOException;

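/**
 * Propagates the outcome of the deduplication to the relations of the graph: relation endpoints
 * that were merged into a representative record are rewritten to point to the dedup root, the
 * original relations are flagged as deleted by inference, and the resulting relation set is
 * written to the dedup graph.
 */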
public class SparkPropagateRelation extends AbstractSparkAction {

    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
            .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

    public static final int NUM_PARTITIONS = 3000;

    enum FieldType {
        SOURCE,
        TARGET
    }

    static final String SOURCEJSONPATH = "$.source";
    static final String TARGETJSONPATH = "$.target";

    private static final Log log = LogFactory.getLog(SparkPropagateRelation.class);

    public SparkPropagateRelation(ArgumentApplicationParser parser, SparkSession spark) throws Exception {
        super(parser, spark);
    }

    public static void main(String[] args) throws Exception {
        ArgumentApplicationParser parser = new ArgumentApplicationParser(
                IOUtils.toString(
                        SparkCreateSimRels.class.getResourceAsStream(
                                "/eu/dnetlib/dhp/oa/dedup/propagateRelation_parameters.json")));
        parser.parseArgument(args);

        new SparkPropagateRelation(parser, getSparkSession(parser))
                .run(ISLookupClientFactory.getLookUpService(parser.get("isLookUpUrl")));
    }

    @Override
    public void run(ISLookUpService isLookUpService) {

        final String graphBasePath = parser.get("graphBasePath");
        final String workingPath = parser.get("workingPath");
        final String dedupGraphPath = parser.get("dedupGraphPath");

        System.out.println(String.format("graphBasePath: '%s'", graphBasePath));
        System.out.println(String.format("workingPath: '%s'", workingPath));
        System.out.println(String.format("dedupGraphPath: '%s'", dedupGraphPath));
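
        // the final relation set goes to the dedup graph, while the intermediate outputs
        // (source-fixed, newly propagated and flagged relations) are kept under the working path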
        final String relationsPath = DedupUtility.createEntityPath(dedupGraphPath, "relation");
        final String newRelsPath = DedupUtility.createEntityPath(workingPath, "newRels");
        final String fixedSourceId = DedupUtility.createEntityPath(workingPath, "fixedSourceId");
        final String deletedSourceId = DedupUtility.createEntityPath(workingPath, "deletedSourceId");

        final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
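
        // remove any leftover output from previous runs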
        deletePath(relationsPath);
        deletePath(newRelsPath);
        deletePath(fixedSourceId);
        deletePath(deletedSourceId);
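
        // load the merge relations produced by the deduplication and derive from the
        // 'merges' relations the map (merged entity id -> dedup root id)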
        final Dataset<Relation> mergeRels = spark.read()
                .load(DedupUtility.createMergeRelPath(workingPath, "*", "*"))
                .as(Encoders.bean(Relation.class));

        final JavaPairRDD<String, String> mergedIds = mergeRels
                .where("relClass == 'merges'")
                .select(mergeRels.col("source"), mergeRels.col("target"))
                .distinct()
                .toJavaRDD()
                .mapToPair((PairFunction<Row, String, String>) r -> new Tuple2<>(r.getString(1), r.getString(0)));
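
        // first pass over the raw relations: when the source id was merged,
        // rewrite it with the id of its dedup root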
        sc.textFile(DedupUtility.createEntityPath(graphBasePath, "relation"))
                .repartition(NUM_PARTITIONS)
                .mapToPair(
                        (PairFunction<String, String, String>) s ->
                                new Tuple2<>(MapDocumentUtil.getJPathString(SOURCEJSONPATH, s), s))
                .leftOuterJoin(mergedIds)
                .map((Function<Tuple2<String, Tuple2<String, Optional<String>>>, String>) v1 -> {
                    if (v1._2()._2().isPresent()) {
                        return replaceField(v1._2()._1(), v1._2()._2().get(), FieldType.SOURCE);
                    }
                    return v1._2()._1();
                })
                .saveAsTextFile(fixedSourceId, GzipCodec.class);
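
        // second pass: rewrite the target id in the same way, then keep only the
        // relations that actually involve a dedup record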
        sc.textFile(fixedSourceId)
                .mapToPair(
                        (PairFunction<String, String, String>) s ->
                                new Tuple2<>(MapDocumentUtil.getJPathString(TARGETJSONPATH, s), s))
                .leftOuterJoin(mergedIds)
                .map((Function<Tuple2<String, Tuple2<String, Optional<String>>>, String>) v1 -> {
                    if (v1._2()._2().isPresent()) {
                        return replaceField(v1._2()._1(), v1._2()._2().get(), FieldType.TARGET);
                    }
                    return v1._2()._1();
                })
                .filter(SparkPropagateRelation::containsDedup)
                .repartition(NUM_PARTITIONS)
                .saveAsTextFile(newRelsPath, GzipCodec.class);

        // flag the original relations whose endpoints were merged as deleted by inference,
        // first on the source and then on the target; untouched relations pass through unchanged
        sc.textFile(DedupUtility.createEntityPath(graphBasePath, "relation"))
                .repartition(NUM_PARTITIONS)
                .mapToPair((PairFunction<String, String, String>) s ->
                        new Tuple2<>(MapDocumentUtil.getJPathString(SOURCEJSONPATH, s), s))
                .leftOuterJoin(mergedIds)
                .map((Function<Tuple2<String, Tuple2<String, Optional<String>>>, String>) v1 -> {
                    if (v1._2()._2().isPresent()) {
                        return updateDeletedByInference(v1._2()._1(), Relation.class);
                    }
                    return v1._2()._1();
                })
                .saveAsTextFile(deletedSourceId, GzipCodec.class);
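
        // same flagging, this time joining on the target endpoint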
        sc.textFile(deletedSourceId)
                .repartition(NUM_PARTITIONS)
                .mapToPair(
                        (PairFunction<String, String, String>) s ->
                                new Tuple2<>(MapDocumentUtil.getJPathString(TARGETJSONPATH, s), s))
                .leftOuterJoin(mergedIds)
                .map((Function<Tuple2<String, Tuple2<String, Optional<String>>>, String>) v1 -> {
                    if (v1._2()._2().isPresent()) {
                        return updateDeletedByInference(v1._2()._1(), Relation.class);
                    }
                    return v1._2()._1();
                })
                .repartition(NUM_PARTITIONS)
                .saveAsTextFile(DedupUtility.createEntityPath(workingPath, "updated"), GzipCodec.class);
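
        // the final relation set is the union of the updated original relations
        // and the newly propagated ones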
        JavaRDD<String> newRels = sc.textFile(newRelsPath);

        sc.textFile(DedupUtility.createEntityPath(workingPath, "updated"))
                .union(newRels)
                .repartition(NUM_PARTITIONS)
                .saveAsTextFile(relationsPath, GzipCodec.class);
    }

    private void deletePath(String path) {
        try {
            Path p = new Path(path);
            FileSystem fs = FileSystem.get(spark.sparkContext().hadoopConfiguration());

            if (fs.exists(p)) {
                fs.delete(p, true);
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
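
    // true when either endpoint id of the serialized relation contains 'dedup', i.e. points to a dedup record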
    private static boolean containsDedup(final String json) {
        final String source = MapDocumentUtil.getJPathString(SOURCEJSONPATH, json);
        final String target = MapDocumentUtil.getJPathString(TARGETJSONPATH, json);

        return source.toLowerCase().contains("dedup") || target.toLowerCase().contains("dedup");
    }
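
    // rewrites the source or target of a serialized relation with the given id and
    // resets the deletedbyinference flag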
    private static String replaceField(final String json, final String id, final FieldType type) {
        try {
            Relation relation = OBJECT_MAPPER.readValue(json, Relation.class);
            if (relation.getDataInfo() == null) {
                relation.setDataInfo(new DataInfo());
            }
            relation.getDataInfo().setDeletedbyinference(false);
            switch (type) {
                case SOURCE:
                    relation.setSource(id);
                    return OBJECT_MAPPER.writeValueAsString(relation);
                case TARGET:
                    relation.setTarget(id);
                    return OBJECT_MAPPER.writeValueAsString(relation);
                default:
                    throw new IllegalArgumentException("unknown field type: " + type);
            }
        } catch (IOException e) {
            throw new RuntimeException("unable to deserialize json relation: " + json, e);
        }
    }
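
    // re-serializes the given OAF entity with deletedbyinference set to true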
    private static <T extends Oaf> String updateDeletedByInference(final String json, final Class<T> clazz) {
        try {
            Oaf entity = OBJECT_MAPPER.readValue(json, clazz);
            if (entity.getDataInfo() == null) {
                entity.setDataInfo(new DataInfo());
            }
            entity.getDataInfo().setDeletedbyinference(true);
            return OBJECT_MAPPER.writeValueAsString(entity);
        } catch (IOException e) {
            throw new RuntimeException("Unable to convert json", e);
        }
    }
}