package eu.dnetlib.dhp.oa.dedup;

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.oaf.DataInfo;
import eu.dnetlib.dhp.schema.oaf.Relation;
import eu.dnetlib.dhp.utils.ISLookupClientFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.Tuple2;

import static org.apache.spark.sql.functions.col;

/**
 * Spark action that propagates the merge relations produced by the deduplication
 * to the relation graph: relations pointing at merged entities are rewritten to
 * point at the corresponding dedup root, while the original relations are kept
 * and flagged as deleted by inference.
 */
public class SparkPropagateRelation extends AbstractSparkAction {
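
	// JSON mapper used by patchRelFn(); declared here on the assumption that it is
	// not already provided by AbstractSparkAction, and configured to tolerate
	// unknown properties in the serialized relations
	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
		.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
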
private static final Logger log = LoggerFactory.getLogger(SparkPropagateRelation.class);
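
	/**
	 * Indicates which end of a relation (source or target) an operation applies to.
	 */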
	enum FieldType {
		SOURCE,
		TARGET
}

	public SparkPropagateRelation(ArgumentApplicationParser parser, SparkSession spark) throws Exception {
		super(parser, spark);
}
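
	/**
	 * Entry point: parses the CLI arguments, registers the OAF model classes
	 * with Kryo, then runs the relation propagation.
	 */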
	public static void main(String[] args) throws Exception {
		ArgumentApplicationParser parser = new ArgumentApplicationParser(
			IOUtils.toString(SparkCreateSimRels.class.getResourceAsStream(
				"/eu/dnetlib/dhp/oa/dedup/propagateRelation_parameters.json")));
		parser.parseArgument(args);

		SparkConf conf = new SparkConf();
		conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
		conf.registerKryoClasses(ModelSupport.getOafModelClasses());

		new SparkPropagateRelation(parser, getSparkSession(conf))
			.run(ISLookupClientFactory.getLookUpService(parser.get("isLookUpUrl")));
}
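
	/**
	 * Loads the merge relations computed by the dedup phase, derives the mapping
	 * from each duplicate id to its dedup root id, and applies it to the relation
	 * graph: rewritten relations and the original ones (flagged as deleted by
	 * inference) are saved together under the dedup graph path.
	 */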
	@Override
	public void run(ISLookUpService isLookUpService) {
		final String graphBasePath = parser.get("graphBasePath");
		final String workingPath = parser.get("workingPath");
		final String dedupGraphPath = parser.get("dedupGraphPath");

		log.info("graphBasePath: '{}'", graphBasePath);
		log.info("workingPath: '{}'", workingPath);
		log.info("dedupGraphPath: '{}'", dedupGraphPath);

		final String outputRelationPath = DedupUtility.createEntityPath(dedupGraphPath, "relation");
		removeOutputDir(spark, outputRelationPath);

		// '<dedup root> merges <duplicate>' relations produced by the dedup phase
		Dataset<Relation> mergeRels = spark.read()
			.load(DedupUtility.createMergeRelPath(workingPath, "*", "*"))
			.as(Encoders.bean(Relation.class));

		// pairs (duplicate id, dedup root id): the merge target comes first so that
		// relations can be joined on the id of the merged entity
		Dataset<Tuple2<String, String>> mergedIds = mergeRels
			.where(col("relClass").equalTo("merges"))
			.select(col("source"), col("target"))
			.distinct()
			.map((MapFunction<Row, Tuple2<String, String>>)
				r -> new Tuple2<>(r.getString(1), r.getString(0)),
				Encoders.tuple(Encoders.STRING(), Encoders.STRING()))
			.cache();

		final String relationPath = DedupUtility.createEntityPath(graphBasePath, "relation");

		Dataset<Relation> rels = spark.read()
			.textFile(relationPath)
			.map(patchRelFn(), Encoders.bean(Relation.class));

		// rewrite both ends of the matched relations to the dedup root id, keeping
		// only the relations that now involve a dedup record
		Dataset<Relation> newRels =
			processDataset(
				processDataset(rels, mergedIds, FieldType.SOURCE, getFixRelFn(FieldType.SOURCE)),
				mergedIds, FieldType.TARGET, getFixRelFn(FieldType.TARGET))
			.filter(SparkPropagateRelation::containsDedup);

		// keep the original relations, flagging the matched ones as deleted by inference
		Dataset<Relation> updated = processDataset(
			processDataset(rels, mergedIds, FieldType.SOURCE, getDeletedFn()),
			mergedIds, FieldType.TARGET, getDeletedFn());

		save(newRels.union(updated), outputRelationPath, SaveMode.Overwrite);
}
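
	/**
	 * Keys each relation by its source or target id (according to {@code type}),
	 * left-joins it with the (duplicate id, root id) pairs and applies
	 * {@code mapFn} to each joined pair.
	 */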
	private static Dataset<Relation> processDataset(Dataset<Relation> rels, Dataset<Tuple2<String, String>> mergedIds, FieldType type,
			MapFunction<Tuple2<Tuple2<String, Relation>, Tuple2<String, String>>, Relation> mapFn) {
		final Dataset<Tuple2<String, Relation>> mapped = rels
			.map((MapFunction<Relation, Tuple2<String, Relation>>)
				r -> new Tuple2<>(getId(r, type), r),
				Encoders.tuple(Encoders.STRING(), Encoders.kryo(Relation.class)));
		return mapped
			.joinWith(mergedIds, mapped.col("_1").equalTo(mergedIds.col("_1")), "left_outer")
			.map(mapFn, Encoders.bean(Relation.class));
}
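
	/**
	 * Parses a JSON-encoded relation, making sure it carries a non-null DataInfo.
	 */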
	private static MapFunction<String, Relation> patchRelFn() {
		return value -> {
			final Relation rel = OBJECT_MAPPER.readValue(value, Relation.class);
			if (rel.getDataInfo() == null) {
				rel.setDataInfo(new DataInfo());
			}
			return rel;
		};
}
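
	/**
	 * Returns the id at the given end (source or target) of a relation.
	 */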
	private static String getId(Relation r, FieldType type) {
		switch (type) {
			case SOURCE:
				return r.getSource();
			case TARGET:
				return r.getTarget();
			default:
				throw new IllegalArgumentException("unexpected field type: " + type);
		}
}
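
	/**
	 * Rewrites the given end of a matched relation to the dedup root id and
	 * resets its deleted-by-inference flag; unmatched relations are returned
	 * unchanged.
	 */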
	private static MapFunction<Tuple2<Tuple2<String, Relation>, Tuple2<String, String>>, Relation> getFixRelFn(FieldType type) {
		return value -> {
			if (value._2() != null) {
				// the relation matched a merged entity: point it to the dedup root id
				Relation r = value._1()._2();
				String id = value._2()._2();
				if (r.getDataInfo() == null) {
					r.setDataInfo(new DataInfo());
				}
				r.getDataInfo().setDeletedbyinference(false);
				switch (type) {
					case SOURCE:
						r.setSource(id);
						return r;
					case TARGET:
						r.setTarget(id);
						return r;
					default:
						throw new IllegalArgumentException("unexpected field type: " + type);
				}
			}
			return value._1()._2();
		};
}
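
	/**
	 * Flags a matched relation as deleted by inference; unmatched relations are
	 * returned unchanged.
	 */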
	private static MapFunction<Tuple2<Tuple2<String, Relation>, Tuple2<String, String>>, Relation> getDeletedFn() {
		return value -> {
			if (value._2() != null) {
				Relation r = value._1()._2();
				if (r.getDataInfo() == null) {
					r.setDataInfo(new DataInfo());
				}
				r.getDataInfo().setDeletedbyinference(true);
				return r;
			}
			return value._1()._2();
		};
}
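
	/**
	 * Checks whether either end of the relation points to a dedup record.
	 */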
	private static boolean containsDedup(final Relation r) {
		return r.getSource().toLowerCase().contains("dedup") || r.getTarget().toLowerCase().contains("dedup");
}
}