diff --git a/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/SparkPropagateRelation.java b/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/SparkPropagateRelation.java
index 0fa41bd6d..d16a3a63d 100644
--- a/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/SparkPropagateRelation.java
+++ b/dhp-workflows/dhp-dedup-openaire/src/main/java/eu/dnetlib/dhp/oa/dedup/SparkPropagateRelation.java
@@ -1,19 +1,6 @@
 package eu.dnetlib.dhp.oa.dedup;
 
-import static org.apache.spark.sql.functions.col;
-
-import java.util.Objects;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.function.FilterFunction;
-import org.apache.spark.api.java.function.MapFunction;
-import org.apache.spark.sql.*;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.common.ModelSupport;
@@ -21,198 +8,149 @@ import eu.dnetlib.dhp.schema.oaf.DataInfo;
 import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.dhp.utils.ISLookupClientFactory;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
+import org.apache.commons.beanutils.BeanUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.FilterFunction;
+import org.apache.spark.api.java.function.MapFunction;
+import org.apache.spark.api.java.function.ReduceFunction;
+import org.apache.spark.sql.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import scala.Tuple2;
 import scala.Tuple3;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.Objects;
+
+import static org.apache.spark.sql.functions.col;
+
 public class SparkPropagateRelation extends AbstractSparkAction {
 
-    private static final Logger log = LoggerFactory.getLogger(SparkPropagateRelation.class);
+    private static final Logger log = LoggerFactory.getLogger(SparkPropagateRelation.class);
 
-    enum FieldType {
-        SOURCE, TARGET
-    }
+    private static Encoder<Relation> REL_BEAN_ENC = Encoders.bean(Relation.class);
 
-    public SparkPropagateRelation(ArgumentApplicationParser parser, SparkSession spark) {
-        super(parser, spark);
-    }
+    private static Encoder<Relation> REL_KRYO_ENC = Encoders.kryo(Relation.class);
 
-    public static void main(String[] args) throws Exception {
-        ArgumentApplicationParser parser = new ArgumentApplicationParser(
-            IOUtils
-                .toString(
-                    SparkPropagateRelation.class
-                        .getResourceAsStream(
-                            "/eu/dnetlib/dhp/oa/dedup/propagateRelation_parameters.json")));
+    public SparkPropagateRelation(ArgumentApplicationParser parser, SparkSession spark) {
+        super(parser, spark);
+    }
 
-        parser.parseArgument(args);
+    public static void main(String[] args) throws Exception {
+        ArgumentApplicationParser parser = new ArgumentApplicationParser(
+            IOUtils
+                .toString(
+                    SparkPropagateRelation.class
+                        .getResourceAsStream(
+                            "/eu/dnetlib/dhp/oa/dedup/propagateRelation_parameters.json")));
 
-        SparkConf conf = new SparkConf();
-        conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
-        conf.registerKryoClasses(ModelSupport.getOafModelClasses());
+        parser.parseArgument(args);
 
-        new SparkPropagateRelation(parser, getSparkSession(conf))
-            .run(ISLookupClientFactory.getLookUpService(parser.get("isLookUpUrl")));
-    }
+        SparkConf conf = new SparkConf();
+        conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
+        conf.registerKryoClasses(ModelSupport.getOafModelClasses());
 
-    @Override
-    public void run(ISLookUpService isLookUpService) {
+        new SparkPropagateRelation(parser, getSparkSession(conf))
+            .run(ISLookupClientFactory.getLookUpService(parser.get("isLookUpUrl")));
+    }
 
-        final String graphBasePath = parser.get("graphBasePath");
-        final String workingPath = parser.get("workingPath");
-        final String graphOutputPath = parser.get("graphOutputPath");
+    @Override
+    public void run(ISLookUpService isLookUpService) {
 
-        log.info("graphBasePath: '{}'", graphBasePath);
-        log.info("workingPath: '{}'", workingPath);
-        log.info("graphOutputPath: '{}'", graphOutputPath);
+        final String graphBasePath = parser.get("graphBasePath");
+        final String workingPath = parser.get("workingPath");
+        final String graphOutputPath = parser.get("graphOutputPath");
 
-        final String outputRelationPath = DedupUtility.createEntityPath(graphOutputPath, "relation");
-        removeOutputDir(spark, outputRelationPath);
+        log.info("graphBasePath: '{}'", graphBasePath);
+        log.info("workingPath: '{}'", workingPath);
+        log.info("graphOutputPath: '{}'", graphOutputPath);
 
-        Dataset<Relation> mergeRels = spark
-            .read()
-            .load(DedupUtility.createMergeRelPath(workingPath, "*", "*"))
-            .as(Encoders.bean(Relation.class));
+        final String outputRelationPath = DedupUtility.createEntityPath(graphOutputPath, "relation");
+        removeOutputDir(spark, outputRelationPath);
 
-        // <mergedObjectID, dedupID>
-        Dataset<Tuple2<String, String>> mergedIds = mergeRels
-            .where(col("relClass").equalTo(ModelConstants.MERGES))
-            .select(col("source"), col("target"))
-            .distinct()
-            .map(
-                (MapFunction<Row, Tuple2<String, String>>) r -> new Tuple2<>(r.getString(1), r.getString(0)),
-                Encoders.tuple(Encoders.STRING(), Encoders.STRING()))
-            .cache();
+        Dataset<Relation> mergeRels = spark
+            .read()
+            .load(DedupUtility.createMergeRelPath(workingPath, "*", "*"))
+            .as(REL_BEAN_ENC);
 
-        final String relationPath = DedupUtility.createEntityPath(graphBasePath, "relation");
+        // <mergedObjectID, dedupID>
+        Dataset<Row> mergedIds = mergeRels
+            .where(col("relClass").equalTo(ModelConstants.MERGES))
+            .select(col("source").as("dedupID"), col("target").as("mergedObjectID"))
+            .distinct()
+            .cache();
 
-        Dataset<Relation> rels = spark.read().textFile(relationPath).map(patchRelFn(), Encoders.bean(Relation.class));
+        Dataset<Row> allRels = spark.read()
+            .schema(REL_BEAN_ENC.schema())
+            .json(DedupUtility.createEntityPath(graphBasePath, "relation"));
 
-        Dataset<Relation> newRels = createNewRels(rels, mergedIds, getFixRelFn());
+        Dataset<Relation> dedupedRels = allRels
+            .joinWith(mergedIds, allRels.col("source").equalTo(mergedIds.col("mergedObjectID")), "left_outer")
+            .joinWith(mergedIds, col("_1.target").equalTo(mergedIds.col("mergedObjectID")), "left_outer")
+            .select("_1._1", "_1._2.dedupID", "_2.dedupID")
+            .as(Encoders.tuple(REL_BEAN_ENC, Encoders.STRING(), Encoders.STRING()))
+            .flatMap(SparkPropagateRelation::addInferredRelations, REL_KRYO_ENC);
 
-        Dataset<Relation> updated = processDataset(
-            processDataset(rels, mergedIds, FieldType.SOURCE, getDeletedFn()),
-            mergedIds,
-            FieldType.TARGET,
-            getDeletedFn());
+        Dataset<Relation> processedRelations = distinctRelations(dedupedRels.union(mergeRels.map((MapFunction<Relation, Relation>) r -> r, REL_KRYO_ENC)))
+            .filter((FilterFunction<Relation>) r -> !Objects.equals(r.getSource(), r.getTarget()));
 
-        save(
-            distinctRelations(
-                newRels
-                    .union(updated)
-                    .union(mergeRels)
-                    .map((MapFunction<Relation, Relation>) r -> r, Encoders.kryo(Relation.class)))
-                .filter((FilterFunction<Relation>) r -> !Objects.equals(r.getSource(), r.getTarget())),
-            outputRelationPath, SaveMode.Overwrite);
-    }
+        save(processedRelations, outputRelationPath, SaveMode.Overwrite);
+    }
 
-    private Dataset<Relation> distinctRelations(Dataset<Relation> rels) {
-        return rels
-            .filter(getRelationFilterFunction())
-            .groupByKey(
-                (MapFunction<Relation, String>) r -> String
-                    .join(r.getSource(), r.getTarget(), r.getRelType(), r.getSubRelType(), r.getRelClass()),
-                Encoders.STRING())
-            .agg(new RelationAggregator().toColumn())
-            .map((MapFunction<Tuple2<String, Relation>, Relation>) Tuple2::_2, Encoders.bean(Relation.class));
-    }
+    private static Iterator<Relation> addInferredRelations(Tuple3<Relation, String, String> t) throws Exception {
+        Relation existingRel = t._1();
+        String newSource = t._2();
+        String newTarget = t._3();
 
-    // redirect the relations to the dedupID
-    private static Dataset<Relation> createNewRels(
-        Dataset<Relation> rels, // all the relations to be redirected
-        Dataset<Tuple2<String, String>> mergedIds, // merge rels: <mergedObjectID, dedupID>
-        MapFunction<Tuple2<Tuple2<Tuple3<String, Relation, String>, Tuple2<String, String>>, Tuple2<String, String>>, Relation> mapRel) {
+        if (newSource == null && newTarget == null) {
+            return Collections.singleton(t._1()).iterator();
+        }
 
-        // <sourceID, relation, targetID>
-        Dataset<Tuple3<String, Relation, String>> mapped = rels
-            .map(
-                (MapFunction<Relation, Tuple3<String, Relation, String>>) r -> new Tuple3<>(getId(r, FieldType.SOURCE),
-                    r, getId(r, FieldType.TARGET)),
-                Encoders.tuple(Encoders.STRING(), Encoders.kryo(Relation.class), Encoders.STRING()));
+        // update existing relation
+        if (existingRel.getDataInfo() == null) {
+            existingRel.setDataInfo(new DataInfo());
+        }
+        existingRel.getDataInfo().setDeletedbyinference(true);
 
-        // < <sourceID, relation, targetID>, <sourceID, dedupID> >
-        Dataset<Tuple2<Tuple3<String, Relation, String>, Tuple2<String, String>>> relSource = mapped
-            .joinWith(mergedIds, mapped.col("_1").equalTo(mergedIds.col("_1")), "left_outer");
+        // Create new relation inferred by dedupIDs
+        Relation inferredRel = (Relation) BeanUtils.cloneBean(existingRel);
 
-        // < <<sourceID, relation, targetID>, <sourceID, dedupID>>, <targetID, dedupID> >
-        Dataset<Tuple2<Tuple2<Tuple3<String, Relation, String>, Tuple2<String, String>>, Tuple2<String, String>>> relSourceTarget = relSource
-            .joinWith(mergedIds, relSource.col("_1._3").equalTo(mergedIds.col("_1")), "left_outer");
+        inferredRel.setDataInfo((DataInfo) BeanUtils.cloneBean(existingRel.getDataInfo()));
+        inferredRel.getDataInfo().setDeletedbyinference(false);
 
-        return relSourceTarget
-            .filter(
-                (FilterFunction<Tuple2<Tuple2<Tuple3<String, Relation, String>, Tuple2<String, String>>, Tuple2<String, String>>>) r -> r
-                    ._1()
-                    ._1() != null || r._2() != null)
-            .map(mapRel, Encoders.bean(Relation.class))
-            .distinct();
-    }
+        if (newSource != null)
+            inferredRel.setSource(newSource);
 
-    private static Dataset<Relation> processDataset(
-        Dataset<Relation> rels,
-        Dataset<Tuple2<String, String>> mergedIds,
-        FieldType type,
-        MapFunction<Tuple2<Tuple2<String, Relation>, Tuple2<String, String>>, Relation> mapFn) {
-        final Dataset<Tuple2<String, Relation>> mapped = rels
-            .map(
-                (MapFunction<Relation, Tuple2<String, Relation>>) r -> new Tuple2<>(getId(r, type), r),
-                Encoders.tuple(Encoders.STRING(), Encoders.kryo(Relation.class)));
-        return mapped
-            .joinWith(mergedIds, mapped.col("_1").equalTo(mergedIds.col("_1")), "left_outer")
-            .map(mapFn, Encoders.bean(Relation.class));
-    }
+        if (newTarget != null)
+            inferredRel.setTarget(newTarget);
 
-    private FilterFunction<Relation> getRelationFilterFunction() {
-        return r -> StringUtils.isNotBlank(r.getSource()) ||
-            StringUtils.isNotBlank(r.getTarget()) ||
-            StringUtils.isNotBlank(r.getRelType()) ||
-            StringUtils.isNotBlank(r.getSubRelType()) ||
-            StringUtils.isNotBlank(r.getRelClass());
-    }
+        return Arrays.asList(existingRel, inferredRel).iterator();
+    }
 
-    private static String getId(Relation r, FieldType type) {
-        switch (type) {
-            case SOURCE:
-                return r.getSource();
-            case TARGET:
-                return r.getTarget();
-            default:
-                throw new IllegalArgumentException("");
-        }
-    }
-
-    private static MapFunction<Tuple2<Tuple2<Tuple3<String, Relation, String>, Tuple2<String, String>>, Tuple2<String, String>>, Relation> getFixRelFn() {
-        return value -> {
-
-            Relation r = value._1()._1()._2();
-            String newSource = value._1()._2() != null ? value._1()._2()._2() : null;
-            String newTarget = value._2() != null ? value._2()._2() : null;
-
-            if (r.getDataInfo() == null) {
-                r.setDataInfo(new DataInfo());
-            }
-            r.getDataInfo().setDeletedbyinference(false);
-
-            if (newSource != null)
-                r.setSource(newSource);
-
-            if (newTarget != null)
-                r.setTarget(newTarget);
-
-            return r;
-        };
-    }
-
-    private static MapFunction<Tuple2<Tuple2<String, Relation>, Tuple2<String, String>>, Relation> getDeletedFn() {
-        return value -> {
-            if (value._2() != null) {
-                Relation r = value._1()._2();
-                if (r.getDataInfo() == null) {
-                    r.setDataInfo(new DataInfo());
-                }
-                r.getDataInfo().setDeletedbyinference(true);
-                return r;
-            }
-            return value._1()._2();
-        };
-    }
+    private Dataset<Relation> distinctRelations(Dataset<Relation> rels) {
+        return rels
+            .filter(getRelationFilterFunction())
+            .groupByKey(
+                (MapFunction<Relation, String>) r -> String
+                    .join(" ", r.getSource(), r.getTarget(), r.getRelType(), r.getSubRelType(), r.getRelClass()),
+                Encoders.STRING())
+            .reduceGroups((ReduceFunction<Relation>) (b, a) -> {
+                b.mergeFrom(a);
+                return b;
+            }
+            )
+            .map((MapFunction<Tuple2<String, Relation>, Relation>) Tuple2::_2, REL_BEAN_ENC);
+    }
+    private FilterFunction<Relation> getRelationFilterFunction() {
+        return r -> StringUtils.isNotBlank(r.getSource()) ||
+            StringUtils.isNotBlank(r.getTarget()) ||
+            StringUtils.isNotBlank(r.getRelType()) ||
+            StringUtils.isNotBlank(r.getSubRelType()) ||
+            StringUtils.isNotBlank(r.getRelClass());
+    }
 }
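
Reviewer note: the behavioural core of this rewrite is addInferredRelations(), fed by the two left-outer joins against the mergedIds lookup. Below is a minimal, self-contained sketch of that redirection logic, not part of the patch: SimpleRel and the example identifiers are hypothetical stand-ins for the Relation bean, used only to illustrate what the method emits when one endpoint of a relation has been merged into a dedup record.

// PropagationSketch.java - illustration only, not part of the patch
import java.util.ArrayList;
import java.util.List;

public class PropagationSketch {

    // Hypothetical, simplified stand-in for eu.dnetlib.dhp.schema.oaf.Relation
    static class SimpleRel {
        String source;
        String target;
        boolean deletedByInference;

        SimpleRel(String source, String target, boolean deletedByInference) {
            this.source = source;
            this.target = target;
            this.deletedByInference = deletedByInference;
        }

        SimpleRel copy() {
            return new SimpleRel(source, target, deletedByInference);
        }

        @Override
        public String toString() {
            return source + " -> " + target + " (deletedbyinference=" + deletedByInference + ")";
        }
    }

    // Mirrors the shape of addInferredRelations(): newSource/newTarget are the dedup IDs
    // produced by the two left-outer joins (null when that endpoint was not merged).
    static List<SimpleRel> addInferredRelations(SimpleRel existing, String newSource, String newTarget) {
        List<SimpleRel> out = new ArrayList<>();
        if (newSource == null && newTarget == null) {
            out.add(existing); // neither endpoint was deduplicated: pass the relation through
            return out;
        }
        existing.deletedByInference = true; // keep the original relation, but flag it
        SimpleRel inferred = existing.copy();
        inferred.deletedByInference = false;
        if (newSource != null)
            inferred.source = newSource; // redirect the source to its dedup ID
        if (newTarget != null)
            inferred.target = newTarget; // redirect the target to its dedup ID
        out.add(existing);
        out.add(inferred);
        return out;
    }

    public static void main(String[] args) {
        // hypothetical IDs: "pub_1" was merged into dedup record "dedup_A", "pub_2" was not merged
        SimpleRel rel = new SimpleRel("pub_1", "pub_2", false);
        addInferredRelations(rel, "dedup_A", null).forEach(System.out::println);
        // prints:
        // pub_1 -> pub_2 (deletedbyinference=true)
        // dedup_A -> pub_2 (deletedbyinference=false)
    }
}

As in the patch, the original relation is retained but flagged deletedbyinference, while the copy redirected to the dedup ID keeps deletedbyinference = false; distinctRelations() then collapses duplicates grouped by source, target, relType, subRelType and relClass.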