package eu.dnetlib.dhp.oa.dedup;

import static eu.dnetlib.dhp.schema.common.ModelConstants.DNET_PROVENANCE_ACTIONS;
import static eu.dnetlib.dhp.schema.common.ModelConstants.PROVENANCE_DEDUP;
import static org.apache.spark.sql.functions.*;

import java.io.IOException;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.sql.*;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.catalyst.encoders.RowEncoder;
import org.apache.spark.sql.expressions.UserDefinedFunction;
import org.apache.spark.sql.expressions.Window;
import org.apache.spark.sql.expressions.WindowSpec;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructType;
import org.dom4j.DocumentException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;

import com.google.common.hash.Hashing;
import com.kwartile.lib.cc.ConnectedComponent;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.schema.common.EntityType;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.oaf.*;
import eu.dnetlib.dhp.schema.oaf.utils.PidType;
import eu.dnetlib.dhp.utils.ISLookupClientFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
import eu.dnetlib.pace.config.DedupConfig;
import scala.Tuple3;
import scala.collection.JavaConversions;
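
/**
 * Spark action that creates the merge relations for each dedup configuration of the given
 * actionset: it loads the similarity relations, groups them into connected components,
 * elects a pivot record for every component and links the pivot's dedup id to each group
 * member with a merges/isMergedIn pair of relations.
 *
 * A minimal invocation sketch; the argument names are taken from the parser.get() calls in
 * this class, while the spark-submit form, jar name and values are illustrative only:
 *
 * <pre>{@code
 * spark-submit --class eu.dnetlib.dhp.oa.dedup.SparkCreateMergeRels dhp-dedup-openaire.jar \
 *   --graphBasePath /graph --workingPath /working --isLookUpUrl http://services/is/LookUp \
 *   --actionSetId dedup-result --cutConnectedComponent 200 \
 *   --hiveMetastoreUris thrift://hive:9083 --pivotHistoryDatabase pivots_db
 * }</pre>
 */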
public class SparkCreateMergeRels extends AbstractSparkAction {

	private static final Logger log = LoggerFactory.getLogger(SparkCreateMergeRels.class);

	public SparkCreateMergeRels(ArgumentApplicationParser parser, SparkSession spark) {
		super(parser, spark);
	}

	public static void main(String[] args) throws Exception {
		ArgumentApplicationParser parser = new ArgumentApplicationParser(
			IOUtils
				.toString(
					SparkCreateMergeRels.class
						.getResourceAsStream(
							"/eu/dnetlib/dhp/oa/dedup/createCC_parameters.json")));
		parser.parseArgument(args);

		final String isLookUpUrl = parser.get("isLookUpUrl");
		log.info("isLookUpUrl: '{}'", isLookUpUrl);

		SparkConf conf = new SparkConf();
		conf.set("hive.metastore.uris", parser.get("hiveMetastoreUris"));
		conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");

		conf.registerKryoClasses(ModelSupport.getOafModelClasses());

		new SparkCreateMergeRels(parser, getSparkWithHiveSession(conf))
			.run(ISLookupClientFactory.getLookUpService(isLookUpUrl));
	}

	@Override
	public void run(ISLookUpService isLookUpService)
		throws ISLookUpException, DocumentException, IOException, SAXException {

		final String graphBasePath = parser.get("graphBasePath");
		final String workingPath = parser.get("workingPath");
		final String isLookUpUrl = parser.get("isLookUpUrl");
		final String actionSetId = parser.get("actionSetId");
		int cut = Optional
			.ofNullable(parser.get("cutConnectedComponent"))
			.map(Integer::valueOf)
			.orElse(0);

		final String pivotHistoryDatabase = parser.get("pivotHistoryDatabase");

		log.info("connected component cut: '{}'", cut);
		log.info("graphBasePath: '{}'", graphBasePath);
		log.info("isLookUpUrl: '{}'", isLookUpUrl);
		log.info("actionSetId: '{}'", actionSetId);
		log.info("workingPath: '{}'", workingPath);
		for (DedupConfig dedupConf : getConfigurations(isLookUpService, actionSetId)) {
			final String subEntity = dedupConf.getWf().getSubEntityValue();
			final Class<OafEntity> clazz = ModelSupport.entityTypes.get(EntityType.valueOf(subEntity));

			log.info("Creating merge rels for: '{}'", subEntity);

			final int maxIterations = dedupConf.getWf().getMaxIterations();
			log.info("Max iterations {}", maxIterations);

			final String mergeRelPath = DedupUtility.createMergeRelPath(workingPath, actionSetId, subEntity);
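
			// load the similarity relations computed by the previous phase of the dedup workflow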
			final Dataset<Row> simRels = spark
				.read()
				.load(DedupUtility.createSimRelPath(workingPath, actionSetId, subEntity))
				.select("source", "target");
			UserDefinedFunction hashUDF = functions
				.udf(
					(String s) -> hash(s), DataTypes.LongType);

			// <hash(id), id>
			Dataset<Row> vertexIdMap = simRels
				.selectExpr("source as id")
				.union(simRels.selectExpr("target as id"))
				.distinct()
				.withColumn("vertexId", hashUDF.apply(functions.col("id")));

			// transform simrels into pairs of numeric ids
			final Dataset<Row> edges = spark
				.read()
				.load(DedupUtility.createSimRelPath(workingPath, actionSetId, subEntity))
				.select("source", "target")
				.withColumn("source", hashUDF.apply(functions.col("source")))
				.withColumn("target", hashUDF.apply(functions.col("target")));

			// resolve connected components
			// ("vertexId", "groupId")
			Dataset<Row> cliques = ConnectedComponent
				.runOnPairs(edges, 50, spark);

			// transform "vertexId" back to its original string value
			// groupId is kept numeric as its string value is not used
			// ("id", "groupId")
			Dataset<Row> rawMergeRels = cliques
				.join(vertexIdMap, JavaConversions.asScalaBuffer(Collections.singletonList("vertexId")), "inner")
				.drop("vertexId")
				.distinct();

			// empty dataframe if the pivot history database is not used
			Dataset<Row> pivotHistory = spark
				.createDataset(
					Collections.emptyList(),
					RowEncoder
						.apply(StructType.fromDDL("id STRING, lastUsage STRING")));

			if (StringUtils.isNotBlank(pivotHistoryDatabase)) {
				pivotHistory = spark
					.read()
					.table(pivotHistoryDatabase + "." + subEntity)
					.selectExpr("id", "lastUsage");
			}

			// depending on the result type, collectedfrom and dateofacceptance are evaluated differently
			String collectedfromExpr = "false AS collectedfrom";
			String dateExpr = "'' AS date";

			if (Result.class.isAssignableFrom(clazz)) {
				if (Publication.class.isAssignableFrom(clazz)) {
					collectedfromExpr = "array_contains(collectedfrom.key, '" + ModelConstants.CROSSREF_ID
						+ "') AS collectedfrom";
				} else if (eu.dnetlib.dhp.schema.oaf.Dataset.class.isAssignableFrom(clazz)) {
					collectedfromExpr = "array_contains(collectedfrom.key, '" + ModelConstants.DATACITE_ID
						+ "') AS collectedfrom";
				}

				dateExpr = "dateofacceptance.value AS date";
			}

			// cap pidType at w3id as from there on they are considered equal
			UserDefinedFunction mapPid = udf(
				(String s) -> Math.min(PidType.tryValueOf(s).ordinal(), PidType.w3id.ordinal()), DataTypes.IntegerType);
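
			// blank or invalid dates are replaced with a date one week in the future, so
			// that records lacking a valid date sort last in the ascending ordering below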
			UserDefinedFunction validDate = udf((String date) -> {
				if (StringUtils.isNotBlank(date)
					&& date.matches(DatePicker.DATE_PATTERN) && DatePicker.inRange(date)) {
					return date;
				}
				return LocalDate.now().plusWeeks(1).toString();
			}, DataTypes.StringType);
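
			// project, for each entity, the fields used to rank the pivot candidates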
			Dataset<Row> pivotingData = spark
				.read()
				.schema(Encoders.bean(clazz).schema())
				.json(DedupUtility.createEntityPath(graphBasePath, subEntity))
				.selectExpr(
					"id",
					"regexp_extract(id, '^\\\\d+\\\\|([^_]+).*::', 1) AS pidType",
					collectedfromExpr,
					dateExpr)
				.withColumn("pidType", mapPid.apply(col("pidType"))) // ordinal of pid type
				.withColumn("date", validDate.apply(col("date")));

			// ordering used to select the pivot id of each group
			WindowSpec w = Window
				.partitionBy("groupId")
				.orderBy(
					col("lastUsage").desc_nulls_last(),
					col("pidType").asc_nulls_last(),
					col("collectedfrom").desc_nulls_last(),
					col("date").asc_nulls_last(),
					col("id").asc_nulls_last());
			Dataset<Relation> output = rawMergeRels
				.join(pivotHistory, JavaConversions.asScalaBuffer(Collections.singletonList("id")), "full")
				.join(pivotingData, JavaConversions.asScalaBuffer(Collections.singletonList("id")), "left")
				.withColumn("pivot", functions.first("id").over(w))
				.withColumn("position", functions.row_number().over(w))
				.flatMap(
					(FlatMapFunction<Row, Tuple3<String, String, String>>) (Row r) -> {
						String id = r.getAs("id");
						String dedupId = IdGenerator.generate(id);

						String pivot = r.getAs("pivot");
						String pivotDedupId = IdGenerator.generate(pivot);

						// filter out id == pivotDedupId: those are caused by claims expressed
						// on the pivotDedupId; their information will be merged after creating
						// the dedup record
						if (id.equals(pivotDedupId)) {
							return Collections.emptyIterator();
						}

						ArrayList<Tuple3<String, String, String>> res = new ArrayList<>();

						// singleton pivots have a null groupId as they do not match any rawMergeRels
						if (r.isNullAt(r.fieldIndex("groupId"))) {
							// the record exists in the graph if it matches pivotingData
							if (!r.isNullAt(r.fieldIndex("collectedfrom"))) {
								// create the relation with the old dedup id
								res.add(new Tuple3<>(id, dedupId, null));
							}
							return res.iterator();
						}

						// this was a pivot in a previous graph, but it has been merged into a
						// new group with a different pivot
						if (!r.isNullAt(r.fieldIndex("lastUsage")) && !pivot.equals(id)
							&& !dedupId.equals(pivotDedupId)) {
							// materialize the previous dedup record as a merge relation with the new one
							res.add(new Tuple3<>(dedupId, pivotDedupId, null));
						}

						// add merge relations, respecting the optional connected component cut
						if (cut <= 0 || r.<Integer> getAs("position") <= cut) {
							res.add(new Tuple3<>(id, pivotDedupId, pivot));
						}

						return res.iterator();
					}, Encoders.tuple(Encoders.STRING(), Encoders.STRING(), Encoders.STRING()))
				.distinct()
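				// map each (id, dedupId, pivot) tuple into the symmetric pair of merge relations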
				.flatMap(
					(FlatMapFunction<Tuple3<String, String, String>, Relation>) (Tuple3<String, String, String> r) -> {
						String id = r._1();
						String dedupId = r._2();
						String pivot = r._3();

						ArrayList<Relation> res = new ArrayList<>();
						res.add(rel(pivot, dedupId, id, ModelConstants.MERGES, dedupConf));
						res.add(rel(pivot, id, dedupId, ModelConstants.IS_MERGED_IN, dedupConf));

						return res.iterator();
					}, Encoders.bean(Relation.class));

			saveParquet(output, mergeRelPath, SaveMode.Overwrite);
		}
	}
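
	/**
	 * Builds a dedup Relation between source and target with the given semantics,
	 * setting the provenance information and, when available, storing the pivot id
	 * among the relation properties.
	 */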
	private static Relation rel(String pivot, String source, String target, String relClass, DedupConfig dedupConf) {

		String entityType = dedupConf.getWf().getEntityType();

		Relation r = new Relation();
		r.setSource(source);
		r.setTarget(target);
		r.setRelClass(relClass);
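		// relType duplicates the entity type in camel case, e.g. "resultResult" for entityType "result"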
		r.setRelType(entityType + entityType.substring(0, 1).toUpperCase() + entityType.substring(1));
		r.setSubRelType(ModelConstants.DEDUP);

		DataInfo info = new DataInfo();
		info.setDeletedbyinference(false);
		info.setInferred(true);
		info.setInvisible(false);
		info.setInferenceprovenance(dedupConf.getWf().getConfigurationId());
		Qualifier provenanceAction = new Qualifier();
		provenanceAction.setClassid(PROVENANCE_DEDUP);
		provenanceAction.setClassname(PROVENANCE_DEDUP);
		provenanceAction.setSchemeid(DNET_PROVENANCE_ACTIONS);
		provenanceAction.setSchemename(DNET_PROVENANCE_ACTIONS);
		info.setProvenanceaction(provenanceAction);

		// TODO calculate the trust value based on the similarity score of the elements in the CC

		r.setDataInfo(info);

		if (pivot != null) {
			KeyValue pivotKV = new KeyValue();
			pivotKV.setKey("pivot");
			pivotKV.setValue(pivot);

			r.setProperties(Arrays.asList(pivotKV));
		}
		return r;
	}
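
	/**
	 * Hashes an id to a stable 64-bit value (murmur3 128-bit hash truncated to a long),
	 * used as the numeric vertex id by the connected components algorithm.
	 */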
	public static long hash(final String id) {
		return Hashing.murmur3_128().hashString(id).asLong();
	}
}