package eu.dnetlib.dhp.oa.dedup;

import java.io.IOException;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.dom4j.DocumentException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.schema.oaf.Organization;
import eu.dnetlib.dhp.schema.oaf.Relation;
import eu.dnetlib.dhp.utils.ISLookupClientFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;

import scala.Tuple2;
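
/**
 * Spark action that materializes the dedup records for the organization entity type: it reads the
 * organizations from the input graph together with the 'merges' relations produced by the dedup
 * scan, and copies into the working directory only the records acting as root of a merge group.
 */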
public class SparkCreateOrgsDedupRecord extends AbstractSparkAction {

	private static final Logger log = LoggerFactory.getLogger(SparkCreateOrgsDedupRecord.class);

	public SparkCreateOrgsDedupRecord(ArgumentApplicationParser parser, SparkSession spark) {
		super(parser, spark);
	}
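
	/**
	 * Parses the arguments defined in copyOpenorgs_parameters.json and runs the action; the
	 * parameters read by this class are isLookUpUrl, graphBasePath, actionSetId, workingPath
	 * and the optional numPartitions.
	 */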
	public static void main(String[] args) throws Exception {
		ArgumentApplicationParser parser = new ArgumentApplicationParser(
			IOUtils
				.toString(
					SparkCreateOrgsDedupRecord.class
						.getResourceAsStream(
							"/eu/dnetlib/dhp/oa/dedup/copyOpenorgs_parameters.json")));
		parser.parseArgument(args);

		SparkConf conf = new SparkConf();
		new SparkCreateOrgsDedupRecord(parser, getSparkSession(conf))
			.run(ISLookupClientFactory.getLookUpService(parser.get("isLookUpUrl")));
	}

	@Override
	public void run(ISLookUpService isLookUpService)
		throws DocumentException, IOException, ISLookUpException {

		// read oozie parameters
		final String graphBasePath = parser.get("graphBasePath");
		final String actionSetId = parser.get("actionSetId");
		final String workingPath = parser.get("workingPath");
		final int numPartitions = Optional
			.ofNullable(parser.get("numPartitions"))
			.map(Integer::valueOf)
			.orElse(NUM_PARTITIONS);

		log.info("numPartitions: '{}'", numPartitions);
		log.info("graphBasePath: '{}'", graphBasePath);
		log.info("actionSetId: '{}'", actionSetId);
		log.info("workingPath: '{}'", workingPath);

		log.info("Copying organization dedup records to the working dir");

		final String outputPath = DedupUtility.createDedupRecordPath(workingPath, actionSetId, "organization");
		final String entityPath = DedupUtility.createEntityPath(graphBasePath, "organization");
		final String mergeRelsPath = DedupUtility.createMergeRelPath(workingPath, actionSetId, "organization");
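
		// build the root organization records and save them as gzip-compressed json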
		rootOrganization(spark, entityPath, mergeRelsPath)
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(outputPath);
	}
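
	/**
	 * Builds the dataset of root organizations, i.e. the organizations from the entities input
	 * that appear as source of at least one 'merges' relation.
	 */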
	public static Dataset<Organization> rootOrganization(
		final SparkSession spark,
		final String entitiesInputPath,
		final String mergeRelsPath) {

		JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
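
		// load the raw organization entities, keyed by their id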
		JavaPairRDD<String, Organization> entities = sc
			.textFile(entitiesInputPath)
			.map(it -> OBJECT_MAPPER.readValue(it, Organization.class))
			.mapToPair(o -> new Tuple2<>(o.getId(), o));

		log.info("Number of organization entities processed: {}", entities.count());

		// collect the root ids (the ids appearing as source of 'merges' relations)
		JavaPairRDD<String, String> roots = spark
			.read()
			.load(mergeRelsPath)
			.as(Encoders.bean(Relation.class))
			.where("relClass == 'merges'")
			.map(
				(MapFunction<Relation, Tuple2<String, String>>) r -> new Tuple2<>(r.getSource(), "root"),
				Encoders.tuple(Encoders.STRING(), Encoders.STRING()))
			.toJavaRDD()
			.mapToPair(t -> t)
			.distinct();
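
		// keep only the entities whose id appears among the root ids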
		Dataset<Organization> rootOrgs = spark
			.createDataset(
				entities
					.leftOuterJoin(roots)
					.filter(e -> e._2()._2().isPresent()) // if it has been joined with 'root' then it's a root record
					.map(e -> e._2()._1())
					.rdd(),
				Encoders.bean(Organization.class));

		log.info("Number of root organizations: {}", rootOrgs.count());

		return rootOrgs;
	}
}