set configuration property in openorgs duplicates wf

miconis 2021-10-07 15:39:55 +02:00
parent 9646b9fd98
commit 611ca511db
4 changed files with 761 additions and 744 deletions

View File

@@ -1,3 +1,4 @@
 package eu.dnetlib.dhp.oa.dedup;
 
 import java.io.IOException;
@@ -5,10 +6,7 @@ import java.util.Optional;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.JavaPairRDD;
-import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.api.java.function.FilterFunction;
 import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.api.java.function.PairFunction;
 import org.apache.spark.sql.Dataset;
@@ -21,7 +19,6 @@ import org.slf4j.LoggerFactory;
 import org.xml.sax.SAXException;
 
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
-import eu.dnetlib.dhp.oa.dedup.model.Block;
 import eu.dnetlib.dhp.schema.oaf.DataInfo;
 import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.dhp.utils.ISLookupClientFactory;
@@ -31,121 +28,127 @@ import eu.dnetlib.pace.config.DedupConfig;
 import eu.dnetlib.pace.model.MapDocument;
 import eu.dnetlib.pace.util.MapDocumentUtil;
 import scala.Tuple2;
-import scala.Tuple3;
 
 public class SparkWhitelistSimRels extends AbstractSparkAction {
 
     private static final Logger log = LoggerFactory.getLogger(SparkCreateSimRels.class);
 
     private static final String WHITELIST_SEPARATOR = "####";
 
     public SparkWhitelistSimRels(ArgumentApplicationParser parser, SparkSession spark) {
         super(parser, spark);
     }
 
     public static void main(String[] args) throws Exception {
         ArgumentApplicationParser parser = new ArgumentApplicationParser(
             IOUtils
                 .toString(
                     SparkCreateSimRels.class
                         .getResourceAsStream(
                             "/eu/dnetlib/dhp/oa/dedup/whitelistSimRels_parameters.json")));
         parser.parseArgument(args);
 
         SparkConf conf = new SparkConf();
         new SparkWhitelistSimRels(parser, getSparkSession(conf))
             .run(ISLookupClientFactory.getLookUpService(parser.get("isLookUpUrl")));
     }
 
     @Override
     public void run(ISLookUpService isLookUpService)
         throws DocumentException, IOException, ISLookUpException, SAXException {
 
         // read oozie parameters
         final String graphBasePath = parser.get("graphBasePath");
         final String isLookUpUrl = parser.get("isLookUpUrl");
         final String actionSetId = parser.get("actionSetId");
         final String workingPath = parser.get("workingPath");
         final int numPartitions = Optional
             .ofNullable(parser.get("numPartitions"))
             .map(Integer::valueOf)
             .orElse(NUM_PARTITIONS);
         final String whiteListPath = parser.get("whiteListPath");
 
         log.info("numPartitions: '{}'", numPartitions);
         log.info("graphBasePath: '{}'", graphBasePath);
         log.info("isLookUpUrl: '{}'", isLookUpUrl);
         log.info("actionSetId: '{}'", actionSetId);
         log.info("workingPath: '{}'", workingPath);
         log.info("whiteListPath: '{}'", whiteListPath);
 
         JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 
-        //file format: source####target
-        Dataset<Tuple2<String, String>> whiteListRels = spark.createDataset(sc
-            .textFile(whiteListPath)
-            //check if the line is in the correct format: id1####id2
-            .filter(s -> s.contains(WHITELIST_SEPARATOR) && s.split(WHITELIST_SEPARATOR).length == 2)
-            .map(s -> new Tuple2<>(s.split(WHITELIST_SEPARATOR)[0], s.split(WHITELIST_SEPARATOR)[1]))
-            .rdd(),
-            Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
+        // file format: source####target
+        Dataset<Tuple2<String, String>> whiteListRels = spark
+            .createDataset(
+                sc
+                    .textFile(whiteListPath)
+                    // check if the line is in the correct format: id1####id2
+                    .filter(s -> s.contains(WHITELIST_SEPARATOR) && s.split(WHITELIST_SEPARATOR).length == 2)
+                    .map(s -> new Tuple2<>(s.split(WHITELIST_SEPARATOR)[0], s.split(WHITELIST_SEPARATOR)[1]))
+                    .rdd(),
+                Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
 
         // for each dedup configuration
         for (DedupConfig dedupConf : getConfigurations(isLookUpService, actionSetId)) {
 
             final String entity = dedupConf.getWf().getEntityType();
             final String subEntity = dedupConf.getWf().getSubEntityValue();
             log.info("Adding whitelist simrels for: '{}'", subEntity);
 
             final String outputPath = DedupUtility.createSimRelPath(workingPath, actionSetId, subEntity);
 
-            Dataset<Tuple2<String, String>> entities = spark.createDataset(sc
-                .textFile(DedupUtility.createEntityPath(graphBasePath, subEntity))
-                .repartition(numPartitions)
-                .mapToPair(
-                    (PairFunction<String, String, String>) s -> {
-                        MapDocument d = MapDocumentUtil.asMapDocumentWithJPath(dedupConf, s);
-                        return new Tuple2<>(d.getIdentifier(), "present");
-                    })
-                .rdd(),
-                Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
+            Dataset<Tuple2<String, String>> entities = spark
+                .createDataset(
+                    sc
+                        .textFile(DedupUtility.createEntityPath(graphBasePath, subEntity))
+                        .repartition(numPartitions)
+                        .mapToPair(
+                            (PairFunction<String, String, String>) s -> {
+                                MapDocument d = MapDocumentUtil.asMapDocumentWithJPath(dedupConf, s);
+                                return new Tuple2<>(d.getIdentifier(), "present");
+                            })
+                        .rdd(),
+                    Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
 
             Dataset<Tuple2<String, String>> whiteListRels1 = whiteListRels
                 .joinWith(entities, whiteListRels.col("_1").equalTo(entities.col("_1")), "inner")
-                .map((MapFunction<Tuple2<Tuple2<String, String>, Tuple2<String, String>>, Tuple2<String, String>>) Tuple2::_1, Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
+                .map(
+                    (MapFunction<Tuple2<Tuple2<String, String>, Tuple2<String, String>>, Tuple2<String, String>>) Tuple2::_1,
+                    Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
 
             Dataset<Tuple2<String, String>> whiteListRels2 = whiteListRels1
                 .joinWith(entities, whiteListRels1.col("_2").equalTo(entities.col("_1")), "inner")
-                .map((MapFunction<Tuple2<Tuple2<String, String>, Tuple2<String, String>>, Tuple2<String, String>>) Tuple2::_1, Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
+                .map(
+                    (MapFunction<Tuple2<Tuple2<String, String>, Tuple2<String, String>>, Tuple2<String, String>>) Tuple2::_1,
+                    Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
 
             Dataset<Relation> whiteListSimRels = whiteListRels2
-                .map((MapFunction<Tuple2<String, String>, Relation>)
-                    r -> createSimRel(r._1(), r._2(), entity),
-                    Encoders.bean(Relation.class)
-                );
+                .map(
+                    (MapFunction<Tuple2<String, String>, Relation>) r -> createSimRel(r._1(), r._2(), entity),
+                    Encoders.bean(Relation.class));
 
             saveParquet(whiteListSimRels, outputPath, SaveMode.Append);
         }
     }
 
     private Relation createSimRel(String source, String target, String entity) {
         final Relation r = new Relation();
         r.setSource(source);
         r.setTarget(target);
         r.setSubRelType("dedupSimilarity");
         r.setRelClass("isSimilarTo");
         r.setDataInfo(new DataInfo());
 
         switch (entity) {
             case "result":
                 r.setRelType("resultResult");
                 break;
             case "organization":
                 r.setRelType("organizationOrganization");
                 break;
             default:
                 throw new IllegalArgumentException("unmanaged entity type: " + entity);
         }
         return r;
     }
 }
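
Note on the whitelist input read above: each line of the file at whiteListPath must hold exactly two entity identifiers joined by the "####" separator, and malformed lines are silently dropped by the filter. A minimal, self-contained sketch of that validation and splitting logic (the sample line values here are made up for illustration):

import java.util.Arrays;
import java.util.List;

public class WhitelistLineCheck {

    private static final String WHITELIST_SEPARATOR = "####";

    public static void main(String[] args) {
        List<String> lines = Arrays.asList(
            "id1####id2",           // kept: splits into exactly two ids
            "id-without-separator", // dropped: no separator
            "a####b####c");         // dropped: splits into three fields

        lines.stream()
            .filter(s -> s.contains(WHITELIST_SEPARATOR) && s.split(WHITELIST_SEPARATOR).length == 2)
            .map(s -> s.split(WHITELIST_SEPARATOR))
            .forEach(p -> System.out.println(p[0] + " -> " + p[1])); // prints: id1 -> id2
    }
}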

View File

@@ -1,7 +1,8 @@
 package eu.dnetlib.dhp.oa.dedup;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import java.util.concurrent.TimeUnit;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpGet;
@@ -10,16 +11,21 @@ import org.apache.http.impl.client.HttpClients;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.concurrent.TimeUnit;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 
 public class UpdateOpenorgsJob {
 
     private static final Logger log = LoggerFactory.getLogger(UpdateOpenorgsJob.class);
 
     public static void main(String[] args) throws Exception {
         ArgumentApplicationParser parser = new ArgumentApplicationParser(
-            IOUtils.toString(SparkCreateSimRels.class.getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/updateOpenorgsJob_parameters.json")));
-        parser.parseArgument(args);
+            IOUtils
+                .toString(
+                    SparkCreateSimRels.class
+                        .getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/updateOpenorgsJob_parameters.json")));
+        parser.parseArgument(args);
 
         final String apiUrl = parser.get("apiUrl");
         final int delay = Integer.parseInt(parser.get("delay"));
@@ -28,17 +34,17 @@ public class UpdateOpenorgsJob {
         log.info("delay: '{}'", delay);
 
         APIResponse res = httpCall(apiUrl);
-        while(res!=null && res.getStatus().equals(ImportStatus.RUNNING)){
+        while (res != null && res.getStatus().equals(ImportStatus.RUNNING)) {
             TimeUnit.MINUTES.sleep(delay);
             res = httpCall(apiUrl + "/status");
         }
 
-        if (res==null) {
+        if (res == null) {
             log.error("Openorgs Update FAILED: No response");
             throw new RuntimeException("Openorgs Update FAILED: No response");
         }
 
-        if (res.getStatus()==null || !res.getStatus().equals(ImportStatus.SUCCESS)) {
+        if (res.getStatus() == null || !res.getStatus().equals(ImportStatus.SUCCESS)) {
             log.error("Openorgs Update FAILED: '{}' - '{}'", res.getStatus(), res.getMessage());
             throw new RuntimeException(res.getMessage());
         }
@@ -107,9 +113,5 @@
 }
 
 enum ImportStatus {
-    SUCCESS,
-    FAILED,
-    RUNNING,
-    NOT_LAUNCHED,
-    NOT_YET_STARTED
+    SUCCESS, FAILED, RUNNING, NOT_LAUNCHED, NOT_YET_STARTED
 }
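
Note on the control flow above: UpdateOpenorgsJob triggers the OpenOrgs update once, then polls the service's /status endpoint every `delay` minutes until the import leaves the RUNNING state, failing unless the terminal status is SUCCESS. A self-contained sketch of that polling pattern, where ApiClient and Status are hypothetical stand-ins for the class's httpCall method and ImportStatus enum:

import java.util.concurrent.TimeUnit;

public class PollUntilDone {

    enum Status {
        SUCCESS, FAILED, RUNNING, NOT_LAUNCHED, NOT_YET_STARTED
    }

    // hypothetical stand-in for the HTTP call made by UpdateOpenorgsJob.httpCall
    interface ApiClient {
        Status call(String url);
    }

    static Status poll(ApiClient api, String apiUrl, int delayMinutes) throws InterruptedException {
        Status res = api.call(apiUrl); // trigger the import once
        while (res == Status.RUNNING) {
            TimeUnit.MINUTES.sleep(delayMinutes); // wait between checks
            res = api.call(apiUrl + "/status");   // then re-check progress
        }
        return res; // terminal status; the caller should fail unless SUCCESS
    }
}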

View File

@@ -267,6 +267,12 @@
         <action name="update_openorgs">
             <java>
+                <configuration>
+                    <property>
+                        <name>oozie.launcher.mapreduce.user.classpath.first</name>
+                        <value>true</value>
+                    </property>
+                </configuration>
                 <main-class>eu.dnetlib.dhp.oa.dedup.UpdateOpenorgsJob</main-class>
                 <arg>--apiUrl</arg><arg>${apiUrl}</arg>
                 <arg>--delay</arg><arg>5</arg>
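
Note on the property added above: prefixing a Hadoop property with oozie.launcher. makes Oozie apply it to the launcher job that runs the <java> action, and mapreduce.user.classpath.first=true puts the workflow's own jars ahead of the Hadoop system jars on that launcher's classpath. This is the usual remedy for dependency version clashes in an action's JVM (for example around the Jackson ObjectMapper used by UpdateOpenorgsJob), which is presumably why the commit sets it for the update_openorgs action.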