Miriam Baglioni 2020-04-30 20:22:07 +02:00
parent 43053a286d
commit dbf3ba051a
5 changed files with 84 additions and 74 deletions

View File: dhp-blacklist/pom.xml

@@ -11,6 +11,7 @@
 	<artifactId>dhp-blacklist</artifactId>
 	<dependencies>
 		<dependency>
 			<groupId>eu.dnetlib.dhp</groupId>
 			<artifactId>dhp-graph-mapper</artifactId>

View File: eu/dnetlib/dhp/blacklist/PrepareMergedRelationJob.java

@@ -1,9 +1,10 @@
 package eu.dnetlib.dhp.blacklist;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.gson.Gson;
-import eu.dnetlib.dhp.application.ArgumentApplicationParser;
-import eu.dnetlib.dhp.schema.oaf.Relation;
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
+
+import java.util.Optional;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.MapFunction;
@@ -14,78 +15,78 @@ import org.apache.spark.sql.SparkSession;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.Arrays;
-import java.util.List;
-import java.util.Optional;
-
-import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.schema.oaf.Relation;
 public class PrepareMergedRelationJob {
 	private static final Logger log = LoggerFactory.getLogger(PrepareMergedRelationJob.class);
 	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
 
 	public static void main(String[] args) throws Exception {
 		String jsonConfiguration = IOUtils
 			.toString(
 				PrepareMergedRelationJob.class
 					.getResourceAsStream(
 						"/eu/dnetlib/dhp/blacklist/input_preparerelation_parameters.json"));
 		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
 		parser.parseArgument(args);
 
 		Boolean isSparkSessionManaged = Optional
 			.ofNullable(parser.get("isSparkSessionManaged"))
 			.map(Boolean::valueOf)
 			.orElse(Boolean.TRUE);
 		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
 
 		String inputPath = parser.get("sourcePath");
 		log.info("inputPath: {}", inputPath);
 
 		String outputPath = parser.get("outputPath");
-		log.info("outputPath: {} " , outputPath);
+		log.info("outputPath: {} ", outputPath);
 
 		SparkConf conf = new SparkConf();
 		conf.set("hive.metastore.uris", parser.get("hive_metastore_uris"));
 
 		runWithSparkHiveSession(
 			conf,
 			isSparkSessionManaged,
 			spark -> {
-				// removeOutputDir(spark, potentialUpdatePath);
-				// removeOutputDir(spark, alreadyLinkedPath);
 				selectMergesRelations(
 					spark,
 					inputPath,
 					outputPath);
 			});
 	}
 
 	private static void selectMergesRelations(SparkSession spark, String inputPath, String outputPath) {
 		Dataset<Relation> relation = readRelations(spark, inputPath);
 		relation.createOrReplaceTempView("relation");
 
-		spark.sql("Select * from relation where relclass = 'merges' and datainfo.deletedbyinference = false")
-			.as(Encoders.bean(Relation.class))
-			.toJSON()
-			.write()
-			.mode(SaveMode.Overwrite)
-			.option("compression","gzip")
-			.text(outputPath);
+		spark
+			.sql("Select * from relation " +
+				"where relclass = 'merges' " +
+				"and datainfo.deletedbyinference = false")
+			.as(Encoders.bean(Relation.class))
+			.toJSON()
+			.write()
+			.mode(SaveMode.Overwrite)
+			.option("compression", "gzip")
+			.text(outputPath);
 	}
 
 	public static org.apache.spark.sql.Dataset<Relation> readRelations(
 		SparkSession spark, String inputPath) {
 		return spark
 			.read()
 			.textFile(inputPath)
 			.map(
 				(MapFunction<String, Relation>) value -> OBJECT_MAPPER.readValue(value, Relation.class),
 				Encoders.bean(Relation.class));
 	}
 }
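For context on what this job produces: selectMergesRelations writes each surviving "merges" Relation as one gzip-compressed JSON line, so a downstream job can rebuild the typed Dataset with the same bean encoder used by readRelations. A minimal sketch, assuming a local session and an example output path (the class name, master and path are illustrative, not part of this commit):

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.schema.oaf.Relation;

import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

public class ReadMergesRelationsSketch {

	// static mapper, as in PrepareMergedRelationJob, so the lambda below
	// does not capture a per-instance ObjectMapper
	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	public static void main(String[] args) {
		SparkSession spark = SparkSession
			.builder()
			.appName("read-merges-relations")
			.master("local[*]") // illustrative; the real jobs run on a cluster
			.getOrCreate();

		// one JSON document per text line, decoded into the Relation bean,
		// mirroring readRelations above; the path is an example only
		Dataset<Relation> merges = spark
			.read()
			.textFile("/tmp/workingDir/mergesRelation")
			.map(
				(MapFunction<String, Relation>) value -> OBJECT_MAPPER.readValue(value, Relation.class),
				Encoders.bean(Relation.class));

		merges.show(10, false);
		spark.stop();
	}
}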

View File: eu/dnetlib/dhp/blacklist/SparkRemoveBlacklistedRelationJob.java

@@ -55,19 +55,20 @@ public class SparkRemoveBlacklistedRelationJob {
 		log.info("mergesPath {}: ", mergesPath);
 
 		SparkConf conf = new SparkConf();
+		conf.set("hive.metastore.uris", parser.get("hive_metastore_uris"));
 
 		runWithSparkSession(
 			conf,
 			isSparkSessionManaged,
 			spark -> {
 				removeBlacklistedRelations(
 					spark,
 					inputPath,
 					blacklistPath,
 					outputPath,
 					mergesPath);
 			});
 	}
 
 	private static void removeBlacklistedRelations(SparkSession spark, String blacklistPath, String inputPath,
@@ -78,7 +79,6 @@ public class SparkRemoveBlacklistedRelationJob {
 		Dataset<Relation> dedupSource = blackListed
 			.joinWith(mergesRelation, blackListed.col("source").equalTo(mergesRelation.col("target")), "left_outer")
-			// .joinWith(inputRelation,blackListed.col("target").equalTo(inputRelation.col("target")),"left_outer")
 			.map(c -> {
 				Optional<Relation> merged = Optional.ofNullable(c._2());
 				Relation bl = c._1();
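The join above is the core of the blacklist repair: each blacklisted relation is left-outer joined to the merges relations on blackListed.source == mergesRelation.target, so a source id that was deduplicated away can be rewritten to its representative id. A hedged sketch of the full map step (the Relation getters/setters are assumed bean accessors; the method body actually committed may differ from what this hunk shows):

import java.util.Optional;

import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;

import eu.dnetlib.dhp.schema.oaf.Relation;

import scala.Tuple2;

public class BlacklistJoinSketch {

	static Dataset<Relation> rewriteSources(Dataset<Relation> blackListed, Dataset<Relation> mergesRelation) {
		return blackListed
			// keep every blacklist entry, attaching the merges relation
			// (if any) whose target equals the blacklisted source
			.joinWith(
				mergesRelation,
				blackListed.col("source").equalTo(mergesRelation.col("target")),
				"left_outer")
			.map(
				(MapFunction<Tuple2<Relation, Relation>, Relation>) c -> {
					Relation bl = c._1();
					// if the source was merged into a representative,
					// point the blacklist entry at the representative id
					Optional
						.ofNullable(c._2())
						.ifPresent(merged -> bl.setSource(merged.getSource()));
					return bl;
				},
				Encoders.bean(Relation.class));
	}
}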

View File: blacklist Oozie workflow.xml

@@ -63,7 +63,7 @@
 				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
 			</spark-opts>
 			<arg>--sourcePath</arg><arg>${sourcePath}</arg>
-			<arg>--outputPath</arg><arg>${workingDir}/relation</arg>
+			<arg>--outputPath</arg><arg>${workingDir}/mergesRelation</arg>
 			<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
 		</spark>
 		<ok to="apply_blacklist"/>
@@ -89,6 +89,7 @@
 			<arg>--sourcePath</arg><arg>${sourcePath}</arg>
 			<arg>--outputPath</arg><arg>${workingDir}/relation</arg>
 			<arg>--hdfsPath</arg><arg>${workingDir}/blacklist</arg>
+			<arg>--mergesPath</arg><arg>${workingDir}/mergesRelation</arg>
 		</spark>
 		<ok to="End"/>
 		<error to="Kill"/>

View File: blacklist job parameters (JSON)

@@ -22,5 +22,12 @@
 		"paramLongName": "isSparkSessionManaged",
 		"paramDescription": "true if the spark session is managed",
 		"paramRequired": false
+	},
+	{
+		"paramName": "m",
+		"paramLongName": "mergesPath",
+		"paramDescription": "true if the spark session is managed",
+		"paramRequired": true
 	}
 ]
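Taken together with the workflow change above, this new entry closes the loop: the prepare step writes to ${workingDir}/mergesRelation, the workflow passes that directory as --mergesPath, and this JSON definition lets ArgumentApplicationParser accept it (note that its paramDescription appears to be copy-pasted from the isSparkSessionManaged entry). A small sketch of how the argument then reaches the job, following the parser pattern visible in PrepareMergedRelationJob; the inlined single-entry configuration and class name are illustrative only, the real job loads the full file from its classpath resource:

import eu.dnetlib.dhp.application.ArgumentApplicationParser;

public class MergesPathArgSketch {

	public static void main(String[] args) throws Exception {
		// single-entry configuration mirroring the JSON added above
		String jsonConfiguration = "[{\"paramName\":\"m\", "
			+ "\"paramLongName\":\"mergesPath\", "
			+ "\"paramDescription\":\"path of the prepared merges relations\", "
			+ "\"paramRequired\":true}]";

		ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		// the workflow's <arg> elements arrive as plain command-line arguments
		parser.parseArgument(new String[] { "--mergesPath", "/tmp/workingDir/mergesRelation" });

		System.out.println("mergesPath: " + parser.get("mergesPath"));
	}
}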