forked from D-Net/dnet-hadoop

minor

This commit is contained in:
parent 43053a286d
commit dbf3ba051a
@@ -11,6 +11,7 @@
 	<artifactId>dhp-blacklist</artifactId>
 	<dependencies>
+
 		<dependency>
 			<groupId>eu.dnetlib.dhp</groupId>
 			<artifactId>dhp-graph-mapper</artifactId>
@@ -1,9 +1,10 @@
 package eu.dnetlib.dhp.blacklist;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.gson.Gson;
-import eu.dnetlib.dhp.application.ArgumentApplicationParser;
-import eu.dnetlib.dhp.schema.oaf.Relation;
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
+
+import java.util.Optional;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.MapFunction;
@@ -14,11 +15,10 @@ import org.apache.spark.sql.SparkSession;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.Arrays;
-import java.util.List;
-import java.util.Optional;
-
-import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.schema.oaf.Relation;
 
 public class PrepareMergedRelationJob {
 
@@ -56,8 +56,6 @@ public class PrepareMergedRelationJob {
 			conf,
 			isSparkSessionManaged,
 			spark -> {
-				// removeOutputDir(spark, potentialUpdatePath);
-				// removeOutputDir(spark, alreadyLinkedPath);
 				selectMergesRelations(
 					spark,
 					inputPath,
@@ -70,7 +68,10 @@ public class PrepareMergedRelationJob {
 		Dataset<Relation> relation = readRelations(spark, inputPath);
 		relation.createOrReplaceTempView("relation");
 
-		spark.sql("Select * from relation where relclass = 'merges' and datainfo.deletedbyinference = false")
+		spark
+			.sql("Select * from relation " +
+				"where relclass = 'merges' " +
+				"and datainfo.deletedbyinference = false")
 			.as(Encoders.bean(Relation.class))
 			.toJSON()
 			.write()
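The reformatted statement above is the heart of the prepare step: it selects every relation with relClass 'merges' that dedup did not delete by inference, and persists the result as JSON. As a runnable illustration of the same pattern, here is a self-contained sketch; the Rel bean is a flattened stand-in for eu.dnetlib.dhp.schema.oaf.Relation (whose deleted flag actually sits under datainfo.deletedbyinference), and the input/output paths are taken from args for the sketch only.

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.spark.api.java.function.MapFunction;
    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Encoders;
    import org.apache.spark.sql.SparkSession;

    public class MergesSelectionSketch {

        // Flattened stand-in for the real Relation bean (assumption made for brevity).
        public static class Rel {
            private String source;
            private String target;
            private String relClass;
            private boolean deletedbyinference;

            public String getSource() { return source; }
            public void setSource(String s) { source = s; }
            public String getTarget() { return target; }
            public void setTarget(String t) { target = t; }
            public String getRelClass() { return relClass; }
            public void setRelClass(String r) { relClass = r; }
            public boolean isDeletedbyinference() { return deletedbyinference; }
            public void setDeletedbyinference(boolean d) { deletedbyinference = d; }
        }

        public static void main(String[] args) {
            SparkSession spark = SparkSession.builder()
                .appName(MergesSelectionSketch.class.getSimpleName())
                .master("local[*]")
                .getOrCreate();

            // readRelations in the job presumably does the same: one JSON-encoded
            // relation per line, decoded into a typed Dataset. A fresh ObjectMapper
            // per record keeps the closure trivially serializable.
            Dataset<Rel> relation = spark
                .read()
                .textFile(args[0])
                .map(
                    (MapFunction<String, Rel>) value -> new ObjectMapper().readValue(value, Rel.class),
                    Encoders.bean(Rel.class));

            relation.createOrReplaceTempView("relation");

            // Keep only 'merges' relations that were not deleted by inference.
            spark
                .sql("SELECT * FROM relation " +
                    "WHERE relClass = 'merges' " +
                    "AND deletedbyinference = false")
                .as(Encoders.bean(Rel.class))
                .toJSON()
                .write()
                .option("compression", "gzip")
                .text(args[1]);

            spark.stop();
        }
    }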
@@ -55,7 +55,6 @@ public class SparkRemoveBlacklistedRelationJob {
 		log.info("mergesPath {}: ", mergesPath);
 
 		SparkConf conf = new SparkConf();
-		conf.set("hive.metastore.uris", parser.get("hive_metastore_uris"));
 
 		runWithSparkSession(
 			conf,
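Dropping the hive.metastore.uris setting leaves a plain Spark session, managed through the usual dnet-hadoop helper. The real SparkSessionSupport lives in eu.dnetlib.dhp.common; the sketch below is not its source, it only illustrates the shape of the contract that runWithSparkSession provides around the function it receives.

    import java.util.function.Consumer;

    import org.apache.spark.SparkConf;
    import org.apache.spark.sql.SparkSession;

    public class SparkSessionSupportSketch {

        // Runs fn inside a SparkSession built from conf. The session is stopped
        // afterwards only when this code owns it (isSparkSessionManaged = true),
        // so tests or callers holding an outer session can opt out.
        public static void runWithSparkSession(
            SparkConf conf,
            Boolean isSparkSessionManaged,
            Consumer<SparkSession> fn) {

            SparkSession spark = SparkSession.builder().config(conf).getOrCreate();
            try {
                fn.accept(spark);
            } finally {
                if (Boolean.TRUE.equals(isSparkSessionManaged)) {
                    spark.stop();
                }
            }
        }
    }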
@@ -68,6 +67,8 @@ public class SparkRemoveBlacklistedRelationJob {
 				outputPath,
 				mergesPath);
 			});
+
+
 	}
 
 	private static void removeBlacklistedRelations(SparkSession spark, String blacklistPath, String inputPath,
@@ -78,7 +79,6 @@ public class SparkRemoveBlacklistedRelationJob {
 
 		Dataset<Relation> dedupSource = blackListed
 			.joinWith(mergesRelation, blackListed.col("source").equalTo(mergesRelation.col("target")), "left_outer")
-			// .joinWith(inputRelation,blackListed.col("target").equalTo(inputRelation.col("target")),"left_outer")
 			.map(c -> {
 				Optional<Relation> merged = Optional.ofNullable(c._2());
 				Relation bl = c._1();
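The join kept here is the core of the blacklist cleanup: each blacklisted relation is left-outer-joined with the dedup 'merges' relations on blacklisted.source = merges.target, so a blacklisted identifier that was merged away can be redirected to its representative. A compact sketch of that step follows, again with a simplified bean in place of Relation; the redirection rule in the map body (replace the source with the merge's source) is stated as an assumption, not quoted from the job.

    import java.util.Optional;

    import org.apache.spark.api.java.function.MapFunction;
    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Encoders;

    import scala.Tuple2;

    public class BlacklistJoinSketch {

        // Minimal stand-in for the real Relation bean.
        public static class Rel {
            private String source;
            private String target;
            public String getSource() { return source; }
            public void setSource(String s) { source = s; }
            public String getTarget() { return target; }
            public void setTarget(String t) { target = t; }
        }

        static Dataset<Rel> redirectMergedSources(Dataset<Rel> blackListed, Dataset<Rel> mergesRelation) {
            return blackListed
                // left_outer keeps every blacklisted relation, with the matching
                // 'merges' relation (if any) as the right side of the tuple.
                .joinWith(
                    mergesRelation,
                    blackListed.col("source").equalTo(mergesRelation.col("target")),
                    "left_outer")
                .map((MapFunction<Tuple2<Rel, Rel>, Rel>) c -> {
                    Optional<Rel> merged = Optional.ofNullable(c._2());
                    Rel bl = c._1();
                    // Assumption: when the blacklisted source was merged into a
                    // representative, the relation is re-pointed at that representative.
                    merged.ifPresent(m -> bl.setSource(m.getSource()));
                    return bl;
                }, Encoders.bean(Rel.class));
        }
    }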
@@ -63,7 +63,7 @@
 				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
 			</spark-opts>
 			<arg>--sourcePath</arg><arg>${sourcePath}</arg>
-			<arg>--outputPath</arg><arg>${workingDir}/relation</arg>
+			<arg>--outputPath</arg><arg>${workingDir}/mergesRelation</arg>
 			<arg>--hive_metastore_uris</arg><arg>${hive_metastore_uris}</arg>
 		</spark>
 		<ok to="apply_blacklist"/>
@@ -89,6 +89,7 @@
 			<arg>--sourcePath</arg><arg>${sourcePath}</arg>
 			<arg>--outputPath</arg><arg>${workingDir}/relation</arg>
 			<arg>--hdfsPath</arg><arg>${workingDir}/blacklist</arg>
+			<arg>--mergesPath</arg><arg>${workingDir}/mergesRelation</arg>
 		</spark>
 		<ok to="End"/>
 		<error to="Kill"/>
@@ -22,5 +22,12 @@
 		"paramLongName": "isSparkSessionManaged",
 		"paramDescription": "true if the spark session is managed",
 		"paramRequired": false
+	},
+	{
+		"paramName": "m",
+		"paramLongName": "mergesPath",
+		"paramDescription": "the path where the relations with relClass 'merges' are stored",
+		"paramRequired": true
 	}
 ]
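A new required parameter only takes effect if the job reads it, and the call site presumably mirrors the existing parameters. A sketch under stated assumptions: the classpath location of this JSON file is hypothetical, and the ArgumentApplicationParser constructor-from-JSON and parseArgument calls follow the pattern used across dnet-hadoop, with parser.get as seen in the job above.

    import java.util.Optional;

    import org.apache.commons.io.IOUtils;

    import eu.dnetlib.dhp.application.ArgumentApplicationParser;

    public class BlacklistParamsSketch {

        public static void main(String[] args) throws Exception {
            // Hypothetical classpath location of the parameter file shown above.
            String jsonConfiguration = IOUtils
                .toString(BlacklistParamsSketch.class
                    .getResourceAsStream("/eu/dnetlib/dhp/blacklist/sparkblacklist_parameters.json"));

            ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
            parser.parseArgument(args);

            // mergesPath is declared with "paramRequired": true, so parsing is
            // expected to fail fast when the argument is missing.
            String mergesPath = parser.get("mergesPath");

            // isSparkSessionManaged stays optional; absent means the session is managed.
            Boolean isSparkSessionManaged = Optional
                .ofNullable(parser.get("isSparkSessionManaged"))
                .map(Boolean::valueOf)
                .orElse(Boolean.TRUE);

            System.out.println("mergesPath: " + mergesPath
                + ", isSparkSessionManaged: " + isSparkSessionManaged);
        }
    }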