master #11

Manually merged
claudio.atzori merged 275 commits from master into enrichment_wfs 2020-05-11 15:14:56 +02:00
4 changed files with 13 additions and 20 deletions
Showing only changes of commit e218360f8a - Show all commits

View File

@@ -11,13 +11,6 @@
<artifactId>dhp-blacklist</artifactId>
<dependencies>
<dependency>
<groupId>eu.dnetlib.dhp</groupId>
<artifactId>dhp-graph-mapper</artifactId>
<version>1.1.7-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>eu.dnetlib.dhp</groupId>
<artifactId>dhp-common</artifactId>

View File

@@ -69,7 +69,8 @@ public class PrepareMergedRelationJob {
relation.createOrReplaceTempView("relation");
spark
.sql("Select * from relation " +
.sql(
"Select * from relation " +
"where relclass = 'merges' " +
"and datainfo.deletedbyinference = false")
.as(Encoders.bean(Relation.class))

View File

@@ -23,7 +23,7 @@ import org.apache.hadoop.fs.Path;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.raw.common.DbClient;
import eu.dnetlib.dhp.common.DbClient;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.common.RelationInverse;
import eu.dnetlib.dhp.schema.oaf.Relation;

View File

@@ -57,17 +57,16 @@ public class SparkRemoveBlacklistedRelationJob {
SparkConf conf = new SparkConf();
runWithSparkSession(
conf,
isSparkSessionManaged,
spark -> {
removeBlacklistedRelations(
spark,
inputPath,
blacklistPath,
outputPath,
mergesPath);
});
conf,
isSparkSessionManaged,
spark -> {
removeBlacklistedRelations(
spark,
inputPath,
blacklistPath,
outputPath,
mergesPath);
});
}