fixed behaviour of DedupRecordFactory

Claudio Atzori 2020-04-20 18:44:06 +02:00
parent ede1af3d85
commit eb8a020859
3 changed files with 239 additions and 139 deletions

View File: DedupRecordFactory.java

@@ -5,29 +5,29 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.Lists;
 import eu.dnetlib.dhp.schema.common.ModelSupport;
 import eu.dnetlib.dhp.schema.oaf.*;
-import eu.dnetlib.pace.config.DedupConfig;
 import java.util.Collection;
 import java.util.Iterator;
-import org.apache.spark.api.java.function.FilterFunction;
 import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.api.java.function.MapGroupsFunction;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.SparkSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import scala.Tuple2;

 public class DedupRecordFactory {

+    private static final Logger log = LoggerFactory.getLogger(DedupRecordFactory.class);
+
     protected static final ObjectMapper OBJECT_MAPPER =
-            new com.fasterxml.jackson.databind.ObjectMapper()
-                    .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+            new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

     public static <T extends OafEntity> Dataset<T> createDedupRecord(
             final SparkSession spark,
             final String mergeRelsInputPath,
             final String entitiesInputPath,
-            final Class<T> clazz,
-            final DedupConfig dedupConf) {
+            final Class<T> clazz) {

         long ts = System.currentTimeMillis();
@@ -54,40 +54,39 @@ public class DedupRecordFactory {
                                 r -> new Tuple2<>(r.getSource(), r.getTarget()),
                                 Encoders.tuple(Encoders.STRING(), Encoders.STRING()));

-        // <dedup_id, json_entity_merged>
         return mergeRels
-                .joinWith(entities, mergeRels.col("_1").equalTo(entities.col("_1")), "left_outer")
-                .filter(
-                        (FilterFunction<Tuple2<Tuple2<String, String>, Tuple2<String, T>>>)
-                                value -> value._2() != null)
+                .joinWith(entities, mergeRels.col("_2").equalTo(entities.col("_1")), "inner")
                 .map(
-                        (MapFunction<Tuple2<Tuple2<String, String>, Tuple2<String, T>>, T>)
-                                value -> value._2()._2(),
-                        Encoders.kryo(clazz))
-                .groupByKey((MapFunction<T, String>) value -> value.getId(), Encoders.STRING())
+                        (MapFunction<
+                                        Tuple2<Tuple2<String, String>, Tuple2<String, T>>,
+                                        Tuple2<String, T>>)
+                                value -> new Tuple2<>(value._1()._1(), value._2()._2()),
+                        Encoders.tuple(Encoders.STRING(), Encoders.kryo(clazz)))
+                .groupByKey(
+                        (MapFunction<Tuple2<String, T>, String>) entity -> entity._1(),
+                        Encoders.STRING())
                 .mapGroups(
-                        (MapGroupsFunction<String, T, T>)
+                        (MapGroupsFunction<String, Tuple2<String, T>, T>)
                                 (key, values) -> entityMerger(key, values, ts, clazz),
                         Encoders.bean(clazz));
     }

     private static <T extends OafEntity> T entityMerger(
-            String id, Iterator<T> entities, final long ts, Class<T> clazz) {
+            String id, Iterator<Tuple2<String, T>> entities, long ts, Class<T> clazz) {
         try {
             T entity = clazz.newInstance();
             entity.setId(id);
-            if (entity.getDataInfo() == null) {
-                entity.setDataInfo(new DataInfo());
-            }
+            entity.setDataInfo(new DataInfo());
             entity.getDataInfo().setTrust("0.9");
             entity.setLastupdatetimestamp(ts);

             final Collection<String> dates = Lists.newArrayList();
             entities.forEachRemaining(
-                    e -> {
-                        entity.mergeFrom(e);
-                        if (ModelSupport.isSubClass(e, Result.class)) {
-                            Result r1 = (Result) e;
+                    t -> {
+                        T duplicate = t._2();
+                        entity.mergeFrom(duplicate);
+                        if (ModelSupport.isSubClass(duplicate, Result.class)) {
+                            Result r1 = (Result) duplicate;
                             Result er = (Result) entity;
                             er.setAuthor(DedupUtility.mergeAuthor(er.getAuthor(), r1.getAuthor()));
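
The net effect of the change above is that each duplicate is joined on its own identifier (column "_2" of the merge relation, an inner join instead of a filtered left outer join) and then re-keyed by the dedup identifier before grouping, so every group handed to entityMerger contains all duplicates of one dedup record. Below is a minimal, self-contained sketch of that joinWith / groupByKey / mapGroups pattern; the class name, sample ids, String payloads (in place of the OAF entities) and the toy merge function are illustrative only, not part of the commit.

// Sketch only: simplified types, local master, illustrative ids.
import java.util.Arrays;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.api.java.function.MapGroupsFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import scala.Tuple2;

public class DedupGroupingSketch {
    public static void main(String[] args) {
        SparkSession spark =
                SparkSession.builder().appName("dedup-grouping-sketch").master("local[*]").getOrCreate();

        // mergeRels: (dedup_id, merged_object_id), as derived from the "merges" relations
        Dataset<Tuple2<String, String>> mergeRels =
                spark.createDataset(
                        Arrays.asList(new Tuple2<>("dedup_1", "obj_A"), new Tuple2<>("dedup_1", "obj_B")),
                        Encoders.tuple(Encoders.STRING(), Encoders.STRING()));

        // entities: (object_id, payload); the real code carries the parsed OAF entity here
        Dataset<Tuple2<String, String>> entities =
                spark.createDataset(
                        Arrays.asList(new Tuple2<>("obj_A", "payloadA"), new Tuple2<>("obj_B", "payloadB")),
                        Encoders.tuple(Encoders.STRING(), Encoders.STRING()));

        Dataset<String> dedupRecords =
                mergeRels
                        // inner join on the merged object id: only entities that are actually merged survive
                        .joinWith(entities, mergeRels.col("_2").equalTo(entities.col("_1")), "inner")
                        // re-key each joined pair by the dedup id, keeping the entity payload
                        .map(
                                (MapFunction<
                                                Tuple2<Tuple2<String, String>, Tuple2<String, String>>,
                                                Tuple2<String, String>>)
                                        value -> new Tuple2<>(value._1()._1(), value._2()._2()),
                                Encoders.tuple(Encoders.STRING(), Encoders.STRING()))
                        // group all duplicates under the same dedup id
                        .groupByKey((MapFunction<Tuple2<String, String>, String>) t -> t._1(), Encoders.STRING())
                        // stand-in for entityMerger: concatenate the payloads of the group
                        .mapGroups(
                                (MapGroupsFunction<String, Tuple2<String, String>, String>)
                                        (key, values) -> {
                                            StringBuilder merged = new StringBuilder(key).append(":");
                                            values.forEachRemaining(v -> merged.append(" ").append(v._2()));
                                            return merged.toString();
                                        },
                                Encoders.STRING());

        dedupRecords.show(false);
        spark.stop();
    }
}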

View File: SparkCreateDedupRecord.java

@@ -71,7 +71,7 @@ public class SparkCreateDedupRecord extends AbstractSparkAction {
             Class<OafEntity> clazz = ModelSupport.entityTypes.get(EntityType.valueOf(subEntity));
-            DedupRecordFactory.createDedupRecord(spark, mergeRelPath, entityPath, clazz, dedupConf)
+            DedupRecordFactory.createDedupRecord(spark, mergeRelPath, entityPath, clazz)
                     .map(
                             (MapFunction<OafEntity, String>)
                                     value -> OBJECT_MAPPER.writeValueAsString(value),
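
At this call site the records produced by the new createDedupRecord signature are mapped to JSON strings; a hedged sketch of the complete write step follows. The outputPath variable, the SaveMode and the text() save call are assumptions for illustration, not shown in this hunk.

// Sketch: assumes an outputPath variable and plain-text JSON output.
DedupRecordFactory.createDedupRecord(spark, mergeRelPath, entityPath, clazz)
        .map(
                (MapFunction<OafEntity, String>) value -> OBJECT_MAPPER.writeValueAsString(value),
                Encoders.STRING())
        .write()
        .mode(SaveMode.Overwrite) // org.apache.spark.sql.SaveMode
        .text(outputPath); // outputPath is assumed here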

View File: SparkDedupTest.java

@@ -1,10 +1,19 @@
 package eu.dnetlib.dhp.oa.dedup;

+import static java.nio.file.Files.createTempDirectory;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.mockito.Mockito.lenient;
+
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
 import eu.dnetlib.pace.util.MapDocumentUtil;
+import java.io.File;
+import java.io.IOException;
+import java.io.Serializable;
+import java.net.URISyntaxException;
+import java.nio.file.Paths;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
@@ -23,16 +32,6 @@ import org.mockito.Mockito;
 import org.mockito.junit.jupiter.MockitoExtension;
 import scala.Tuple2;

-import java.io.File;
-import java.io.IOException;
-import java.io.Serializable;
-import java.net.URISyntaxException;
-import java.nio.file.Paths;
-
-import static java.nio.file.Files.createTempDirectory;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.mockito.Mockito.lenient;
-
 @ExtendWith(MockitoExtension.class)
 @TestMethodOrder(MethodOrderer.OrderAnnotation.class)
 public class SparkDedupTest implements Serializable {
@@ -46,65 +45,104 @@ public class SparkDedupTest implements Serializable {
     private static String testGraphBasePath;
     private static String testOutputBasePath;
     private static String testDedupGraphBasePath;
-    private final static String testActionSetId = "test-orchestrator";
+    private static final String testActionSetId = "test-orchestrator";

     @BeforeAll
-    private static void cleanUp() throws IOException, URISyntaxException {
+    public static void cleanUp() throws IOException, URISyntaxException {

-        testGraphBasePath = Paths.get(SparkDedupTest.class.getResource("/eu/dnetlib/dhp/dedup/entities").toURI()).toFile().getAbsolutePath();
+        testGraphBasePath =
+                Paths.get(
+                                SparkDedupTest.class
+                                        .getResource("/eu/dnetlib/dhp/dedup/entities")
+                                        .toURI())
+                        .toFile()
+                        .getAbsolutePath();

-        testOutputBasePath = createTempDirectory(SparkDedupTest.class.getSimpleName() + "-").toAbsolutePath().toString();
-        testDedupGraphBasePath = createTempDirectory(SparkDedupTest.class.getSimpleName() + "-").toAbsolutePath().toString();
+        testOutputBasePath =
+                createTempDirectory(SparkDedupTest.class.getSimpleName() + "-")
+                        .toAbsolutePath()
+                        .toString();
+        testDedupGraphBasePath =
+                createTempDirectory(SparkDedupTest.class.getSimpleName() + "-")
+                        .toAbsolutePath()
+                        .toString();

         FileUtils.deleteDirectory(new File(testOutputBasePath));
         FileUtils.deleteDirectory(new File(testDedupGraphBasePath));

-        spark = SparkSession
-                .builder()
-                .appName(SparkCreateSimRels.class.getSimpleName())
-                .master("local[*]")
-                .config(new SparkConf())
-                .getOrCreate();
+        spark =
+                SparkSession.builder()
+                        .appName(SparkCreateSimRels.class.getSimpleName())
+                        .master("local[*]")
+                        .config(new SparkConf())
+                        .getOrCreate();

         jsc = JavaSparkContext.fromSparkContext(spark.sparkContext());
     }

     @BeforeEach
-    private void setUp() throws IOException, ISLookUpException {
+    public void setUp() throws IOException, ISLookUpException {

-        lenient().when(isLookUpService.getResourceProfileByQuery(Mockito.contains(testActionSetId)))
-                .thenReturn(IOUtils.toString(SparkDedupTest.class.getResourceAsStream("/eu/dnetlib/dhp/dedup/profiles/mock_orchestrator.xml")));
+        lenient()
+                .when(isLookUpService.getResourceProfileByQuery(Mockito.contains(testActionSetId)))
+                .thenReturn(
+                        IOUtils.toString(
+                                SparkDedupTest.class.getResourceAsStream(
+                                        "/eu/dnetlib/dhp/dedup/profiles/mock_orchestrator.xml")));

-        lenient().when(isLookUpService.getResourceProfileByQuery(Mockito.contains("organization")))
-                .thenReturn(IOUtils.toString(SparkDedupTest.class.getResourceAsStream("/eu/dnetlib/dhp/dedup/conf/org.curr.conf.json")));
+        lenient()
+                .when(isLookUpService.getResourceProfileByQuery(Mockito.contains("organization")))
+                .thenReturn(
+                        IOUtils.toString(
+                                SparkDedupTest.class.getResourceAsStream(
+                                        "/eu/dnetlib/dhp/dedup/conf/org.curr.conf.json")));

-        lenient().when(isLookUpService.getResourceProfileByQuery(Mockito.contains("publication")))
-                .thenReturn(IOUtils.toString(SparkDedupTest.class.getResourceAsStream("/eu/dnetlib/dhp/dedup/conf/pub.curr.conf.json")));
+        lenient()
+                .when(isLookUpService.getResourceProfileByQuery(Mockito.contains("publication")))
+                .thenReturn(
+                        IOUtils.toString(
+                                SparkDedupTest.class.getResourceAsStream(
+                                        "/eu/dnetlib/dhp/dedup/conf/pub.curr.conf.json")));

-        lenient().when(isLookUpService.getResourceProfileByQuery(Mockito.contains("software")))
-                .thenReturn(IOUtils.toString(SparkDedupTest.class.getResourceAsStream("/eu/dnetlib/dhp/dedup/conf/sw.curr.conf.json")));
+        lenient()
+                .when(isLookUpService.getResourceProfileByQuery(Mockito.contains("software")))
+                .thenReturn(
+                        IOUtils.toString(
+                                SparkDedupTest.class.getResourceAsStream(
+                                        "/eu/dnetlib/dhp/dedup/conf/sw.curr.conf.json")));
     }

     @Test
     @Order(1)
     public void createSimRelsTest() throws Exception {

-        ArgumentApplicationParser parser = new ArgumentApplicationParser(
-                IOUtils.toString(
-                        SparkCreateSimRels.class.getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/createSimRels_parameters.json")));
-        parser.parseArgument(new String[]{
-                "-i", testGraphBasePath,
-                "-asi", testActionSetId,
-                "-la", "lookupurl",
-                "-w", testOutputBasePath});
+        ArgumentApplicationParser parser =
+                new ArgumentApplicationParser(
+                        IOUtils.toString(
+                                SparkCreateSimRels.class.getResourceAsStream(
+                                        "/eu/dnetlib/dhp/oa/dedup/createSimRels_parameters.json")));
+        parser.parseArgument(
+                new String[] {
+                    "-i", testGraphBasePath,
+                    "-asi", testActionSetId,
+                    "-la", "lookupurl",
+                    "-w", testOutputBasePath
+                });

         new SparkCreateSimRels(parser, spark).run(isLookUpService);

-        long orgs_simrel = spark.read().load(testOutputBasePath + "/" + testActionSetId + "/organization_simrel").count();
-        long pubs_simrel = spark.read().load(testOutputBasePath + "/" + testActionSetId + "/publication_simrel").count();
-        long sw_simrel = spark.read().load(testOutputBasePath + "/" + testActionSetId + "/software_simrel").count();
+        long orgs_simrel =
+                spark.read()
+                        .load(testOutputBasePath + "/" + testActionSetId + "/organization_simrel")
+                        .count();
+        long pubs_simrel =
+                spark.read()
+                        .load(testOutputBasePath + "/" + testActionSetId + "/publication_simrel")
+                        .count();
+        long sw_simrel =
+                spark.read()
+                        .load(testOutputBasePath + "/" + testActionSetId + "/software_simrel")
+                        .count();

         assertEquals(3432, orgs_simrel);
         assertEquals(7260, pubs_simrel);
@@ -115,20 +153,33 @@ public class SparkDedupTest implements Serializable {
     @Order(2)
     public void createMergeRelsTest() throws Exception {

-        ArgumentApplicationParser parser = new ArgumentApplicationParser(
-                IOUtils.toString(
-                        SparkCreateMergeRels.class.getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/createCC_parameters.json")));
-        parser.parseArgument(new String[]{
-                "-i", testGraphBasePath,
-                "-asi", testActionSetId,
-                "-la", "lookupurl",
-                "-w", testOutputBasePath});
+        ArgumentApplicationParser parser =
+                new ArgumentApplicationParser(
+                        IOUtils.toString(
+                                SparkCreateMergeRels.class.getResourceAsStream(
+                                        "/eu/dnetlib/dhp/oa/dedup/createCC_parameters.json")));
+        parser.parseArgument(
+                new String[] {
+                    "-i", testGraphBasePath,
+                    "-asi", testActionSetId,
+                    "-la", "lookupurl",
+                    "-w", testOutputBasePath
+                });

         new SparkCreateMergeRels(parser, spark).run(isLookUpService);

-        long orgs_mergerel = spark.read().load(testOutputBasePath + "/" + testActionSetId + "/organization_mergerel").count();
-        long pubs_mergerel = spark.read().load(testOutputBasePath + "/" + testActionSetId + "/publication_mergerel").count();
-        long sw_mergerel = spark.read().load(testOutputBasePath + "/" + testActionSetId + "/software_mergerel").count();
+        long orgs_mergerel =
+                spark.read()
+                        .load(testOutputBasePath + "/" + testActionSetId + "/organization_mergerel")
+                        .count();
+        long pubs_mergerel =
+                spark.read()
+                        .load(testOutputBasePath + "/" + testActionSetId + "/publication_mergerel")
+                        .count();
+        long sw_mergerel =
+                spark.read()
+                        .load(testOutputBasePath + "/" + testActionSetId + "/software_mergerel")
+                        .count();

         assertEquals(1276, orgs_mergerel);
         assertEquals(1460, pubs_mergerel);
@@ -139,20 +190,38 @@ public class SparkDedupTest implements Serializable {
     @Order(3)
     public void createDedupRecordTest() throws Exception {

-        ArgumentApplicationParser parser = new ArgumentApplicationParser(
-                IOUtils.toString(
-                        SparkCreateDedupRecord.class.getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/createDedupRecord_parameters.json")));
-        parser.parseArgument(new String[]{
-                "-i", testGraphBasePath,
-                "-asi", testActionSetId,
-                "-la", "lookupurl",
-                "-w", testOutputBasePath});
+        ArgumentApplicationParser parser =
+                new ArgumentApplicationParser(
+                        IOUtils.toString(
+                                SparkCreateDedupRecord.class.getResourceAsStream(
+                                        "/eu/dnetlib/dhp/oa/dedup/createDedupRecord_parameters.json")));
+        parser.parseArgument(
+                new String[] {
+                    "-i", testGraphBasePath,
+                    "-asi", testActionSetId,
+                    "-la", "lookupurl",
+                    "-w", testOutputBasePath
+                });

         new SparkCreateDedupRecord(parser, spark).run(isLookUpService);

-        long orgs_deduprecord = jsc.textFile(testOutputBasePath + "/" + testActionSetId + "/organization_deduprecord").count();
-        long pubs_deduprecord = jsc.textFile(testOutputBasePath + "/" + testActionSetId + "/publication_deduprecord").count();
-        long sw_deduprecord = jsc.textFile(testOutputBasePath + "/" + testActionSetId + "/software_deduprecord").count();
+        long orgs_deduprecord =
+                jsc.textFile(
+                                testOutputBasePath
+                                        + "/"
+                                        + testActionSetId
+                                        + "/organization_deduprecord")
+                        .count();
+        long pubs_deduprecord =
+                jsc.textFile(
+                                testOutputBasePath
+                                        + "/"
+                                        + testActionSetId
+                                        + "/publication_deduprecord")
+                        .count();
+        long sw_deduprecord =
+                jsc.textFile(testOutputBasePath + "/" + testActionSetId + "/software_deduprecord")
+                        .count();

         assertEquals(82, orgs_deduprecord);
         assertEquals(66, pubs_deduprecord);
@@ -163,14 +232,17 @@ public class SparkDedupTest implements Serializable {
     @Order(4)
     public void updateEntityTest() throws Exception {

-        ArgumentApplicationParser parser = new ArgumentApplicationParser(
-                IOUtils.toString(
-                        SparkUpdateEntity.class.getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/updateEntity_parameters.json")));
-        parser.parseArgument(new String[]{
-                "-i", testGraphBasePath,
-                "-w", testOutputBasePath,
-                "-o", testDedupGraphBasePath
-        });
+        ArgumentApplicationParser parser =
+                new ArgumentApplicationParser(
+                        IOUtils.toString(
+                                SparkUpdateEntity.class.getResourceAsStream(
+                                        "/eu/dnetlib/dhp/oa/dedup/updateEntity_parameters.json")));
+        parser.parseArgument(
+                new String[] {
+                    "-i", testGraphBasePath,
+                    "-w", testOutputBasePath,
+                    "-o", testDedupGraphBasePath
+                });

         new SparkUpdateEntity(parser, spark).run(isLookUpService);
@@ -180,19 +252,25 @@ public class SparkDedupTest implements Serializable {
         long datasource = jsc.textFile(testDedupGraphBasePath + "/datasource").count();
         long softwares = jsc.textFile(testDedupGraphBasePath + "/software").count();

-        long mergedOrgs = spark
-                .read().load(testOutputBasePath + "/" + testActionSetId + "/organization_mergerel").as(Encoders.bean(Relation.class))
-                .where("relClass=='merges'")
-                .javaRDD()
-                .map(Relation::getTarget)
-                .distinct().count();
+        long mergedOrgs =
+                spark.read()
+                        .load(testOutputBasePath + "/" + testActionSetId + "/organization_mergerel")
+                        .as(Encoders.bean(Relation.class))
+                        .where("relClass=='merges'")
+                        .javaRDD()
+                        .map(Relation::getTarget)
+                        .distinct()
+                        .count();

-        long mergedPubs = spark
-                .read().load(testOutputBasePath + "/" + testActionSetId + "/publication_mergerel").as(Encoders.bean(Relation.class))
-                .where("relClass=='merges'")
-                .javaRDD()
-                .map(Relation::getTarget)
-                .distinct().count();
+        long mergedPubs =
+                spark.read()
+                        .load(testOutputBasePath + "/" + testActionSetId + "/publication_mergerel")
+                        .as(Encoders.bean(Relation.class))
+                        .where("relClass=='merges'")
+                        .javaRDD()
+                        .map(Relation::getTarget)
+                        .distinct()
+                        .count();

         assertEquals(831, publications);
         assertEquals(835, organizations);
@@ -200,10 +278,15 @@
         assertEquals(100, datasource);
         assertEquals(200, softwares);

-        long deletedOrgs = jsc.textFile(testDedupGraphBasePath + "/organization")
-                .filter(this::isDeletedByInference).count();
-        long deletedPubs = jsc.textFile(testDedupGraphBasePath + "/publication")
-                .filter(this::isDeletedByInference).count();
+        long deletedOrgs =
+                jsc.textFile(testDedupGraphBasePath + "/organization")
+                        .filter(this::isDeletedByInference)
+                        .count();
+        long deletedPubs =
+                jsc.textFile(testDedupGraphBasePath + "/publication")
+                        .filter(this::isDeletedByInference)
+                        .count();

         assertEquals(mergedOrgs, deletedOrgs);
         assertEquals(mergedPubs, deletedPubs);
@@ -213,14 +296,17 @@ public class SparkDedupTest implements Serializable {
     @Order(5)
     public void propagateRelationTest() throws Exception {

-        ArgumentApplicationParser parser = new ArgumentApplicationParser(
-                IOUtils.toString(
-                        SparkPropagateRelation.class.getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/propagateRelation_parameters.json")));
-        parser.parseArgument(new String[]{
-                "-i", testGraphBasePath,
-                "-w", testOutputBasePath,
-                "-o", testDedupGraphBasePath
-        });
+        ArgumentApplicationParser parser =
+                new ArgumentApplicationParser(
+                        IOUtils.toString(
+                                SparkPropagateRelation.class.getResourceAsStream(
+                                        "/eu/dnetlib/dhp/oa/dedup/propagateRelation_parameters.json")));
+        parser.parseArgument(
+                new String[] {
+                    "-i", testGraphBasePath,
+                    "-w", testOutputBasePath,
+                    "-o", testDedupGraphBasePath
+                });

         new SparkPropagateRelation(parser, spark).run(isLookUpService);
@@ -228,22 +314,37 @@
         assertEquals(826, relations);

-        //check deletedbyinference
-        final Dataset<Relation> mergeRels = spark.read().load(DedupUtility.createMergeRelPath(testOutputBasePath, "*", "*")).as(Encoders.bean(Relation.class));
-        final JavaPairRDD<String, String> mergedIds = mergeRels
-                .where("relClass == 'merges'")
-                .select(mergeRels.col("target"))
-                .distinct()
-                .toJavaRDD()
-                .mapToPair((PairFunction<Row, String, String>) r -> new Tuple2<String, String>(r.getString(0), "d"));
+        // check deletedbyinference
+        final Dataset<Relation> mergeRels =
+                spark.read()
+                        .load(DedupUtility.createMergeRelPath(testOutputBasePath, "*", "*"))
+                        .as(Encoders.bean(Relation.class));
+        final JavaPairRDD<String, String> mergedIds =
+                mergeRels
+                        .where("relClass == 'merges'")
+                        .select(mergeRels.col("target"))
+                        .distinct()
+                        .toJavaRDD()
+                        .mapToPair(
+                                (PairFunction<Row, String, String>)
+                                        r -> new Tuple2<String, String>(r.getString(0), "d"));

-        JavaRDD<String> toCheck = jsc.textFile(testDedupGraphBasePath + "/relation")
-                .mapToPair(json -> new Tuple2<>(MapDocumentUtil.getJPathString("$.source", json), json))
-                .join(mergedIds)
-                .map(t -> t._2()._1())
-                .mapToPair(json -> new Tuple2<>(MapDocumentUtil.getJPathString("$.target", json), json))
-                .join(mergedIds)
-                .map(t -> t._2()._1());
+        JavaRDD<String> toCheck =
+                jsc.textFile(testDedupGraphBasePath + "/relation")
+                        .mapToPair(
+                                json ->
+                                        new Tuple2<>(
+                                                MapDocumentUtil.getJPathString("$.source", json),
+                                                json))
+                        .join(mergedIds)
+                        .map(t -> t._2()._1())
+                        .mapToPair(
+                                json ->
+                                        new Tuple2<>(
+                                                MapDocumentUtil.getJPathString("$.target", json),
+                                                json))
+                        .join(mergedIds)
+                        .map(t -> t._2()._1());

         long deletedbyinference = toCheck.filter(this::isDeletedByInference).count();
         long updated = toCheck.count();
@@ -258,6 +359,6 @@
     }

     public boolean isDeletedByInference(String s) {
         return s.contains("\"deletedbyinference\":true");
     }
 }