2020-09-29 15:31:46 +02:00
|
|
|
|
2020-04-02 18:37:53 +02:00
|
|
|
package eu.dnetlib.dhp.oa.dedup;
|
2020-04-01 18:48:31 +02:00
|
|
|
|
2023-08-08 15:52:20 +02:00
|
|
|
import static java.nio.file.Files.createTempDirectory;

import static org.apache.spark.sql.functions.count;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.lenient;

import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Serializable;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.*;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Sets;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.oaf.*;
import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
import eu.dnetlib.dhp.schema.sx.OafUtils;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
import scala.Tuple2;
|
2020-04-20 16:49:01 +02:00
|
|
|
|
2020-04-01 19:05:48 +02:00
|
|
|
@ExtendWith(MockitoExtension.class)
|
2020-04-03 18:32:25 +02:00
|
|
|
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
|
2020-04-08 18:02:30 +02:00
|
|
|
public class SparkDedupTest implements Serializable {
|
2020-04-01 18:48:31 +02:00
|
|
|
|
2021-10-07 15:39:55 +02:00
|
|
|
@Mock(serializable = true)
|
|
|
|
ISLookUpService isLookUpService;
|
|
|
|
|
|
|
|
private static SparkSession spark;
|
|
|
|
private static JavaSparkContext jsc;
|
|
|
|
|
|
|
|
private static String testGraphBasePath;
|
|
|
|
private static String testOutputBasePath;
|
|
|
|
private static String testDedupGraphBasePath;
|
2023-09-01 09:32:57 +02:00
|
|
|
private static String testConsistencyGraphBasePath;
|
|
|
|
|
2021-10-07 15:39:55 +02:00
|
|
|
private static final String testActionSetId = "test-orchestrator";
|
|
|
|
private static String whitelistPath;
|
|
|
|
private static List<String> whiteList;
|
|
|
|
|
|
|
|
private static String WHITELIST_SEPARATOR = "####";
|
|
|
|
|
|
|
|
@BeforeAll
|
|
|
|
public static void cleanUp() throws IOException, URISyntaxException {
|
|
|
|
|
|
|
|
testGraphBasePath = Paths
|
|
|
|
.get(SparkDedupTest.class.getResource("/eu/dnetlib/dhp/dedup/entities").toURI())
|
|
|
|
.toFile()
|
|
|
|
.getAbsolutePath();
|
2023-09-01 09:32:57 +02:00
|
|
|
|
2021-10-07 15:39:55 +02:00
|
|
|
testOutputBasePath = createTempDirectory(SparkDedupTest.class.getSimpleName() + "-")
|
|
|
|
.toAbsolutePath()
|
|
|
|
.toString();
|
|
|
|
|
|
|
|
testDedupGraphBasePath = createTempDirectory(SparkDedupTest.class.getSimpleName() + "-")
|
|
|
|
.toAbsolutePath()
|
|
|
|
.toString();
|
|
|
|
|
2023-09-01 09:32:57 +02:00
|
|
|
testConsistencyGraphBasePath = createTempDirectory(SparkDedupTest.class.getSimpleName() + "-")
|
|
|
|
.toAbsolutePath()
|
|
|
|
.toString();
|
|
|
|
|
2021-10-07 15:39:55 +02:00
|
|
|
whitelistPath = Paths
|
|
|
|
.get(SparkDedupTest.class.getResource("/eu/dnetlib/dhp/dedup/whitelist.simrels.txt").toURI())
|
|
|
|
.toFile()
|
|
|
|
.getAbsolutePath();
|
|
|
|
whiteList = IOUtils.readLines(new FileReader(whitelistPath));
|
|
|
|
|
|
|
|
FileUtils.deleteDirectory(new File(testOutputBasePath));
|
|
|
|
FileUtils.deleteDirectory(new File(testDedupGraphBasePath));
|
|
|
|
|
|
|
|
final SparkConf conf = new SparkConf();
|
|
|
|
conf.set("spark.sql.shuffle.partitions", "200");
|
2023-10-02 09:25:12 +02:00
|
|
|
conf.set("spark.sql.warehouse.dir", testOutputBasePath + "/spark-warehouse");
|
2021-10-07 15:39:55 +02:00
|
|
|
spark = SparkSession
|
|
|
|
.builder()
|
|
|
|
.appName(SparkDedupTest.class.getSimpleName())
|
|
|
|
.master("local[*]")
|
|
|
|
.config(conf)
|
|
|
|
.getOrCreate();
|
|
|
|
|
|
|
|
jsc = JavaSparkContext.fromSparkContext(spark.sparkContext());
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
@BeforeEach
|
|
|
|
public void setUp() throws IOException, ISLookUpException {
|
|
|
|
|
|
|
|
lenient()
|
|
|
|
.when(isLookUpService.getResourceProfileByQuery(Mockito.contains(testActionSetId)))
|
2022-11-11 16:10:25 +01:00
|
|
|
.thenReturn(classPathResourceAsString("/eu/dnetlib/dhp/dedup/profiles/mock_orchestrator.xml"));
|
2021-10-07 15:39:55 +02:00
|
|
|
|
|
|
|
lenient()
|
|
|
|
.when(isLookUpService.getResourceProfileByQuery(Mockito.contains("organization")))
|
2022-11-11 16:10:25 +01:00
|
|
|
.thenReturn(classPathResourceAsString("/eu/dnetlib/dhp/dedup/conf/org.curr.conf.json"));
|
2021-10-07 15:39:55 +02:00
|
|
|
|
|
|
|
lenient()
|
|
|
|
.when(isLookUpService.getResourceProfileByQuery(Mockito.contains("publication")))
|
2022-11-11 16:10:25 +01:00
|
|
|
.thenReturn(classPathResourceAsString("/eu/dnetlib/dhp/dedup/conf/pub.curr.conf.json"));
|
2021-10-07 15:39:55 +02:00
|
|
|
|
|
|
|
lenient()
|
|
|
|
.when(isLookUpService.getResourceProfileByQuery(Mockito.contains("software")))
|
2022-11-11 16:10:25 +01:00
|
|
|
.thenReturn(classPathResourceAsString("/eu/dnetlib/dhp/dedup/conf/sw.curr.conf.json"));
|
2021-10-07 15:39:55 +02:00
|
|
|
|
|
|
|
lenient()
|
|
|
|
.when(isLookUpService.getResourceProfileByQuery(Mockito.contains("dataset")))
|
2022-11-11 16:10:25 +01:00
|
|
|
.thenReturn(classPathResourceAsString("/eu/dnetlib/dhp/dedup/conf/ds.curr.conf.json"));
|
2021-10-07 15:39:55 +02:00
|
|
|
|
|
|
|
lenient()
|
|
|
|
.when(isLookUpService.getResourceProfileByQuery(Mockito.contains("otherresearchproduct")))
|
2022-11-11 16:10:25 +01:00
|
|
|
.thenReturn(classPathResourceAsString("/eu/dnetlib/dhp/dedup/conf/orp.curr.conf.json"));
|
2021-10-07 15:39:55 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
@Test
|
|
|
|
@Order(1)
|
|
|
|
void createSimRelsTest() throws Exception {
|
|
|
|
|
|
|
|
ArgumentApplicationParser parser = new ArgumentApplicationParser(
|
2022-11-11 16:10:25 +01:00
|
|
|
classPathResourceAsString("/eu/dnetlib/dhp/oa/dedup/createSimRels_parameters.json"));
|
2021-10-07 15:39:55 +02:00
|
|
|
|
|
|
|
parser
|
|
|
|
.parseArgument(
|
|
|
|
new String[] {
|
|
|
|
"-i", testGraphBasePath,
|
|
|
|
"-asi", testActionSetId,
|
|
|
|
"-la", "lookupurl",
|
|
|
|
"-w", testOutputBasePath,
|
|
|
|
"-np", "50"
|
|
|
|
});
|
|
|
|
|
|
|
|
new SparkCreateSimRels(parser, spark).run(isLookUpService);
|
|
|
|
|
|
|
|
long orgs_simrel = spark
|
|
|
|
.read()
|
|
|
|
.load(DedupUtility.createSimRelPath(testOutputBasePath, testActionSetId, "organization"))
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long pubs_simrel = spark
|
|
|
|
.read()
|
|
|
|
.load(DedupUtility.createSimRelPath(testOutputBasePath, testActionSetId, "publication"))
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long sw_simrel = spark
|
|
|
|
.read()
|
|
|
|
.load(DedupUtility.createSimRelPath(testOutputBasePath, testActionSetId, "software"))
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long ds_simrel = spark
|
|
|
|
.read()
|
|
|
|
.load(DedupUtility.createSimRelPath(testOutputBasePath, testActionSetId, "dataset"))
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long orp_simrel = spark
|
|
|
|
.read()
|
|
|
|
.load(DedupUtility.createSimRelPath(testOutputBasePath, testActionSetId, "otherresearchproduct"))
|
|
|
|
.count();
|
|
|
|
|
2023-07-18 11:38:56 +02:00
|
|
|
System.out.println("orgs_simrel = " + orgs_simrel);
|
|
|
|
System.out.println("pubs_simrel = " + pubs_simrel);
|
|
|
|
System.out.println("sw_simrel = " + sw_simrel);
|
|
|
|
System.out.println("ds_simrel = " + ds_simrel);
|
|
|
|
System.out.println("orp_simrel = " + orp_simrel);
|
|
|
|
|
2023-10-02 09:25:12 +02:00
|
|
|
assertEquals(751, orgs_simrel);
|
|
|
|
assertEquals(546, pubs_simrel);
|
|
|
|
assertEquals(113, sw_simrel);
|
|
|
|
assertEquals(148, ds_simrel);
|
|
|
|
assertEquals(280, orp_simrel);
|
2023-07-18 11:38:56 +02:00
|
|
|
|
2021-10-07 15:39:55 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
@Test
|
|
|
|
@Order(2)
|
|
|
|
void whitelistSimRelsTest() throws Exception {
|
|
|
|
|
|
|
|
ArgumentApplicationParser parser = new ArgumentApplicationParser(
|
2022-11-11 16:10:25 +01:00
|
|
|
classPathResourceAsString("/eu/dnetlib/dhp/oa/dedup/whitelistSimRels_parameters.json"));
|
2021-10-07 15:39:55 +02:00
|
|
|
|
|
|
|
parser
|
|
|
|
.parseArgument(
|
|
|
|
new String[] {
|
|
|
|
"-i", testGraphBasePath,
|
|
|
|
"-asi", testActionSetId,
|
|
|
|
"-la", "lookupurl",
|
|
|
|
"-w", testOutputBasePath,
|
|
|
|
"-np", "50",
|
|
|
|
"-wl", whitelistPath
|
|
|
|
});
|
|
|
|
|
|
|
|
new SparkWhitelistSimRels(parser, spark).run(isLookUpService);
|
|
|
|
|
|
|
|
long orgs_simrel = spark
|
|
|
|
.read()
|
|
|
|
.load(DedupUtility.createSimRelPath(testOutputBasePath, testActionSetId, "organization"))
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long pubs_simrel = spark
|
|
|
|
.read()
|
|
|
|
.load(DedupUtility.createSimRelPath(testOutputBasePath, testActionSetId, "publication"))
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long ds_simrel = spark
|
|
|
|
.read()
|
|
|
|
.load(DedupUtility.createSimRelPath(testOutputBasePath, testActionSetId, "dataset"))
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long orp_simrel = spark
|
|
|
|
.read()
|
|
|
|
.load(DedupUtility.createSimRelPath(testOutputBasePath, testActionSetId, "otherresearchproduct"))
|
|
|
|
.count();
|
|
|
|
|
|
|
|
// entities simrels supposed to be equal to the number of previous step (no rels in whitelist)
|
2023-10-02 09:25:12 +02:00
|
|
|
assertEquals(751, orgs_simrel);
|
|
|
|
assertEquals(546, pubs_simrel);
|
|
|
|
assertEquals(148, ds_simrel);
|
|
|
|
assertEquals(280, orp_simrel);
|
2022-03-15 16:33:03 +01:00
|
|
|
// System.out.println("orgs_simrel = " + orgs_simrel);
|
|
|
|
// System.out.println("pubs_simrel = " + pubs_simrel);
|
|
|
|
// System.out.println("ds_simrel = " + ds_simrel);
|
|
|
|
// System.out.println("orp_simrel = " + orp_simrel);
|
2021-10-07 15:39:55 +02:00
|
|
|
|
|
|
|
// entities simrels to be different from the number of previous step (new simrels in the whitelist)
|
|
|
|
Dataset<Row> sw_simrel = spark
|
|
|
|
.read()
|
|
|
|
.load(DedupUtility.createSimRelPath(testOutputBasePath, testActionSetId, "software"));
|
|
|
|
|
|
|
|
// check if the first relation in the whitelist exists
|
|
|
|
assertTrue(
|
|
|
|
sw_simrel
|
|
|
|
.as(Encoders.bean(Relation.class))
|
|
|
|
.toJavaRDD()
|
|
|
|
.filter(
|
|
|
|
rel -> rel.getSource().equalsIgnoreCase(whiteList.get(0).split(WHITELIST_SEPARATOR)[0])
|
|
|
|
&& rel.getTarget().equalsIgnoreCase(whiteList.get(0).split(WHITELIST_SEPARATOR)[1]))
|
|
|
|
.count() > 0);
|
|
|
|
// check if the second relation in the whitelist exists
|
|
|
|
assertTrue(
|
|
|
|
sw_simrel
|
|
|
|
.as(Encoders.bean(Relation.class))
|
|
|
|
.toJavaRDD()
|
|
|
|
.filter(
|
|
|
|
rel -> rel.getSource().equalsIgnoreCase(whiteList.get(1).split(WHITELIST_SEPARATOR)[0])
|
|
|
|
&& rel.getTarget().equalsIgnoreCase(whiteList.get(1).split(WHITELIST_SEPARATOR)[1]))
|
|
|
|
.count() > 0);
|
|
|
|
|
2023-10-02 09:25:12 +02:00
|
|
|
assertEquals(115, sw_simrel.count());
|
2022-03-15 16:33:03 +01:00
|
|
|
// System.out.println("sw_simrel = " + sw_simrel.count());
|
2021-10-07 15:39:55 +02:00
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
@Test
|
|
|
|
@Order(3)
|
|
|
|
void cutMergeRelsTest() throws Exception {
|
|
|
|
|
|
|
|
ArgumentApplicationParser parser = new ArgumentApplicationParser(
|
2022-11-11 16:10:25 +01:00
|
|
|
classPathResourceAsString("/eu/dnetlib/dhp/oa/dedup/createCC_parameters.json"));
|
2021-10-07 15:39:55 +02:00
|
|
|
|
|
|
|
parser
|
|
|
|
.parseArgument(
|
|
|
|
new String[] {
|
|
|
|
"-i",
|
|
|
|
testGraphBasePath,
|
|
|
|
"-asi",
|
|
|
|
testActionSetId,
|
|
|
|
"-la",
|
|
|
|
"lookupurl",
|
|
|
|
"-w",
|
|
|
|
testOutputBasePath,
|
|
|
|
"-cc",
|
2023-10-02 09:25:12 +02:00
|
|
|
"3",
|
|
|
|
"-h",
|
|
|
|
""
|
2021-10-07 15:39:55 +02:00
|
|
|
});
|
|
|
|
|
|
|
|
new SparkCreateMergeRels(parser, spark).run(isLookUpService);
|
|
|
|
|
|
|
|
long orgs_mergerel = spark
|
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/organization_mergerel")
|
|
|
|
.as(Encoders.bean(Relation.class))
|
|
|
|
.filter((FilterFunction<Relation>) r -> r.getRelClass().equalsIgnoreCase("merges"))
|
|
|
|
.groupBy("source")
|
|
|
|
.agg(count("target").alias("cnt"))
|
|
|
|
.select("source", "cnt")
|
|
|
|
.where("cnt > 3")
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long pubs_mergerel = spark
|
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/publication_mergerel")
|
|
|
|
.as(Encoders.bean(Relation.class))
|
|
|
|
.filter((FilterFunction<Relation>) r -> r.getRelClass().equalsIgnoreCase("merges"))
|
|
|
|
.groupBy("source")
|
|
|
|
.agg(count("target").alias("cnt"))
|
|
|
|
.select("source", "cnt")
|
|
|
|
.where("cnt > 3")
|
|
|
|
.count();
|
|
|
|
long sw_mergerel = spark
|
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/software_mergerel")
|
|
|
|
.as(Encoders.bean(Relation.class))
|
|
|
|
.filter((FilterFunction<Relation>) r -> r.getRelClass().equalsIgnoreCase("merges"))
|
|
|
|
.groupBy("source")
|
|
|
|
.agg(count("target").alias("cnt"))
|
|
|
|
.select("source", "cnt")
|
|
|
|
.where("cnt > 3")
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long ds_mergerel = spark
|
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/dataset_mergerel")
|
|
|
|
.as(Encoders.bean(Relation.class))
|
|
|
|
.filter((FilterFunction<Relation>) r -> r.getRelClass().equalsIgnoreCase("merges"))
|
|
|
|
.groupBy("source")
|
|
|
|
.agg(count("target").alias("cnt"))
|
|
|
|
.select("source", "cnt")
|
|
|
|
.where("cnt > 3")
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long orp_mergerel = spark
|
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/otherresearchproduct_mergerel")
|
|
|
|
.as(Encoders.bean(Relation.class))
|
|
|
|
.filter((FilterFunction<Relation>) r -> r.getRelClass().equalsIgnoreCase("merges"))
|
|
|
|
.groupBy("source")
|
|
|
|
.agg(count("target").alias("cnt"))
|
|
|
|
.select("source", "cnt")
|
|
|
|
.where("cnt > 3")
|
|
|
|
.count();
|
|
|
|
|
|
|
|
assertEquals(0, orgs_mergerel);
|
|
|
|
assertEquals(0, pubs_mergerel);
|
|
|
|
assertEquals(0, sw_mergerel);
|
|
|
|
assertEquals(0, ds_mergerel);
|
|
|
|
assertEquals(0, orp_mergerel);
|
|
|
|
|
|
|
|
FileUtils.deleteDirectory(new File(testOutputBasePath + "/" + testActionSetId + "/organization_mergerel"));
|
|
|
|
FileUtils.deleteDirectory(new File(testOutputBasePath + "/" + testActionSetId + "/publication_mergerel"));
|
|
|
|
FileUtils.deleteDirectory(new File(testOutputBasePath + "/" + testActionSetId + "/software_mergerel"));
|
|
|
|
FileUtils.deleteDirectory(new File(testOutputBasePath + "/" + testActionSetId + "/dataset_mergerel"));
|
|
|
|
FileUtils
|
|
|
|
.deleteDirectory(new File(testOutputBasePath + "/" + testActionSetId + "/otherresearchproduct_mergerel"));
|
|
|
|
}
|
|
|
|
|
2023-10-02 09:25:12 +02:00
|
|
|
@Test
|
|
|
|
@Order(3)
|
|
|
|
void createMergeRelsWithPivotHistoryTest() throws Exception {
|
|
|
|
|
|
|
|
ArgumentApplicationParser parser = new ArgumentApplicationParser(
|
|
|
|
classPathResourceAsString("/eu/dnetlib/dhp/oa/dedup/createCC_parameters.json"));
|
|
|
|
|
|
|
|
spark.sql("CREATE DATABASE IF NOT EXISTS pivot_history_test");
|
|
|
|
ModelSupport.oafTypes.keySet().forEach(entityType -> {
|
|
|
|
try {
|
|
|
|
spark
|
|
|
|
.read()
|
|
|
|
.json(
|
|
|
|
Paths
|
|
|
|
.get(SparkDedupTest.class.getResource("/eu/dnetlib/dhp/dedup/pivot_history").toURI())
|
|
|
|
.toFile()
|
|
|
|
.getAbsolutePath())
|
|
|
|
.write()
|
|
|
|
.mode("overwrite")
|
|
|
|
.saveAsTable("pivot_history_test." + entityType);
|
|
|
|
} catch (URISyntaxException e) {
|
|
|
|
throw new RuntimeException(e);
|
|
|
|
}
|
|
|
|
});
|
|
|
|
|
|
|
|
parser
|
|
|
|
.parseArgument(
|
|
|
|
new String[] {
|
|
|
|
"-i",
|
|
|
|
testGraphBasePath,
|
|
|
|
"-asi",
|
|
|
|
testActionSetId,
|
|
|
|
"-la",
|
|
|
|
"lookupurl",
|
|
|
|
"-w",
|
|
|
|
testOutputBasePath,
|
|
|
|
"-h",
|
|
|
|
"",
|
|
|
|
"-pivotHistoryDatabase",
|
|
|
|
"pivot_history_test"
|
|
|
|
|
|
|
|
});
|
|
|
|
|
|
|
|
new SparkCreateMergeRels(parser, spark).run(isLookUpService);
|
|
|
|
|
|
|
|
long orgs_mergerel = spark
|
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/organization_mergerel")
|
|
|
|
.count();
|
|
|
|
final Dataset<Relation> pubs = spark
|
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/publication_mergerel")
|
|
|
|
.as(Encoders.bean(Relation.class));
|
|
|
|
long sw_mergerel = spark
|
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/software_mergerel")
|
|
|
|
.count();
|
|
|
|
long ds_mergerel = spark
|
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/dataset_mergerel")
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long orp_mergerel = spark
|
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/otherresearchproduct_mergerel")
|
|
|
|
.count();
|
|
|
|
|
|
|
|
final List<Relation> merges = pubs
|
|
|
|
.filter("source == '50|arXiv_dedup_::c93aeb433eb90ed7a86e29be00791b7c'")
|
|
|
|
.collectAsList();
|
|
|
|
assertEquals(3, merges.size());
|
|
|
|
Set<String> dups = Sets
|
|
|
|
.newHashSet(
|
|
|
|
"50|doi_________::3b1d0d8e8f930826665df9d6b82fbb73",
|
|
|
|
"50|doi_________::d5021b53204e4fdeab6ff5d5bc468032",
|
|
|
|
"50|arXiv_______::c93aeb433eb90ed7a86e29be00791b7c");
|
|
|
|
merges.forEach(r -> {
|
|
|
|
assertEquals(ModelConstants.RESULT_RESULT, r.getRelType());
|
|
|
|
assertEquals(ModelConstants.DEDUP, r.getSubRelType());
|
|
|
|
assertEquals(ModelConstants.MERGES, r.getRelClass());
|
|
|
|
assertTrue(dups.contains(r.getTarget()));
|
|
|
|
});
|
|
|
|
|
|
|
|
final List<Relation> mergedIn = pubs
|
|
|
|
.filter("target == '50|arXiv_dedup_::c93aeb433eb90ed7a86e29be00791b7c'")
|
|
|
|
.collectAsList();
|
|
|
|
assertEquals(3, mergedIn.size());
|
|
|
|
mergedIn.forEach(r -> {
|
|
|
|
assertEquals(ModelConstants.RESULT_RESULT, r.getRelType());
|
|
|
|
assertEquals(ModelConstants.DEDUP, r.getSubRelType());
|
|
|
|
assertEquals(ModelConstants.IS_MERGED_IN, r.getRelClass());
|
|
|
|
assertTrue(dups.contains(r.getSource()));
|
|
|
|
});
|
|
|
|
|
|
|
|
assertEquals(1268, orgs_mergerel);
|
|
|
|
assertEquals(1112, pubs.count());
|
|
|
|
assertEquals(292, sw_mergerel);
|
|
|
|
assertEquals(476, ds_mergerel);
|
|
|
|
assertEquals(742, orp_mergerel);
|
|
|
|
// System.out.println("orgs_mergerel = " + orgs_mergerel);
|
|
|
|
// System.out.println("pubs_mergerel = " + pubs_mergerel);
|
|
|
|
// System.out.println("sw_mergerel = " + sw_mergerel);
|
|
|
|
// System.out.println("ds_mergerel = " + ds_mergerel);
|
|
|
|
// System.out.println("orp_mergerel = " + orp_mergerel);
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2021-10-07 15:39:55 +02:00
|
|
|
@Test
|
|
|
|
@Order(4)
|
|
|
|
void createMergeRelsTest() throws Exception {
|
|
|
|
|
|
|
|
ArgumentApplicationParser parser = new ArgumentApplicationParser(
|
2022-11-11 16:10:25 +01:00
|
|
|
classPathResourceAsString("/eu/dnetlib/dhp/oa/dedup/createCC_parameters.json"));
|
2021-10-07 15:39:55 +02:00
|
|
|
|
|
|
|
parser
|
|
|
|
.parseArgument(
|
|
|
|
new String[] {
|
|
|
|
"-i",
|
|
|
|
testGraphBasePath,
|
|
|
|
"-asi",
|
|
|
|
testActionSetId,
|
|
|
|
"-la",
|
|
|
|
"lookupurl",
|
|
|
|
"-w",
|
2023-10-02 09:25:12 +02:00
|
|
|
testOutputBasePath,
|
|
|
|
"-h",
|
|
|
|
""
|
2021-10-07 15:39:55 +02:00
|
|
|
});
|
|
|
|
|
|
|
|
new SparkCreateMergeRels(parser, spark).run(isLookUpService);
|
|
|
|
|
|
|
|
long orgs_mergerel = spark
|
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/organization_mergerel")
|
|
|
|
.count();
|
2022-11-11 16:10:25 +01:00
|
|
|
final Dataset<Relation> pubs = spark
|
2021-10-07 15:39:55 +02:00
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/publication_mergerel")
|
2022-11-11 16:10:25 +01:00
|
|
|
.as(Encoders.bean(Relation.class));
|
2021-10-07 15:39:55 +02:00
|
|
|
long sw_mergerel = spark
|
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/software_mergerel")
|
|
|
|
.count();
|
|
|
|
long ds_mergerel = spark
|
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/dataset_mergerel")
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long orp_mergerel = spark
|
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/otherresearchproduct_mergerel")
|
|
|
|
.count();
|
|
|
|
|
2022-11-11 16:10:25 +01:00
|
|
|
final List<Relation> merges = pubs
|
|
|
|
.filter("source == '50|doi_dedup___::d5021b53204e4fdeab6ff5d5bc468032'")
|
|
|
|
.collectAsList();
|
|
|
|
assertEquals(3, merges.size());
|
|
|
|
Set<String> dups = Sets
|
|
|
|
.newHashSet(
|
|
|
|
"50|doi_________::3b1d0d8e8f930826665df9d6b82fbb73",
|
|
|
|
"50|doi_________::d5021b53204e4fdeab6ff5d5bc468032",
|
|
|
|
"50|arXiv_______::c93aeb433eb90ed7a86e29be00791b7c");
|
|
|
|
merges.forEach(r -> {
|
|
|
|
assertEquals(ModelConstants.RESULT_RESULT, r.getRelType());
|
|
|
|
assertEquals(ModelConstants.DEDUP, r.getSubRelType());
|
|
|
|
assertEquals(ModelConstants.MERGES, r.getRelClass());
|
|
|
|
assertTrue(dups.contains(r.getTarget()));
|
|
|
|
});
|
|
|
|
|
|
|
|
final List<Relation> mergedIn = pubs
|
|
|
|
.filter("target == '50|doi_dedup___::d5021b53204e4fdeab6ff5d5bc468032'")
|
|
|
|
.collectAsList();
|
|
|
|
assertEquals(3, mergedIn.size());
|
|
|
|
mergedIn.forEach(r -> {
|
|
|
|
assertEquals(ModelConstants.RESULT_RESULT, r.getRelType());
|
|
|
|
assertEquals(ModelConstants.DEDUP, r.getSubRelType());
|
|
|
|
assertEquals(ModelConstants.IS_MERGED_IN, r.getRelClass());
|
|
|
|
assertTrue(dups.contains(r.getSource()));
|
|
|
|
});
|
|
|
|
|
2022-03-15 16:33:03 +01:00
|
|
|
assertEquals(1268, orgs_mergerel);
|
2023-10-02 09:25:12 +02:00
|
|
|
assertEquals(1112, pubs.count());
|
|
|
|
assertEquals(292, sw_mergerel);
|
|
|
|
assertEquals(476, ds_mergerel);
|
|
|
|
assertEquals(742, orp_mergerel);
|
2022-03-15 16:33:03 +01:00
|
|
|
// System.out.println("orgs_mergerel = " + orgs_mergerel);
|
|
|
|
// System.out.println("pubs_mergerel = " + pubs_mergerel);
|
|
|
|
// System.out.println("sw_mergerel = " + sw_mergerel);
|
|
|
|
// System.out.println("ds_mergerel = " + ds_mergerel);
|
|
|
|
// System.out.println("orp_mergerel = " + orp_mergerel);
|
2021-10-07 15:39:55 +02:00
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
@Test
|
|
|
|
@Order(5)
|
|
|
|
void createDedupRecordTest() throws Exception {
|
|
|
|
|
|
|
|
ArgumentApplicationParser parser = new ArgumentApplicationParser(
|
2022-11-11 16:10:25 +01:00
|
|
|
classPathResourceAsString("/eu/dnetlib/dhp/oa/dedup/createDedupRecord_parameters.json"));
|
2021-10-07 15:39:55 +02:00
|
|
|
parser
|
|
|
|
.parseArgument(
|
|
|
|
new String[] {
|
|
|
|
"-i",
|
|
|
|
testGraphBasePath,
|
|
|
|
"-asi",
|
|
|
|
testActionSetId,
|
|
|
|
"-la",
|
|
|
|
"lookupurl",
|
|
|
|
"-w",
|
|
|
|
testOutputBasePath
|
|
|
|
});
|
|
|
|
|
|
|
|
new SparkCreateDedupRecord(parser, spark).run(isLookUpService);
|
|
|
|
|
2022-11-11 16:10:25 +01:00
|
|
|
final ObjectMapper mapper = new ObjectMapper()
|
|
|
|
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
|
|
|
|
|
|
|
|
final Dataset<Publication> pubs = spark
|
|
|
|
.read()
|
|
|
|
.textFile(testOutputBasePath + "/" + testActionSetId + "/publication_deduprecord")
|
|
|
|
.map(
|
|
|
|
(MapFunction<String, Publication>) value -> mapper.readValue(value, Publication.class),
|
|
|
|
Encoders.bean(Publication.class));
|
2021-10-07 15:39:55 +02:00
|
|
|
long orgs_deduprecord = jsc
|
|
|
|
.textFile(testOutputBasePath + "/" + testActionSetId + "/organization_deduprecord")
|
|
|
|
.count();
|
|
|
|
long sw_deduprecord = jsc
|
|
|
|
.textFile(testOutputBasePath + "/" + testActionSetId + "/software_deduprecord")
|
|
|
|
.count();
|
|
|
|
long ds_deduprecord = jsc.textFile(testOutputBasePath + "/" + testActionSetId + "/dataset_deduprecord").count();
|
|
|
|
long orp_deduprecord = jsc
|
|
|
|
.textFile(
|
|
|
|
testOutputBasePath + "/" + testActionSetId + "/otherresearchproduct_deduprecord")
|
|
|
|
.count();
|
|
|
|
|
2022-03-15 16:33:03 +01:00
|
|
|
assertEquals(86, orgs_deduprecord);
|
2023-10-02 09:25:12 +02:00
|
|
|
assertEquals(91, pubs.count());
|
|
|
|
assertEquals(47, sw_deduprecord);
|
2021-10-07 15:39:55 +02:00
|
|
|
assertEquals(97, ds_deduprecord);
|
2023-12-22 09:57:30 +01:00
|
|
|
assertEquals(92, orp_deduprecord);
|
2022-03-15 16:33:03 +01:00
|
|
|
|
2022-11-11 16:10:25 +01:00
|
|
|
verifyRoot_1(mapper, pubs);
|
|
|
|
|
2022-03-15 16:33:03 +01:00
|
|
|
// System.out.println("orgs_deduprecord = " + orgs_deduprecord);
|
|
|
|
// System.out.println("pubs_deduprecord = " + pubs_deduprecord);
|
|
|
|
// System.out.println("sw_deduprecord = " + sw_deduprecord);
|
|
|
|
// System.out.println("ds_deduprecord = " + ds_deduprecord);
|
|
|
|
// System.out.println("orp_deduprecord = " + orp_deduprecord);
|
2021-10-07 15:39:55 +02:00
|
|
|
}
|
|
|
|
|
2022-11-11 16:10:25 +01:00
|
|
|
private static void verifyRoot_1(ObjectMapper mapper, Dataset<Publication> pubs) {
|
|
|
|
Publication root = pubs
|
|
|
|
.filter("id = '50|doi_dedup___::d5021b53204e4fdeab6ff5d5bc468032'")
|
|
|
|
.first();
|
|
|
|
assertNotNull(root);
|
|
|
|
|
|
|
|
final Dataset<String> publication = spark
|
|
|
|
.read()
|
|
|
|
.textFile(DedupUtility.createEntityPath(testGraphBasePath, "publication"));
|
|
|
|
|
|
|
|
Publication crossref_duplicate = publication
|
|
|
|
.map(
|
|
|
|
(MapFunction<String, Publication>) value -> mapper.readValue(value, Publication.class),
|
|
|
|
Encoders.bean(Publication.class))
|
|
|
|
.filter("id = '50|doi_________::d5021b53204e4fdeab6ff5d5bc468032'")
|
|
|
|
.collectAsList()
|
|
|
|
.get(0);
|
|
|
|
|
|
|
|
assertEquals(crossref_duplicate.getJournal().getName(), root.getJournal().getName());
|
|
|
|
assertEquals(crossref_duplicate.getJournal().getIssnPrinted(), root.getJournal().getIssnPrinted());
|
|
|
|
assertEquals(crossref_duplicate.getPublisher().getValue(), root.getPublisher().getValue());
|
|
|
|
|
|
|
|
Set<String> rootPids = root
|
|
|
|
.getPid()
|
|
|
|
.stream()
|
|
|
|
.map(StructuredProperty::getValue)
|
|
|
|
.collect(Collectors.toCollection(HashSet::new));
|
|
|
|
Set<String> dupPids = crossref_duplicate
|
|
|
|
.getPid()
|
|
|
|
.stream()
|
|
|
|
.map(StructuredProperty::getValue)
|
|
|
|
.collect(Collectors.toCollection(HashSet::new));
|
|
|
|
|
|
|
|
assertFalse(Sets.intersection(rootPids, dupPids).isEmpty());
|
|
|
|
assertTrue(rootPids.contains("10.1109/jstqe.2022.3205716"));
|
|
|
|
|
|
|
|
Optional<Instance> instance_cr = root
|
|
|
|
.getInstance()
|
|
|
|
.stream()
|
|
|
|
.filter(i -> i.getCollectedfrom().getValue().equals("Crossref"))
|
|
|
|
.findFirst();
|
|
|
|
assertTrue(instance_cr.isPresent());
|
|
|
|
assertEquals("OPEN", instance_cr.get().getAccessright().getClassid());
|
|
|
|
assertEquals("Open Access", instance_cr.get().getAccessright().getClassname());
|
|
|
|
assertEquals(OpenAccessRoute.hybrid, instance_cr.get().getAccessright().getOpenAccessRoute());
|
|
|
|
assertEquals(
|
|
|
|
"IEEE Journal of Selected Topics in Quantum Electronics", instance_cr.get().getHostedby().getValue());
|
|
|
|
assertEquals("0001", instance_cr.get().getInstancetype().getClassid());
|
|
|
|
assertEquals("Article", instance_cr.get().getInstancetype().getClassname());
|
|
|
|
}
|
|
|
|
|
2021-10-07 15:39:55 +02:00
|
|
|
@Test
|
|
|
|
@Order(6)
|
|
|
|
void updateEntityTest() throws Exception {
|
|
|
|
|
|
|
|
ArgumentApplicationParser parser = new ArgumentApplicationParser(
|
2022-11-11 16:10:25 +01:00
|
|
|
classPathResourceAsString("/eu/dnetlib/dhp/oa/dedup/updateEntity_parameters.json"));
|
2021-10-07 15:39:55 +02:00
|
|
|
parser
|
|
|
|
.parseArgument(
|
|
|
|
new String[] {
|
|
|
|
"-i", testGraphBasePath, "-w", testOutputBasePath, "-o", testDedupGraphBasePath
|
|
|
|
});
|
|
|
|
|
|
|
|
new SparkUpdateEntity(parser, spark).run(isLookUpService);
|
|
|
|
|
|
|
|
long organizations = jsc.textFile(testDedupGraphBasePath + "/organization").count();
|
|
|
|
long publications = jsc.textFile(testDedupGraphBasePath + "/publication").count();
|
|
|
|
long projects = jsc.textFile(testDedupGraphBasePath + "/project").count();
|
|
|
|
long datasource = jsc.textFile(testDedupGraphBasePath + "/datasource").count();
|
|
|
|
long softwares = jsc.textFile(testDedupGraphBasePath + "/software").count();
|
|
|
|
long dataset = jsc.textFile(testDedupGraphBasePath + "/dataset").count();
|
|
|
|
long otherresearchproduct = jsc.textFile(testDedupGraphBasePath + "/otherresearchproduct").count();
|
|
|
|
|
|
|
|
long mergedOrgs = spark
|
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/organization_mergerel")
|
|
|
|
.as(Encoders.bean(Relation.class))
|
|
|
|
.where("relClass=='merges'")
|
|
|
|
.javaRDD()
|
|
|
|
.map(Relation::getTarget)
|
|
|
|
.distinct()
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long mergedPubs = spark
|
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/publication_mergerel")
|
|
|
|
.as(Encoders.bean(Relation.class))
|
|
|
|
.where("relClass=='merges'")
|
|
|
|
.javaRDD()
|
|
|
|
.map(Relation::getTarget)
|
|
|
|
.distinct()
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long mergedSw = spark
|
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/software_mergerel")
|
|
|
|
.as(Encoders.bean(Relation.class))
|
|
|
|
.where("relClass=='merges'")
|
|
|
|
.javaRDD()
|
|
|
|
.map(Relation::getTarget)
|
|
|
|
.distinct()
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long mergedDs = spark
|
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/dataset_mergerel")
|
|
|
|
.as(Encoders.bean(Relation.class))
|
|
|
|
.where("relClass=='merges'")
|
|
|
|
.javaRDD()
|
|
|
|
.map(Relation::getTarget)
|
|
|
|
.distinct()
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long mergedOrp = spark
|
|
|
|
.read()
|
|
|
|
.load(testOutputBasePath + "/" + testActionSetId + "/otherresearchproduct_mergerel")
|
|
|
|
.as(Encoders.bean(Relation.class))
|
|
|
|
.where("relClass=='merges'")
|
|
|
|
.javaRDD()
|
|
|
|
.map(Relation::getTarget)
|
|
|
|
.distinct()
|
|
|
|
.count();
|
|
|
|
|
2023-10-02 09:25:12 +02:00
|
|
|
assertEquals(925, publications);
|
2022-03-15 16:33:03 +01:00
|
|
|
assertEquals(839, organizations);
|
2021-10-07 15:39:55 +02:00
|
|
|
assertEquals(100, projects);
|
|
|
|
assertEquals(100, datasource);
|
2023-10-02 09:25:12 +02:00
|
|
|
assertEquals(196, softwares);
|
2021-10-07 15:39:55 +02:00
|
|
|
assertEquals(389, dataset);
|
2023-12-22 09:57:30 +01:00
|
|
|
assertEquals(520, otherresearchproduct);
|
2022-03-15 16:33:03 +01:00
|
|
|
|
|
|
|
// System.out.println("publications = " + publications);
|
|
|
|
// System.out.println("organizations = " + organizations);
|
|
|
|
// System.out.println("projects = " + projects);
|
|
|
|
// System.out.println("datasource = " + datasource);
|
|
|
|
// System.out.println("software = " + softwares);
|
|
|
|
// System.out.println("dataset = " + dataset);
|
|
|
|
// System.out.println("otherresearchproduct = " + otherresearchproduct);
|
2021-10-07 15:39:55 +02:00
|
|
|
|
|
|
|
long deletedOrgs = jsc
|
|
|
|
.textFile(testDedupGraphBasePath + "/organization")
|
|
|
|
.filter(this::isDeletedByInference)
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long deletedPubs = jsc
|
|
|
|
.textFile(testDedupGraphBasePath + "/publication")
|
|
|
|
.filter(this::isDeletedByInference)
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long deletedSw = jsc
|
|
|
|
.textFile(testDedupGraphBasePath + "/software")
|
|
|
|
.filter(this::isDeletedByInference)
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long deletedDs = jsc
|
|
|
|
.textFile(testDedupGraphBasePath + "/dataset")
|
|
|
|
.filter(this::isDeletedByInference)
|
|
|
|
.count();
|
|
|
|
|
|
|
|
long deletedOrp = jsc
|
|
|
|
.textFile(testDedupGraphBasePath + "/otherresearchproduct")
|
|
|
|
.filter(this::isDeletedByInference)
|
|
|
|
.count();
|
|
|
|
|
|
|
|
assertEquals(mergedOrgs, deletedOrgs);
|
|
|
|
assertEquals(mergedPubs, deletedPubs);
|
|
|
|
assertEquals(mergedSw, deletedSw);
|
|
|
|
assertEquals(mergedDs, deletedDs);
|
|
|
|
assertEquals(mergedOrp, deletedOrp);
|
|
|
|
}
|
|
|
|
|
2023-09-01 09:32:57 +02:00
|
|
|
@Test
|
|
|
|
@Order(6)
|
|
|
|
void copyRelationsNoOpenorgsTest() throws Exception {
|
|
|
|
|
|
|
|
ArgumentApplicationParser parser = new ArgumentApplicationParser(
|
|
|
|
IOUtils
|
|
|
|
.toString(
|
|
|
|
SparkCopyRelationsNoOpenorgs.class
|
|
|
|
.getResourceAsStream(
|
|
|
|
"/eu/dnetlib/dhp/oa/dedup/updateEntity_parameters.json")));
|
|
|
|
parser
|
|
|
|
.parseArgument(
|
|
|
|
new String[] {
|
|
|
|
"-i", testGraphBasePath, "-w", testOutputBasePath, "-o", testDedupGraphBasePath
|
|
|
|
});
|
|
|
|
|
|
|
|
new SparkCopyRelationsNoOpenorgs(parser, spark).run(isLookUpService);
|
|
|
|
|
|
|
|
final Dataset<Row> outputRels = spark.read().text(testDedupGraphBasePath + "/relation");
|
|
|
|
|
|
|
|
System.out.println(outputRels.count());
|
|
|
|
// assertEquals(2382, outputRels.count());
|
|
|
|
}
|
|
|
|
|
2021-10-07 15:39:55 +02:00
|
|
|
@Test
|
|
|
|
@Order(7)
|
|
|
|
void propagateRelationTest() throws Exception {
|
|
|
|
|
|
|
|
ArgumentApplicationParser parser = new ArgumentApplicationParser(
|
2022-11-11 16:10:25 +01:00
|
|
|
classPathResourceAsString("/eu/dnetlib/dhp/oa/dedup/propagateRelation_parameters.json"));
|
2021-10-07 15:39:55 +02:00
|
|
|
parser
|
|
|
|
.parseArgument(
|
|
|
|
new String[] {
|
2023-09-01 09:32:57 +02:00
|
|
|
"-i", testDedupGraphBasePath, "-w", testOutputBasePath, "-o", testConsistencyGraphBasePath
|
2021-10-07 15:39:55 +02:00
|
|
|
});
|
|
|
|
|
|
|
|
new SparkPropagateRelation(parser, spark).run(isLookUpService);
|
|
|
|
|
2023-09-01 09:32:57 +02:00
|
|
|
long relations = jsc.textFile(testDedupGraphBasePath + "/relation").count();
|
2021-10-07 15:39:55 +02:00
|
|
|
|
2022-03-15 16:33:03 +01:00
|
|
|
// assertEquals(4860, relations);
|
|
|
|
System.out.println("relations = " + relations);
|
2021-10-07 15:39:55 +02:00
|
|
|
|
|
|
|
// check deletedbyinference
|
|
|
|
final Dataset<Relation> mergeRels = spark
|
|
|
|
.read()
|
|
|
|
.load(DedupUtility.createMergeRelPath(testOutputBasePath, "*", "*"))
|
|
|
|
.as(Encoders.bean(Relation.class));
|
|
|
|
|
2023-09-01 09:32:57 +02:00
|
|
|
Dataset<Row> inputRels = spark
|
2023-08-08 15:52:20 +02:00
|
|
|
.read()
|
2023-09-01 09:32:57 +02:00
|
|
|
.json(testDedupGraphBasePath + "/relation");
|
2023-08-04 14:27:39 +02:00
|
|
|
|
2023-09-01 09:32:57 +02:00
|
|
|
Dataset<Row> outputRels = spark
|
|
|
|
.read()
|
|
|
|
.json(testConsistencyGraphBasePath + "/relation");
|
2023-08-04 14:27:39 +02:00
|
|
|
|
2023-09-01 09:32:57 +02:00
|
|
|
assertEquals(
|
|
|
|
0, outputRels
|
|
|
|
.filter("dataInfo.deletedbyinference == true OR dataInfo.invisible == true")
|
|
|
|
.count());
|
2023-08-04 14:27:39 +02:00
|
|
|
|
2023-09-01 09:32:57 +02:00
|
|
|
assertEquals(
|
|
|
|
5, outputRels
|
|
|
|
.filter("relClass NOT IN ('merges', 'isMergedIn')")
|
|
|
|
.count());
|
2023-08-04 14:27:39 +02:00
|
|
|
|
2023-09-01 09:32:57 +02:00
|
|
|
assertEquals(5 + mergeRels.count(), outputRels.count());
|
2023-08-04 14:27:39 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
@Test
|
2023-09-01 09:32:57 +02:00
|
|
|
@Order(8)
|
|
|
|
void testCleanedPropagatedRelations() throws Exception {
|
|
|
|
Dataset<Row> df_before = spark
|
|
|
|
.read()
|
|
|
|
.schema(Encoders.bean(Relation.class).schema())
|
|
|
|
.json(testDedupGraphBasePath + "/relation");
|
2023-08-04 14:27:39 +02:00
|
|
|
|
2023-08-08 15:52:20 +02:00
|
|
|
Dataset<Row> df_after = spark
|
|
|
|
.read()
|
|
|
|
.schema(Encoders.bean(Relation.class).schema())
|
2023-09-01 09:32:57 +02:00
|
|
|
.json(testConsistencyGraphBasePath + "/relation");
|
2023-08-04 14:27:39 +02:00
|
|
|
|
|
|
|
assertNotEquals(df_before.count(), df_after.count());
|
2023-09-01 09:32:57 +02:00
|
|
|
|
|
|
|
assertEquals(
|
|
|
|
0, df_after
|
|
|
|
.filter("dataInfo.deletedbyinference == true OR dataInfo.invisible == true")
|
|
|
|
.count());
|
|
|
|
|
|
|
|
assertEquals(
|
|
|
|
5, df_after
|
|
|
|
.filter("relClass NOT IN ('merges', 'isMergedIn')")
|
|
|
|
.count());
|
2023-08-04 14:27:39 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
@Test
|
|
|
|
@Order(10)
|
2021-10-07 15:39:55 +02:00
|
|
|
void testRelations() throws Exception {
|
|
|
|
testUniqueness("/eu/dnetlib/dhp/dedup/test/relation_1.json", 12, 10);
|
|
|
|
testUniqueness("/eu/dnetlib/dhp/dedup/test/relation_2.json", 10, 2);
|
|
|
|
}
|
|
|
|
|
|
|
|
private void testUniqueness(String path, int expected_total, int expected_unique) {
|
|
|
|
Dataset<Relation> rel = spark
|
|
|
|
.read()
|
|
|
|
.textFile(getClass().getResource(path).getPath())
|
|
|
|
.map(
|
|
|
|
(MapFunction<String, Relation>) s -> new ObjectMapper().readValue(s, Relation.class),
|
|
|
|
Encoders.bean(Relation.class));
|
|
|
|
|
|
|
|
assertEquals(expected_total, rel.count());
|
|
|
|
assertEquals(expected_unique, rel.distinct().count());
|
|
|
|
}
|
|
|
|
|
|
|
|
@AfterAll
|
|
|
|
public static void finalCleanUp() throws IOException {
|
|
|
|
FileUtils.deleteDirectory(new File(testOutputBasePath));
|
|
|
|
FileUtils.deleteDirectory(new File(testDedupGraphBasePath));
|
2023-09-01 09:32:57 +02:00
|
|
|
FileUtils.deleteDirectory(new File(testConsistencyGraphBasePath));
|
2021-10-07 15:39:55 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
public boolean isDeletedByInference(String s) {
|
|
|
|
return s.contains("\"deletedbyinference\":true");
|
|
|
|
}
|
2022-11-11 16:10:25 +01:00
|
|
|
|
|
|
|
private static String classPathResourceAsString(String path) throws IOException {
|
|
|
|
return IOUtils
|
|
|
|
.toString(
|
|
|
|
SparkDedupTest.class
|
|
|
|
.getResourceAsStream(path));
|
|
|
|
}
|
|
|
|
|
2020-04-20 18:44:06 +02:00
|
|
|
}
|