package eu.dnetlib.dhp.projecttoresult;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.schema.oaf.Relation;

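/**
 * Tests for the result-to-project propagation job ({@link SparkResultToProjectThroughSemRelJob}): given a set of
 * potential result/project associations and the set of links already present in the graph, the job is expected to
 * materialize only the missing links, emitting for each new association both an "isProducedBy" and a "produces"
 * relation, flagged with inference provenance "propagation".
 */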
public class ProjectPropagationJobTest {

	private static final Logger log = LoggerFactory.getLogger(ProjectPropagationJobTest.class);

	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	private static SparkSession spark;

	private static Path workingDir;

	private static final SparkConf conf = new SparkConf();

	@BeforeAll
	public static void beforeAll() throws IOException {
		conf.setAppName(ProjectPropagationJobTest.class.getSimpleName());
		conf.setMaster("local[*]");
		conf.set("spark.driver.host", "localhost");
		conf.set("hive.metastore.local", "true");
		conf.set("spark.ui.enabled", "false");

		spark = SparkSession
			.builder()
			.appName(ProjectPropagationJobTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();
	}

	@AfterAll
	public static void afterAll() throws IOException {
		spark.stop();
	}

	/**
	 * There are no new relations to be added. All the possible relations have already been linked with the project in
	 * the graph.
	 *
	 * @throws Exception
	 */
	@Test
	void NoUpdateTest() throws Exception {
		workingDir = Files.createTempDirectory(ProjectPropagationJobTest.class.getSimpleName());
		log.info("using work dir {}", workingDir);

		final String potentialUpdatePath = getClass()
			.getResource(
				"/eu/dnetlib/dhp/projecttoresult/preparedInfo/noupdates/potentialUpdates")
			.getPath();
		final String alreadyLinkedPath = getClass()
			.getResource(
				"/eu/dnetlib/dhp/projecttoresult/preparedInfo/alreadyLinked")
			.getPath();
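
		// the job is invoked through its main() with CLI-style arguments: -outputPath is where the new
		// relations are written, while -potentialUpdatePath and -alreadyLinkedPath point to the prepared inputs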
		SparkResultToProjectThroughSemRelJob
			.main(
				new String[] {
					"-isSparkSessionManaged", Boolean.FALSE.toString(),
					"-hive_metastore_uris", "",
					"-outputPath", workingDir.toString() + "/relation",
					"-potentialUpdatePath", potentialUpdatePath,
					"-alreadyLinkedPath", alreadyLinkedPath,
				});

		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

		JavaRDD<Relation> tmp = sc
			.textFile(workingDir.toString() + "/relation")
			.map(item -> OBJECT_MAPPER.readValue(item, Relation.class));

		// everything in potentialUpdates is already linked, so no new relation is expected
		Assertions.assertEquals(0, tmp.count());

		FileUtils.deleteDirectory(workingDir.toFile());
	}

	/**
	 * All the possible updates will produce a new relation. No relations are already linked in the graph.
	 *
	 * @throws Exception
	 */
	@Test
	void UpdateTenTest() throws Exception {
		workingDir = Files.createTempDirectory(ProjectPropagationJobTest.class.getSimpleName());
		log.info("using work dir {}", workingDir);

		spark = SparkSession
			.builder()
			.appName(ProjectPropagationJobTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();

		final String potentialUpdatePath = getClass()
			.getResource(
				"/eu/dnetlib/dhp/projecttoresult/preparedInfo/tenupdates/potentialUpdates")
			.getPath();
		final String alreadyLinkedPath = getClass()
			.getResource(
				"/eu/dnetlib/dhp/projecttoresult/preparedInfo/alreadyLinked")
			.getPath();

		SparkResultToProjectThroughSemRelJob
			.main(
				new String[] {
					"-isSparkSessionManaged", Boolean.FALSE.toString(),
					"-hive_metastore_uris", "",
					"-outputPath", workingDir.toString() + "/relation",
					"-potentialUpdatePath", potentialUpdatePath,
					"-alreadyLinkedPath", alreadyLinkedPath,
				});

		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

		JavaRDD<Relation> tmp = sc
			.textFile(workingDir.toString() + "/relation")
			.map(item -> OBJECT_MAPPER.readValue(item, Relation.class));

		// 10 new relations in total: each new association adds both a "produces" and an "isProducedBy" relation
		Assertions.assertEquals(10, tmp.count());

		Dataset<Relation> verificationDs = spark.createDataset(tmp.rdd(), Encoders.bean(Relation.class));

		Assertions.assertEquals(5, verificationDs.filter("relClass = 'produces'").count());
		Assertions.assertEquals(5, verificationDs.filter("relClass = 'isProducedBy'").count());
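
		// identifier prefixes distinguish the entity types here: "50" marks results, "40" marks projects,
		// so the two checks below verify the direction of each propagated relation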
		Assertions
			.assertEquals(
				5,
				verificationDs
					.filter(
						(FilterFunction<Relation>) r -> r.getSource().startsWith("50")
							&& r.getTarget().startsWith("40")
							&& r.getRelClass().equals("isProducedBy"))
					.count());
		Assertions
			.assertEquals(
				5,
				verificationDs
					.filter(
						(FilterFunction<Relation>) r -> r.getSource().startsWith("40")
							&& r.getTarget().startsWith("50")
							&& r.getRelClass().equals("produces"))
					.count());
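
		// every relation created by the propagation must carry datainfo.inferenceprovenance = 'propagation'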
		verificationDs.createOrReplaceTempView("temporary");

		Assertions
			.assertEquals(
				10,
				spark
					.sql(
						"SELECT * FROM temporary WHERE datainfo.inferenceprovenance = 'propagation'")
					.count());

		FileUtils.deleteDirectory(workingDir.toFile());
	}

	/**
	 * One of the relations in the possible updates is already linked to the project in the graph. All the others are
	 * not: only the missing associations are added, leading to the 8 new relations (4 per direction) verified below.
	 *
	 * @throws Exception
	 */
	@Test
	void UpdateMixTest() throws Exception {
		workingDir = Files.createTempDirectory(ProjectPropagationJobTest.class.getSimpleName());
		log.info("using work dir {}", workingDir);

		spark = SparkSession
			.builder()
			.appName(ProjectPropagationJobTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();

		final String potentialUpdatePath = getClass()
			.getResource(
				"/eu/dnetlib/dhp/projecttoresult/preparedInfo/updatesmixed/potentialUpdates")
			.getPath();
		final String alreadyLinkedPath = getClass()
			.getResource(
				"/eu/dnetlib/dhp/projecttoresult/preparedInfo/alreadyLinked")
			.getPath();

		SparkResultToProjectThroughSemRelJob
			.main(
				new String[] {
					"-isSparkSessionManaged", Boolean.FALSE.toString(),
					"-hive_metastore_uris", "",
					"-outputPath", workingDir.toString() + "/relation",
					"-potentialUpdatePath", potentialUpdatePath,
					"-alreadyLinkedPath", alreadyLinkedPath,
				});

		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

		JavaRDD<Relation> tmp = sc
			.textFile(workingDir.toString() + "/relation")
			.map(item -> OBJECT_MAPPER.readValue(item, Relation.class));

		// only the associations not already linked produce new relations, two per association
		Assertions.assertEquals(8, tmp.count());

		Dataset<Relation> verificationDs = spark.createDataset(tmp.rdd(), Encoders.bean(Relation.class));

		Assertions.assertEquals(4, verificationDs.filter("relClass = 'produces'").count());
		Assertions.assertEquals(4, verificationDs.filter("relClass = 'isProducedBy'").count());

		Assertions
			.assertEquals(
				4,
				verificationDs
					.filter(
						(FilterFunction<Relation>) r -> r.getSource().startsWith("50")
							&& r.getTarget().startsWith("40")
							&& r.getRelClass().equals("isProducedBy"))
					.count());
		Assertions
			.assertEquals(
				4,
				verificationDs
					.filter(
						(FilterFunction<Relation>) r -> r.getSource().startsWith("40")
							&& r.getTarget().startsWith("50")
							&& r.getRelClass().equals("produces"))
					.count());

		verificationDs.createOrReplaceTempView("temporary");

		Assertions
			.assertEquals(
				8,
				spark
					.sql(
						"SELECT * FROM temporary WHERE datainfo.inferenceprovenance = 'propagation'")
					.count());

		FileUtils.deleteDirectory(workingDir.toFile());
	}
}