Revise propagation tests

Giambattista Bloisi 2024-12-17 16:01:03 +01:00
parent d095b31ea8
commit 71fe0374dc
21 changed files with 80 additions and 78 deletions

View File

@@ -72,7 +72,7 @@ abstract class SparkEnrichWithOrcidAuthors(propertyPath: String, args: Array[Str
   def createTemporaryData(spark: SparkSession, graphPath: String, orcidPath: String, targetPath: String): Unit

-  private def analisys(targetPath: String, classid:String, provenance:String): Unit = {
+  private def analisys(targetPath: String, classid: String, provenance: String): Unit = {
     ModelSupport.entityTypes.asScala
       .filter(e => ModelSupport.isResult(e._1))
       .foreach(e => {
@@ -94,4 +94,3 @@ abstract class SparkEnrichWithOrcidAuthors(propertyPath: String, args: Array[Str
       })
-
   }
 }

View File

@@ -21,9 +21,13 @@ class DecisionTreeTest {
 	void testJPath() throws IOException {

 		DedupConfig conf = DedupConfig
-			.load(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/jpath/dedup_conf_organization.json")));
+			.load(
+				IOUtils
+					.toString(
+						getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/jpath/dedup_conf_organization.json")));

-		final String org = IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/jpath/organization.json"));
+		final String org = IOUtils
+			.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/jpath/organization.json"));

 		Row row = SparkModel.apply(conf).rowFromJson(org);
@@ -42,7 +46,8 @@ class DecisionTreeTest {
 					.getResourceAsStream(
 						"/eu/dnetlib/dhp/dedup/conf/org.curr.conf.json")));

-		final String org = IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/jpath/organization_example1.json"));
+		final String org = IOUtils
+			.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/jpath/organization_example1.json"));

 		Row row = SparkModel.apply(conf).rowFromJson(org);
 		// to check that the same parsing returns the same row

View File

@@ -440,7 +440,8 @@ public class SparkDedupTest implements Serializable {
 			.count();

 		final List<Relation> merges = pubs
-			.filter("source == '50|doi_dedup___::d5021b53204e4fdeab6ff5d5bc468032'")// and relClass = '"+ModelConstants.MERGES+"'")
+			.filter("source == '50|doi_dedup___::d5021b53204e4fdeab6ff5d5bc468032'")// and relClass =
+			// '"+ModelConstants.MERGES+"'")
 			.collectAsList();

 		assertEquals(4, merges.size());
 		Set<String> dups = Sets

View File

@@ -19,9 +19,13 @@ class JsonPathTest {
 	void testJPath() throws IOException {

 		DedupConfig conf = DedupConfig
-			.load(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/jpath/dedup_conf_organization.json")));
+			.load(
+				IOUtils
+					.toString(
+						getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/jpath/dedup_conf_organization.json")));

-		final String org = IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/jpath/organization.json"));
+		final String org = IOUtils
+			.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/jpath/organization.json"));

 		Row row = SparkModel.apply(conf).rowFromJson(org);

View File

@@ -1,10 +1,11 @@
 package eu.dnetlib.dhp.orcidtoresultfromsemrel;

-import eu.dnetlib.dhp.utils.OrcidAuthor;
 import java.io.Serializable;
 import java.util.List;

+import eu.dnetlib.dhp.utils.OrcidAuthor;
+
 public class OrcidAuthors implements Serializable {
 	List<OrcidAuthor> orcidAuthorList;

View File

@@ -1,3 +1,4 @@
 package eu.dnetlib.dhp.orcidtoresultfromsemrel;
+
 import java.util.List;

View File

@@ -7,6 +7,7 @@ import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.List;

+import com.fasterxml.jackson.databind.DeserializationFeature;
 import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
@@ -33,7 +34,7 @@ public class CountryPropagationJobTest {
 	private static final Logger log = LoggerFactory.getLogger(CountryPropagationJobTest.class);

-	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

 	private static SparkSession spark;
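
Note: this lenient ObjectMapper configuration recurs in several test classes below. A minimal standalone sketch (not part of the commit; the Stub type is hypothetical) of what disabling FAIL_ON_UNKNOWN_PROPERTIES changes: JSON fields with no counterpart in the target class are silently skipped instead of aborting deserialization with an UnrecognizedPropertyException.

	import com.fasterxml.jackson.databind.DeserializationFeature;
	import com.fasterxml.jackson.databind.ObjectMapper;

	public class LenientMapperSketch {
		// Hypothetical target type with a single known field.
		static class Stub {
			public String id;
		}

		public static void main(String[] args) throws Exception {
			ObjectMapper mapper = new ObjectMapper()
				.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
			// "extra" has no counterpart in Stub; the lenient mapper drops it.
			Stub s = mapper.readValue("{\"id\": \"50|x\", \"extra\": true}", Stub.class);
			System.out.println(s.id); // prints 50|x
		}
	}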

View File

@@ -5,6 +5,7 @@ import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;

+import com.fasterxml.jackson.databind.DeserializationFeature;
 import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
@@ -19,7 +20,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 public class DatasourceCountryPreparationTest {

-	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

 	private static SparkSession spark;

View File

@@ -71,23 +71,24 @@ public class OrcidPropagationJobTest {
 			.getResource(
 				"/eu/dnetlib/dhp/orcidtoresultfromsemrel/preparedInfo/mergedOrcidAssoc")
 			.getPath();

-		SparkOrcidToResultFromSemRelJob
+		SparkPropagateOrcidAuthor
 			.main(
 				new String[] {
-					"-isTest", Boolean.TRUE.toString(),
-					"-isSparkSessionManaged", Boolean.FALSE.toString(),
-					"-sourcePath", sourcePath,
-					"-hive_metastore_uris", "",
-					"-saveGraph", "true",
-					"-resultTableName", Dataset.class.getCanonicalName(),
-					"-outputPath", workingDir.toString() + "/dataset",
-					"-possibleUpdatesPath", possibleUpdatesPath
+					"-graphPath",
+					getClass()
+						.getResource(
+							"/eu/dnetlib/dhp/orcidtoresultfromsemrel/sample/noupdate")
+						.getPath(),
+					"-targetPath",
+					workingDir.toString() + "/graph",
+					"-orcidPath", "",
+					"-workingDir", workingDir.toString()
 				});

 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

 		JavaRDD<Dataset> tmp = sc
-			.textFile(workingDir.toString() + "/dataset")
+			.textFile(workingDir.toString() + "/graph/dataset")
 			.map(item -> OBJECT_MAPPER.readValue(item, Dataset.class));

 		// tmp.map(s -> new Gson().toJson(s)).foreach(s -> System.out.println(s));
@@ -110,36 +111,24 @@ public class OrcidPropagationJobTest {
 	@Test
 	void oneUpdateTest() throws Exception {
-		SparkOrcidToResultFromSemRelJob
+		SparkPropagateOrcidAuthor
 			.main(
 				new String[] {
-					"-isTest",
-					Boolean.TRUE.toString(),
-					"-isSparkSessionManaged",
-					Boolean.FALSE.toString(),
-					"-sourcePath",
-					getClass()
-						.getResource("/eu/dnetlib/dhp/orcidtoresultfromsemrel/sample/oneupdate")
-						.getPath(),
-					"-hive_metastore_uris",
-					"",
-					"-saveGraph",
-					"true",
-					"-resultTableName",
-					"eu.dnetlib.dhp.schema.oaf.Dataset",
-					"-outputPath",
-					workingDir.toString() + "/dataset",
-					"-possibleUpdatesPath",
+					"-graphPath",
 					getClass()
 						.getResource(
-							"/eu/dnetlib/dhp/orcidtoresultfromsemrel/preparedInfo/mergedOrcidAssoc")
-						.getPath()
+							"/eu/dnetlib/dhp/orcidtoresultfromsemrel/sample/oneupdate")
+						.getPath(),
+					"-targetPath",
+					workingDir.toString() + "/graph",
+					"-orcidPath", "",
+					"-workingDir", workingDir.toString()
 				});

 		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

 		JavaRDD<Dataset> tmp = sc
-			.textFile(workingDir.toString() + "/dataset")
+			.textFile(workingDir.toString() + "/graph/dataset")
 			.map(item -> OBJECT_MAPPER.readValue(item, Dataset.class));

 		// tmp.map(s -> new Gson().toJson(s)).foreach(s -> System.out.println(s));
@@ -177,31 +166,18 @@ public class OrcidPropagationJobTest {
 	@Test
 	void twoUpdatesTest() throws Exception {
-		SparkOrcidToResultFromSemRelJob
+		SparkPropagateOrcidAuthor
 			.main(
 				new String[] {
-					"-isTest",
-					Boolean.TRUE.toString(),
-					"-isSparkSessionManaged",
-					Boolean.FALSE.toString(),
-					"-sourcePath",
+					"-graphPath",
 					getClass()
 						.getResource(
 							"/eu/dnetlib/dhp/orcidtoresultfromsemrel/sample/twoupdates")
 						.getPath(),
-					"-hive_metastore_uris",
-					"",
-					"-saveGraph",
-					"true",
-					"-resultTableName",
-					"eu.dnetlib.dhp.schema.oaf.Dataset",
-					"-outputPath",
+					"-targetPath",
 					workingDir.toString() + "/dataset",
-					"-possibleUpdatesPath",
-					getClass()
-						.getResource(
-							"/eu/dnetlib/dhp/orcidtoresultfromsemrel/preparedInfo/mergedOrcidAssoc")
-						.getPath()
+					"-orcidPath", "",
+					"-workingDir", workingDir.toString()
 				});

 		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
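
All three rewritten tests follow the same invocation pattern. A hedged summary sketch: the argument names are taken from this diff, the paths are placeholders, and the inference that SparkPropagateOrcidAuthor writes one subdirectory per result type under -targetPath rests only on the tests reading back <targetPath>/dataset.

	// Inferred contract: read the input graph from -graphPath and the ORCID
	// table from -orcidPath, stage temporary data under -workingDir, and write
	// the enriched graph under -targetPath (e.g. <targetPath>/dataset).
	SparkPropagateOrcidAuthor
		.main(
			new String[] {
				"-graphPath", "/placeholder/input/graph",
				"-targetPath", "/placeholder/output/graph",
				"-orcidPath", "",
				"-workingDir", "/placeholder/workdir"
			});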

View File

@@ -7,6 +7,7 @@ import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;

+import com.fasterxml.jackson.databind.DeserializationFeature;
 import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
@@ -30,7 +31,7 @@ public class ResultToCommunityJobTest {
 	private static final Logger log = LoggerFactory.getLogger(ResultToCommunityJobTest.class);

-	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

 	private static SparkSession spark;

View File

@@ -10,6 +10,7 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.stream.Collectors;

+import com.fasterxml.jackson.databind.DeserializationFeature;
 import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
@@ -34,7 +35,7 @@ public class ResultToCommunityJobTest {
 	private static final Logger log = LoggerFactory.getLogger(ResultToCommunityJobTest.class);

-	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

 	private static SparkSession spark;

View File

@@ -0,0 +1 @@
+{"subRelType": "supplement", "relClass": "isSupplementedBy", "dataInfo": {"provenanceaction": {"classid": "iis", "classname": "Inferred by OpenAIRE", "schemeid": "dnet:provenanceActions", "schemename": "dnet:provenanceActions"}, "deletedbyinference": false, "inferred": true, "inferenceprovenance": "iis::document_affiliations", "invisible": false, "trust": "0.7731"}, "target": "50|dedup_wf_001::95b033c0c3961f6a1cdcd41a99a9632e", "lastupdatetimestamp": 1694431186898, "relType": "resultOrganization", "source": "50|dedup_wf_001::36bcfaa1494c849547a346da688ade24", "collectedfrom": [], "validated": false, "properties": []}