refactoring

Miriam Baglioni 2020-11-25 18:23:28 +01:00
parent 1a89f8211c
commit 124591a7f3
6 changed files with 188 additions and 186 deletions

View File

@@ -26,7 +26,6 @@ public class Constants {

 	public static String ORCID = "orcid";

 	static {
 		accessRightsCoarMap.put("OPEN", "c_abf2");
 		accessRightsCoarMap.put("RESTRICTED", "c_16ec");

View File

@@ -6,7 +6,6 @@ import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

 import java.io.Serializable;
 import java.util.*;
-import eu.dnetlib.dhp.schema.common.ModelConstants;
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.MapFunction;

@@ -20,6 +19,7 @@ import eu.dnetlib.dhp.oa.graph.dump.Constants;

 import eu.dnetlib.dhp.oa.graph.dump.ResultMapper;
 import eu.dnetlib.dhp.oa.graph.dump.Utils;
 import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
+import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
 import eu.dnetlib.dhp.schema.dump.oaf.community.Project;
 import eu.dnetlib.dhp.schema.oaf.Relation;

View File

@@ -6,7 +6,6 @@ import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

 import java.io.Serializable;
 import java.util.Optional;
-import eu.dnetlib.dhp.schema.common.ModelConstants;
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.MapFunction;

@@ -21,6 +20,7 @@ import org.slf4j.LoggerFactory;

 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.oa.graph.dump.Constants;
 import eu.dnetlib.dhp.oa.graph.dump.Utils;
+import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.dhp.schema.oaf.Result;
 import scala.Tuple2;
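The two hunks above make the same fix: eu.dnetlib.dhp.schema.common.ModelConstants moves out of the java.* block and into the project's own import group. Judging from these hunks alone (an inference, not a documented rule of the repository), imports are grouped java.* first, then third-party org.*/com.*, then eu.dnetlib.*, then scala.*, with blank lines between groups. A minimal illustrative shell, with a hypothetical class name:

import java.io.Serializable;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;

import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.Relation;

import scala.Tuple2;

// Illustrative only: shows the import grouping, not code from this commit.
public class ImportGroupingExample implements Serializable {

	private Optional<Relation> relation = Optional.empty();
}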

View File

@@ -164,7 +164,7 @@ public class CreateEntityTest {

 		final Consumer<ContextInfo> consumer = ci -> cInfoList.add(ci);
 		queryInformationSystem.getContextInformation(consumer);
-		for(ContextInfo cInfo: cInfoList){
+		for (ContextInfo cInfo : cInfoList) {
 			writer.write(new Gson().toJson(Process.getEntity(cInfo)));
 		}
 		writer.close();
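The loop reformatted in this hunk collects ContextInfo objects through a Consumer callback and writes each one out as JSON with Gson. A minimal, self-contained sketch of that collect-then-serialize pattern, assuming a stand-in ContextInfo class and plain Gson serialization in place of Process.getEntity:

import java.io.IOException;
import java.io.Writer;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

import com.google.gson.Gson;

public class ConsumerDumpSketch {

	// Hypothetical stand-in for the ContextInfo model used by CreateEntityTest.
	static class ContextInfo {
		String id;

		ContextInfo(String id) {
			this.id = id;
		}
	}

	public static void main(String[] args) throws IOException {
		final List<ContextInfo> cInfoList = new ArrayList<>();

		// The query layer pushes each context into a consumer, as the test
		// does via queryInformationSystem.getContextInformation(consumer).
		final Consumer<ContextInfo> consumer = ci -> cInfoList.add(ci);
		consumer.accept(new ContextInfo("dh-ch"));
		consumer.accept(new ContextInfo("ni"));

		try (Writer writer = Files.newBufferedWriter(Paths.get("entities.json"))) {
			for (ContextInfo cInfo : cInfoList) {
				// Plain Gson serialization stands in for Process.getEntity(cInfo).
				writer.write(new Gson().toJson(cInfo));
				writer.write("\n"); // one JSON object per line
			}
		}
	}
}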

View File

@@ -1,9 +1,11 @@

 package eu.dnetlib.dhp.oa.graph.dump.funderresult;

-import com.fasterxml.jackson.databind.ObjectMapper;
-import eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkResultLinkedToProject;
-import eu.dnetlib.dhp.schema.oaf.Publication;
-import eu.dnetlib.dhp.schema.oaf.Result;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.HashMap;
+
 import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;

@@ -17,117 +19,119 @@ import org.junit.jupiter.api.Test;

 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.HashMap;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkResultLinkedToProject;
+import eu.dnetlib.dhp.schema.oaf.Publication;
+import eu.dnetlib.dhp.schema.oaf.Result;

 public class ResultLinkedToProjectTest {

 	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

 	private static SparkSession spark;

 	private static Path workingDir;

 	private static final Logger log = LoggerFactory
 		.getLogger(eu.dnetlib.dhp.oa.graph.dump.funderresult.ResultLinkedToProjectTest.class);

 	private static HashMap<String, String> map = new HashMap<>();

 	@BeforeAll
 	public static void beforeAll() throws IOException {
 		workingDir = Files
-			.createTempDirectory(eu.dnetlib.dhp.oa.graph.dump.funderresult.ResultLinkedToProjectTest.class.getSimpleName());
+			.createTempDirectory(
+				eu.dnetlib.dhp.oa.graph.dump.funderresult.ResultLinkedToProjectTest.class.getSimpleName());
 		log.info("using work dir {}", workingDir);

 		SparkConf conf = new SparkConf();
 		conf.setAppName(eu.dnetlib.dhp.oa.graph.dump.funderresult.ResultLinkedToProjectTest.class.getSimpleName());
 		conf.setMaster("local[*]");
 		conf.set("spark.driver.host", "localhost");
 		conf.set("hive.metastore.local", "true");
 		conf.set("spark.ui.enabled", "false");
 		conf.set("spark.sql.warehouse.dir", workingDir.toString());
 		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

 		spark = SparkSession
 			.builder()
 			.appName(eu.dnetlib.dhp.oa.graph.dump.funderresult.ResultLinkedToProjectTest.class.getSimpleName())
 			.config(conf)
 			.getOrCreate();
 	}

 	@AfterAll
 	public static void afterAll() throws IOException {
 		FileUtils.deleteDirectory(workingDir.toFile());
 		spark.stop();
 	}

 	@Test
 	public void testNoMatch() throws Exception {

 		final String sourcePath = getClass()
 			.getResource("/eu/dnetlib/dhp/oa/graph/dump/funderresource/nomatch/papers.json")
 			.getPath();

 		final String relationPath = getClass()
 			.getResource("/eu/dnetlib/dhp/oa/graph/dump/funderresource/nomatch/relations.json")
 			.getPath();

 		SparkResultLinkedToProject.main(new String[] {
 			"-isSparkSessionManaged", Boolean.FALSE.toString(),
 			"-outputPath", workingDir.toString() + "/preparedInfo",
 			"-sourcePath", sourcePath,
 			"-resultTableName", "eu.dnetlib.dhp.schema.oaf.Publication",
 			"-relationPath", relationPath
 		});

 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

 		JavaRDD<Result> tmp = sc
 			.textFile(workingDir.toString() + "/preparedInfo")
 			.map(item -> OBJECT_MAPPER.readValue(item, Result.class));

 		org.apache.spark.sql.Dataset<Result> verificationDataset = spark
 			.createDataset(tmp.rdd(), Encoders.bean(Result.class));

 		Assertions.assertEquals(0, verificationDataset.count());
 	}

 	@Test
 	public void testMatchOne() throws Exception {

 		final String sourcePath = getClass()
 			.getResource("/eu/dnetlib/dhp/oa/graph/dump/funderresource/match/papers.json")
 			.getPath();

 		final String relationPath = getClass()
 			.getResource("/eu/dnetlib/dhp/oa/graph/dump/funderresource/match/relations.json")
 			.getPath();

 		SparkResultLinkedToProject.main(new String[] {
 			"-isSparkSessionManaged", Boolean.FALSE.toString(),
 			"-outputPath", workingDir.toString() + "/preparedInfo",
 			"-sourcePath", sourcePath,
 			"-resultTableName", "eu.dnetlib.dhp.schema.oaf.Publication",
 			"-relationPath", relationPath
 		});

 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

 		JavaRDD<Publication> tmp = sc
 			.textFile(workingDir.toString() + "/preparedInfo")
 			.map(item -> OBJECT_MAPPER.readValue(item, Publication.class));

 		org.apache.spark.sql.Dataset<Publication> verificationDataset = spark
 			.createDataset(tmp.rdd(), Encoders.bean(Publication.class));

 		Assertions.assertEquals(1, verificationDataset.count());
 	}
 }
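Both funder tests follow the same verification pattern: run the Spark job against fixture JSON, read the output back as JSON lines, deserialize with Jackson, and lift the RDD into a typed Dataset so counts and SQL-style filters can be asserted. A minimal, self-contained sketch of that pattern under stated assumptions — VerifyDumpSketch and the Record bean are hypothetical stand-ins for the dump model classes, not code from this repository:

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

import com.fasterxml.jackson.databind.ObjectMapper;

public class VerifyDumpSketch {

	// Minimal public bean standing in for the dump model classes (hypothetical).
	public static class Record {
		private String id;

		public String getId() {
			return id;
		}

		public void setId(String id) {
			this.id = id;
		}
	}

	public static void main(String[] args) {
		final String outputPath = args.length > 0 ? args[0] : "preparedInfo";

		// Same local-mode configuration the tests use.
		SparkConf conf = new SparkConf()
			.setAppName(VerifyDumpSketch.class.getSimpleName())
			.setMaster("local[*]")
			.set("spark.driver.host", "localhost")
			.set("spark.ui.enabled", "false");

		try (SparkSession spark = SparkSession.builder().config(conf).getOrCreate()) {
			JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
			ObjectMapper mapper = new ObjectMapper();

			// Read the job output (one JSON object per line) back as beans...
			JavaRDD<Record> rdd = sc
				.textFile(outputPath)
				.map(line -> mapper.readValue(line, Record.class));

			// ...then lift it into a typed Dataset so counts and SQL-style
			// filters can be asserted, as in the tests above.
			Dataset<Record> ds = spark.createDataset(rdd.rdd(), Encoders.bean(Record.class));
			System.out.println("records: " + ds.count());
			System.out.println("matching: " + ds.filter("id = 'someId'").count());
		}
	}
}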

View File

@@ -1,11 +1,10 @@

 package eu.dnetlib.dhp.oa.graph.dump.funderresult;

-import com.fasterxml.jackson.databind.ObjectMapper;
-import eu.dnetlib.dhp.oa.graph.dump.DumpJobTest;
-import eu.dnetlib.dhp.oa.graph.dump.SplitForCommunityTest;
-import eu.dnetlib.dhp.oa.graph.dump.community.CommunitySplit;
-import eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkDumpFunderResults;
-import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+
 import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;

@@ -19,124 +18,124 @@ import org.junit.jupiter.api.Test;

 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.oa.graph.dump.DumpJobTest;
+import eu.dnetlib.dhp.oa.graph.dump.SplitForCommunityTest;
+import eu.dnetlib.dhp.oa.graph.dump.community.CommunitySplit;
+import eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkDumpFunderResults;
+import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;

 public class SplitPerFunderTest {

 	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

 	private static SparkSession spark;

 	private static Path workingDir;

 	private static final Logger log = LoggerFactory.getLogger(SplitPerFunderTest.class);

 	@BeforeAll
 	public static void beforeAll() throws IOException {
 		workingDir = Files.createTempDirectory(SplitPerFunderTest.class.getSimpleName());
 		log.info("using work dir {}", workingDir);

 		SparkConf conf = new SparkConf();
 		conf.setAppName(SplitPerFunderTest.class.getSimpleName());
 		conf.setMaster("local[*]");
 		conf.set("spark.driver.host", "localhost");
 		conf.set("hive.metastore.local", "true");
 		conf.set("spark.ui.enabled", "false");
 		conf.set("spark.sql.warehouse.dir", workingDir.toString());
 		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

 		spark = SparkSession
 			.builder()
 			.appName(SplitPerFunderTest.class.getSimpleName())
 			.config(conf)
 			.getOrCreate();
 	}

 	@AfterAll
 	public static void afterAll() throws IOException {
 		FileUtils.deleteDirectory(workingDir.toFile());
 		spark.stop();
 	}

 	@Test
 	public void test1() throws Exception {

 		final String sourcePath = getClass()
 			.getResource("/eu/dnetlib/dhp/oa/graph/dump/funderresource/extendeddump")
 			.getPath();

 		SparkDumpFunderResults.main(new String[] {
 			"-isSparkSessionManaged", Boolean.FALSE.toString(),
 			"-outputPath", workingDir.toString() + "/split",
 			"-sourcePath", sourcePath,
 			"-relationPath", sourcePath
 		});

 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

 		// FP7 3
 		JavaRDD<CommunityResult> tmp = sc
 			.textFile(workingDir.toString() + "/split/EC_FP7")
 			.map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));

 		org.apache.spark.sql.Dataset<CommunityResult> verificationDataset = spark
 			.createDataset(tmp.rdd(), Encoders.bean(CommunityResult.class));

 		Assertions.assertEquals(3, verificationDataset.count());

 		Assertions
 			.assertEquals(
 				1, verificationDataset.filter("id = '50|dedup_wf_001::0d16b1714ab3077df73893a8ea57d776'").count());

 		// CIHR 2
 		tmp = sc
 			.textFile(workingDir.toString() + "/split/CIHR")
 			.map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));
 		Assertions.assertEquals(2, tmp.count());

-		//NWO 1
+		// NWO 1
 		tmp = sc
 			.textFile(workingDir.toString() + "/split/NWO")
 			.map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));
 		Assertions.assertEquals(1, tmp.count());

-		//NIH 3
+		// NIH 3
 		tmp = sc
 			.textFile(workingDir.toString() + "/split/NIH")
 			.map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));
 		Assertions.assertEquals(2, tmp.count());

-		//NSF 1
+		// NSF 1
 		tmp = sc
 			.textFile(workingDir.toString() + "/split/NSF")
 			.map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));
 		Assertions.assertEquals(1, tmp.count());

-		//SNSF 1
+		// SNSF 1
 		tmp = sc
 			.textFile(workingDir.toString() + "/split/SNSF")
 			.map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));
 		Assertions.assertEquals(1, tmp.count());

-		//NHMRC 1
+		// NHMRC 1
 		tmp = sc
 			.textFile(workingDir.toString() + "/split/NHMRC")
 			.map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));
 		Assertions.assertEquals(1, tmp.count());

-		//H2020 3
+		// H2020 3
 		tmp = sc
 			.textFile(workingDir.toString() + "/split/EC_H2020")
 			.map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));
 		Assertions.assertEquals(3, tmp.count());
 	}
 }