// dhp-graph-dump/dump/src/test/java/eu/dnetlib/dhp/oa/graph/dump/DumpJobTest.java


package eu.dnetlib.dhp.oa.graph.dump;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;
import eu.dnetlib.dhp.eosc.model.Result;
import eu.dnetlib.dhp.eosc.model.Subject;
import eu.dnetlib.dhp.oa.graph.dump.eosc.CommunityMap;
import eu.dnetlib.dhp.oa.graph.dump.eosc.SelectEoscResultsJobStep1;
import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct;
import eu.dnetlib.dhp.schema.oaf.Publication;
import eu.dnetlib.dhp.schema.oaf.Software;
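
/**
 * Tests for the EOSC graph dump. The suite exercises the selection of EOSC-tagged results
 * (datasets, software, other research products, publications) and their mapping onto the
 * eu.dnetlib.dhp.eosc.model representation, plus the serialization of auxiliary structures
 * such as the community map.
 */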
//@Disabled
public class DumpJobTest {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static SparkSession spark;
private static Path workingDir;
private static final Logger log = LoggerFactory.getLogger(DumpJobTest.class);
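
// Hard-coded community map (id -> label), mirroring the pairs returned by the context XQuery
// below; used only by testMap to inspect its JSON serialization.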
private static final CommunityMap map = new CommunityMap();
static {
map.put("egi", "EGI Federation");
map.put("fet-fp7", "FET FP7");
map.put("fet-h2020", "FET H2020");
map.put("clarin", "CLARIN");
map.put("fam", "Fisheries and Aquaculture Management");
map.put("ni", "Neuroinformatics");
map.put("mes", "European Marine Scinece");
map.put("instruct", "Instruct-Eric");
map.put("rda", "Research Data Alliance");
map.put("elixir-gr", "ELIXIR GR");
map.put("aginfra", "Agricultural and Food Sciences");
map.put("dariah", "DARIAH EU");
map.put("risis", "RISI");
map.put("ee", "SDSN - Greece");
map.put("oa-pg", "EC Post-Grant Open Access Pilot");
map.put("beopen", "Transport Research");
map.put("euromarine", "Euromarine");
map.put("ifremer", "Ifremer");
map.put("dh-ch", "Digital Humanities and Cultural Heritage");
map.put("science-innovation-policy", "Science and Innovation Policy Studies");
map.put("covid-19", "COVID-19");
map.put("enrmaps", "Energy Research");
map.put("epos", "EPOS");
}
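
// Sample <community/> fragments in the shape produced by the XQuery below; kept for reference,
// not used directly by the tests.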
List<String> communityMap = Arrays
.asList(
"<community id=\"egi\" label=\"EGI Federation\"/>",
"<community id=\"fet-fp7\" label=\"FET FP7\"/>",
"<community id=\"fet-h2020\" label=\"FET H2020\"/>",
"<community id=\"clarin\" label=\"CLARIN\"/>",
"<community id=\"rda\" label=\"Research Data Alliance\"/>",
"<community id=\"ee\" label=\"SDSN - Greece\"/>",
"<community id=\"dh-ch\" label=\"Digital Humanities and Cultural Heritage\"/>",
"<community id=\"fam\" label=\"Fisheries and Aquaculture Management\"/>",
"<community id=\"ni\" label=\"Neuroinformatics\"/>",
"<community id=\"mes\" label=\"European Marine Science\"/>",
"<community id=\"instruct\" label=\"Instruct-ERIC\"/>",
"<community id=\"elixir-gr\" label=\"ELIXIR GR\"/>",
"<community id=\"aginfra\" label=\"Agricultural and Food Sciences\"/>",
"<community id=\"dariah\" label=\"DARIAH EU\"/>",
"<community id=\"risis\" label=\"RISIS\"/>",
"<community id=\"epos\" label=\"EPOS\"/>",
"<community id=\"beopen\" label=\"Transport Research\"/>",
"<community id=\"euromarine\" label=\"EuroMarine\"/>",
"<community id=\"ifremer\" label=\"Ifremer\"/>",
"<community id=\"oa-pg\" label=\"EC Post-Grant Open Access Pilot\"/>",
"<community id=\"science-innovation-policy\" label=\"Science and Innovation Policy Studies\"/>",
"<community id=\"covid-19\" label=\"COVID-19\"/>",
"<community id=\"enermaps\" label=\"Energy Research\"/>");
private static final String XQUERY = "for $x in collection('/db/DRIVER/ContextDSResources/ContextDSResourceType') " +
" where $x//CONFIGURATION/context[./@type='community' or ./@type='ri'] " +
" return " +
"<community> " +
"{$x//CONFIGURATION/context/@id}" +
"{$x//CONFIGURATION/context/@label}" +
"</community>";
@BeforeAll
public static void beforeAll() throws IOException {
workingDir = Files.createTempDirectory(DumpJobTest.class.getSimpleName());
log.info("using work dir {}", workingDir);
SparkConf conf = new SparkConf();
conf.setAppName(DumpJobTest.class.getSimpleName());
conf.setMaster("local[*]");
conf.set("spark.driver.host", "localhost");
conf.set("hive.metastore.local", "true");
conf.set("spark.ui.enabled", "false");
conf.set("spark.sql.warehouse.dir", workingDir.toString());
conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
spark = SparkSession
.builder()
.appName(DumpJobTest.class.getSimpleName())
.config(conf)
.getOrCreate();
}
@AfterAll
public static void afterAll() throws IOException {
FileUtils.deleteDirectory(workingDir.toFile());
spark.stop();
}
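
// Prints the JSON serialization of the community map; for visual inspection only, no assertions.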
@Test
public void testMap() {
System.out.println(new Gson().toJson(map));
}
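
// Runs SelectEoscResultsJobStep1 on a dataset tagged with an EOSC Interoperability Framework and
// verifies that exactly one result is dumped and that code, label, url and semantic relation of
// its EOSC IF entry are preserved.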
@Test
public void testEOSCDump() throws Exception {
final String sourcePath = getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/eosc/eosctag.json")
.getPath();
final String communityMapPath = getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/communityMapPath/communitymap.json")
.getPath();
SelectEoscResultsJobStep1
.main(
new String[] {
"-isSparkSessionManaged", Boolean.FALSE.toString(),
"-sourcePath",
sourcePath,
"-resultTableName", "eu.dnetlib.dhp.schema.oaf.Dataset",
"-outputPath", workingDir.toString() + "/working",
"-communityMapPath", getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/communityMapPath/communitymap.json")
.getPath()
});
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
JavaRDD<Result> tmp = sc
.textFile(workingDir.toString() + "/working")
.map(item -> OBJECT_MAPPER.readValue(item, Result.class));
org.apache.spark.sql.Dataset<Result> verificationDataset = spark
.createDataset(tmp.rdd(), Encoders.bean(Result.class));
Assertions.assertEquals(1, verificationDataset.count());
Assertions.assertEquals(1, verificationDataset.filter("type = 'dataset'").count());
Assertions
.assertEquals(
1,
tmp
.filter(d -> d.getEoscIF().stream().anyMatch(c -> c.getCode().equals("EOSC::Twitter Data")))
.count());
Assertions
.assertEquals(
1,
tmp
.filter(d -> d.getEoscIF().stream().anyMatch(c -> c.getLabel().equals("EOSC::Twitter Data")))
.count());
Assertions
.assertEquals(1, tmp.filter(d -> d.getEoscIF().stream().anyMatch(c -> c.getUrl().equals(""))).count());
Assertions
.assertEquals(
1,
tmp
.filter(d -> d.getEoscIF().stream().anyMatch(c -> c.getSemanticRelation().equals("compliesWith")))
.count());
System.out.println(OBJECT_MAPPER.writeValueAsString(verificationDataset.first()));
}
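
// Dumps the Zenodo and RoHub records collected for the November 2022 review and prints the first
// mapped result of each run; no assertions, intended for manual inspection of the output.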
@Test
public void testEOSCDumpZenodoReview() throws Exception {
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
final String communityMapPath = getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/communityMapPath/communitymap.json")
.getPath();
DumpProducts dump = new DumpProducts();
dump
.run(
false, getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/eosc/input/review202211/zenodo7353841")
.getPath(),
workingDir.toString() + "/orp", getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/communityMapPath/communitymap.json")
.getPath(),
OtherResearchProduct.class);
JavaRDD<Result> tmp = sc
.textFile(workingDir.toString() + "/orp")
.map(item -> OBJECT_MAPPER.readValue(item, Result.class));
System.out.println(OBJECT_MAPPER.writeValueAsString(tmp.first()));
dump
.run(
false, getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/eosc/input/review202211/zenodo7351393")
.getPath(),
workingDir.toString() + "/soft", getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/communityMapPath/communitymap.json")
.getPath(),
Software.class);
JavaRDD<Result> tmp2 = sc
.textFile(workingDir.toString() + "/soft")
.map(item -> OBJECT_MAPPER.readValue(item, Result.class));
System.out.println(OBJECT_MAPPER.writeValueAsString(tmp2.first()));
dump
.run(
false, getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/eosc/input/review202211/zenodo7351221")
.getPath(),
workingDir.toString() + "/soft2", getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/communityMapPath/communitymap.json")
.getPath(),
Software.class);
JavaRDD<Result> tmp3 = sc
.textFile(workingDir.toString() + "/soft2")
.map(item -> OBJECT_MAPPER.readValue(item, Result.class));
System.out.println(OBJECT_MAPPER.writeValueAsString(tmp3.first()));
dump
.run(
false, getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/eosc/input/review202211/rohub")
.getPath(),
workingDir.toString() + "/orp2", getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/communityMapPath/communitymap.json")
.getPath(),
OtherResearchProduct.class);
JavaRDD<Result> tmp4 = sc
.textFile(workingDir.toString() + "/orp2")
.map(item -> OBJECT_MAPPER.readValue(item, Result.class));
System.out.println(OBJECT_MAPPER.writeValueAsString(tmp4.first()));
}
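
// Dumps a publication carrying indicator information and prints the mapped result; no assertions,
// intended for manual inspection.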
@Test
public void testEOSCDumpIndicators() throws Exception {
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
DumpProducts dump = new DumpProducts();
dump
.run(
false, getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/eosc/input/indicators/publication.json")
.getPath(),
workingDir.toString() + "/publication", getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/communityMapPath/communitymap.json")
.getPath(),
Publication.class);
JavaRDD<Result> tmp = sc
.textFile(workingDir.toString() + "/publication")
.map(item -> OBJECT_MAPPER.readValue(item, Result.class));
System.out.println(OBJECT_MAPPER.writeValueAsString(tmp.first()));
}
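
// Checks that a Map<String, List<Subject>> serializes cleanly with Jackson, even when the subject
// provenance is null.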
@Test
public void serializeMap() throws JsonProcessingException {
Map<String, List<Subject>> map = new HashMap<>();
map.put("prova", new ArrayList<>());
Subject s = new Subject();
s.setProvenance(null);
s.setValue("codiv-19");
map.get("prova").add(s);
System.out.println(OBJECT_MAPPER.writeValueAsString(map));
}
}