dnet-hadoop/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/SplitForCommunityTest.java

package eu.dnetlib.dhp.oa.graph.dump;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
import eu.dnetlib.dhp.oa.graph.dump.community.CommunitySplit;
import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;

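/**
 * Verifies the {@link CommunitySplit} step: the dump of results associated to research communities is split
 * into one sub-directory per community identifier.
 */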
public class SplitForCommunityTest {

	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	private static SparkSession spark;

	private static Path workingDir;

	private static final Logger log = LoggerFactory.getLogger(SplitForCommunityTest.class);

	private static final CommunityMap map = new CommunityMap();

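	// fixture: community identifier -> community long name, covering the communities referenced by the test input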
	static {
		map.put("egi", "EGI Federation");
		map.put("fet-fp7", "FET FP7");
		map.put("fet-h2020", "FET H2020");
		map.put("clarin", "CLARIN");
		map.put("fam", "Fisheries and Aquaculture Management");
		map.put("ni", "Neuroinformatics");
		map.put("mes", "European Marine Science");
		map.put("instruct", "Instruct-Eric");
		map.put("rda", "Research Data Alliance");
		map.put("elixir-gr", "ELIXIR GR");
		map.put("aginfra", "Agricultural and Food Sciences");
		map.put("dariah", "DARIAH EU");
		map.put("risis", "RISIS");
		map.put("ee", "SDSN - Greece");
		map.put("oa-pg", "EC Post-Grant Open Access Pilot");
		map.put("beopen", "Transport Research");
		map.put("euromarine", "Euromarine");
		map.put("ifremer", "Ifremer");
		map.put("dh-ch", "Digital Humanities and Cultural Heritage");
		map.put("science-innovation-policy", "Science and Innovation Policy Studies");
		map.put("covid-19", "COVID-19");
		map.put("enermaps", "Energy Research");
		map.put("epos", "EPOS");
	}
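
	// set up a local Spark session backed by a temporary working directory, used both as warehouse and as output location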
	@BeforeAll
	public static void beforeAll() throws IOException {
		workingDir = Files.createTempDirectory(SplitForCommunityTest.class.getSimpleName());
		log.info("using work dir {}", workingDir);

		SparkConf conf = new SparkConf();
		conf.setAppName(SplitForCommunityTest.class.getSimpleName());

		conf.setMaster("local[*]");
		conf.set("spark.driver.host", "localhost");
		conf.set("hive.metastore.local", "true");
		conf.set("spark.ui.enabled", "false");
		conf.set("spark.sql.warehouse.dir", workingDir.toString());
		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

		spark = SparkSession
			.builder()
			.appName(SplitForCommunityTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();
	}

	@AfterAll
	public static void afterAll() throws IOException {
		// stop the Spark session before removing the working directory it writes to
		spark.stop();
		FileUtils.deleteDirectory(workingDir.toFile());
	}
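
	// runs the CommunitySplit step over the test input and verifies a sample of the produced community dumps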
	@Test
	public void test1() {

		final String sourcePath = getClass()
			.getResource("/eu/dnetlib/dhp/oa/graph/dump/splitForCommunity")
			.getPath();

		CommunitySplit split = new CommunitySplit();

		split.run(false, sourcePath, workingDir.toString() + "/split", map);
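
		// read the split results back from each community sub-directory for verification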
		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
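
		// dh-ch community: 19 results expected, among them a known deduplicated record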
		JavaRDD<CommunityResult> tmp = sc
			.textFile(workingDir.toString() + "/split/dh-ch")
			.map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));

		org.apache.spark.sql.Dataset<CommunityResult> verificationDataset = spark
			.createDataset(tmp.rdd(), Encoders.bean(CommunityResult.class));

		Assertions.assertEquals(19, verificationDataset.count());

		Assertions
			.assertEquals(
				1, verificationDataset.filter("id = '50|dedup_wf_001::51b88f272ba9c3bb181af64e70255a80'").count());
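
		// egi community: a single result expected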
		tmp = sc
			.textFile(workingDir.toString() + "/split/egi")
			.map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));

		verificationDataset = spark
			.createDataset(tmp.rdd(), Encoders.bean(CommunityResult.class));

		Assertions.assertEquals(1, verificationDataset.count());

		Assertions
			.assertEquals(
				1, verificationDataset.filter("id = '50|dedup_wf_001::e4805d005bfab0cd39a1642cbf477fdb'").count());
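
		// ni community: 5 results expected, including a known DataCite record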
		tmp = sc
			.textFile(workingDir.toString() + "/split/ni")
			.map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));

		verificationDataset = spark
			.createDataset(tmp.rdd(), Encoders.bean(CommunityResult.class));

		Assertions.assertEquals(5, verificationDataset.count());

		Assertions
			.assertEquals(
				1, verificationDataset.filter("id = '50|datacite____::6b1e3a2fa60ed8c27317a66d6357f795'").count());
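
		// science-innovation-policy community: 4 results expected, all with known identifiers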
		tmp = sc
			.textFile(workingDir.toString() + "/split/science-innovation-policy")
			.map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));

		verificationDataset = spark
			.createDataset(tmp.rdd(), Encoders.bean(CommunityResult.class));

		Assertions.assertEquals(4, verificationDataset.count());

		Assertions
			.assertEquals(
				1, verificationDataset.filter("id = '50|dedup_wf_001::0347b1cd516fc59e41ba92e0d74e4e9f'").count());
		Assertions
			.assertEquals(
				1, verificationDataset.filter("id = '50|dedup_wf_001::1432beb6171baa5da8a85a7f99545d69'").count());
		Assertions
			.assertEquals(
				1, verificationDataset.filter("id = '50|dedup_wf_001::1c8bd19e633976e314b88ce5c3f92d69'").count());
		Assertions
			.assertEquals(
				1, verificationDataset.filter("id = '50|dedup_wf_001::51b88f272ba9c3bb181af64e70255a80'").count());
	}
}