
changed test to comply with the modifications

commit 5076e4f320
parent 08dbd99455
author Miriam Baglioni, 2020-07-20 17:55:18 +02:00
4 changed files with 25 additions and 65 deletions
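
For orientation, every hunk in the test class below makes the same substitution: the tests stop launching the dump through the Spark job's CLI entry point and instead call the new helper directly with a typed community map. A condensed before/after sketch assembled from the hunks that follow (MOCK_IS_LOOK_UP_URL, map, sourcePath and workingDir are fixtures defined elsewhere in the test class):

	// Before: the job was driven through main(), with the community map
	// serialized to a JSON string via Gson and passed as a CLI flag.
	SparkDumpCommunityProducts.main(new String[] {
		"-isLookUpUrl", MOCK_IS_LOOK_UP_URL,
		"-isSparkSessionManaged", Boolean.FALSE.toString(),
		"-outputPath", workingDir.toString() + "/result",
		"-sourcePath", sourcePath,
		"-resultTableName", "eu.dnetlib.dhp.schema.oaf.Dataset",
		"-communityMap", new Gson().toJson(map)
	});

	// After: a direct call, passing the CommunityMap instance and the
	// result class object instead of string flags.
	DumpProducts dump = new DumpProducts();
	dump.run(false, sourcePath, workingDir.toString() + "/result", map, Dataset.class, false);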

View File

@@ -5,9 +5,13 @@ import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.List;
+import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
+import eu.dnetlib.dhp.schema.oaf.Dataset;
+import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct;
+import eu.dnetlib.dhp.schema.oaf.Publication;
+import eu.dnetlib.dhp.schema.oaf.Software;
 import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
@@ -19,7 +23,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.gson.Gson;
 //@ExtendWith(MockitoExtension.class)
 public class DumpJobTest {
@@ -34,7 +37,7 @@ public class DumpJobTest {
 	private static final Logger log = LoggerFactory.getLogger(DumpJobTest.class);
-	private static HashMap<String, String> map = new HashMap<>();
+	private static CommunityMap map = new CommunityMap();
 	static {
 		map.put("egi", "EGI Federation");
@@ -127,20 +130,14 @@ public class DumpJobTest {
 	}
 	@Test
-	public void testDataset() throws Exception {
+	public void testDataset() {
 		final String sourcePath = getClass()
 			.getResource("/eu/dnetlib/dhp/oa/graph/dump/resultDump/dataset.json")
 			.getPath();
-		SparkDumpCommunityProducts.main(new String[] {
-			"-isLookUpUrl", MOCK_IS_LOOK_UP_URL,
-			"-isSparkSessionManaged", Boolean.FALSE.toString(),
-			"-outputPath", workingDir.toString() + "/result",
-			"-sourcePath", sourcePath,
-			"-resultTableName", "eu.dnetlib.dhp.schema.oaf.Dataset",
-			"-communityMap", new Gson().toJson(map)
-		});
+		DumpProducts dump = new DumpProducts();
+		dump.run(false, sourcePath, workingDir.toString() + "/result", map, Dataset.class, false);
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
@@ -186,20 +183,13 @@ public class DumpJobTest {
 	}
 	@Test
-	public void testPublication() throws Exception {
+	public void testPublication() {
 		final String sourcePath = getClass()
 			.getResource("/eu/dnetlib/dhp/oa/graph/dump/resultDump/publication.json")
 			.getPath();
-		SparkDumpCommunityProducts.main(new String[] {
-			"-isLookUpUrl", MOCK_IS_LOOK_UP_URL,
-			"-isSparkSessionManaged", Boolean.FALSE.toString(),
-			"-outputPath", workingDir.toString() + "/result",
-			"-sourcePath", sourcePath,
-			"-resultTableName", "eu.dnetlib.dhp.schema.oaf.Publication",
-			"-communityMap", new Gson().toJson(map)
-		});
+		DumpProducts dump = new DumpProducts();
+		dump.run(false, sourcePath, workingDir.toString() + "/result", map, Publication.class, false);
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
@@ -220,20 +210,15 @@ public class DumpJobTest {
 	}
 	@Test
-	public void testSoftware() throws Exception {
+	public void testSoftware() {
 		final String sourcePath = getClass()
 			.getResource("/eu/dnetlib/dhp/oa/graph/dump/resultDump/software.json")
 			.getPath();
-		SparkDumpCommunityProducts.main(new String[] {
-			"-isLookUpUrl", MOCK_IS_LOOK_UP_URL,
-			"-isSparkSessionManaged", Boolean.FALSE.toString(),
-			"-outputPath", workingDir.toString() + "/result",
-			"-sourcePath", sourcePath,
-			"-resultTableName", "eu.dnetlib.dhp.schema.oaf.Software",
-			"-communityMap", new Gson().toJson(map)
-		});
+		DumpProducts dump = new DumpProducts();
+		dump.run(false, sourcePath, workingDir.toString() + "/result", map, Software.class, false);
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
@@ -254,20 +239,15 @@ public class DumpJobTest {
 	}
 	@Test
-	public void testORP() throws Exception {
+	public void testORP() {
 		final String sourcePath = getClass()
 			.getResource("/eu/dnetlib/dhp/oa/graph/dump/resultDump/orp.json")
 			.getPath();
-		SparkDumpCommunityProducts.main(new String[] {
-			"-isLookUpUrl", MOCK_IS_LOOK_UP_URL,
-			"-isSparkSessionManaged", Boolean.FALSE.toString(),
-			"-outputPath", workingDir.toString() + "/result",
-			"-sourcePath", sourcePath,
-			"-resultTableName", "eu.dnetlib.dhp.schema.oaf.OtherResearchProduct",
-			"-communityMap", new Gson().toJson(map)
-		});
+		DumpProducts dump = new DumpProducts();
+		dump.run(false, sourcePath, workingDir.toString() + "/result", map, OtherResearchProduct.class, false);
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
@@ -288,19 +268,13 @@ public class DumpJobTest {
 	}
 	@Test
-	public void testRecord() throws Exception {
+	public void testRecord() {
 		final String sourcePath = getClass()
 			.getResource("/eu/dnetlib/dhp/oa/graph/dump/resultDump/singelRecord_pub.json")
 			.getPath();
-		SparkDumpCommunityProducts.main(new String[] {
-			"-isLookUpUrl", MOCK_IS_LOOK_UP_URL,
-			"-isSparkSessionManaged", Boolean.FALSE.toString(),
-			"-outputPath", workingDir.toString() + "/result",
-			"-sourcePath", sourcePath,
-			"-resultTableName", "eu.dnetlib.dhp.schema.oaf.Publication",
-			"-communityMap", new Gson().toJson(map)
-		});
+		DumpProducts dump = new DumpProducts();
+		dump.run(false, sourcePath, workingDir.toString() + "/result", map, Publication.class, false);
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
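
All five tests in this file now share the same six-argument call, which is enough to sketch the helper's probable shape. Parameter names and the meaning of the final boolean are guesses; only the argument order and types come from the call sites above, and Result is assumed to be the common supertype of Dataset, Publication, Software and OtherResearchProduct in eu.dnetlib.dhp.schema.oaf:

	import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
	import eu.dnetlib.dhp.schema.oaf.Result;

	// Inferred, not authoritative: the helper the tests now call.
	public class DumpProducts {
		public void run(
				boolean isSparkSessionManaged,      // tests pass false, mirroring the old -isSparkSessionManaged flag
				String inputPath,                   // was -sourcePath
				String outputPath,                  // was -outputPath
				CommunityMap communityMap,          // was -communityMap, previously Gson-serialized to JSON
				Class<? extends Result> inputClazz, // was -resultTableName, previously a class-name string
				boolean graph) {                    // purpose not visible in this diff
			// would run the dump Spark job in-process, bypassing CLI argument parsing
		}
	}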

View File

@@ -7,7 +7,6 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
-import org.apache.neethi.Assertion;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
@@ -15,8 +14,6 @@ import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;
 import org.mockito.junit.jupiter.MockitoExtension;
-import com.google.gson.Gson;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;

View File

@@ -8,6 +8,7 @@ import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
+import eu.dnetlib.dhp.oa.graph.dump.community.SparkSplitForCommunity;
 import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;

View File

@@ -3,26 +3,14 @@ package eu.dnetlib.dhp.oa.graph.dump;
 import java.io.*;
 import java.io.File;
-import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import org.apache.commons.io.IOUtils;
+import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.*;
-import org.apache.spark.SparkConf;
-import org.apache.spark.sql.SparkSession;
-import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
-import com.google.gson.Gson;
-import eu.dnetlib.dhp.oa.graph.dump.zenodo.*;
-import eu.dnetlib.dhp.schema.dump.oaf.*;
 public class ZenodoUploadTest {
 	private static String workingDir;