From 817cddfc520073b99d2230ae6f0a73aa1503bec2 Mon Sep 17 00:00:00 2001
From: "miriam.baglioni"
Date: Tue, 7 Jul 2020 18:25:12 +0200
Subject: [PATCH] -

---
 .../dhp/oa/graph/dump/DumpJobTest.java        | 23 +------------------
 1 file changed, 1 insertion(+), 22 deletions(-)

diff --git a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/DumpJobTest.java b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/DumpJobTest.java
index e21b71de8..d52be7056 100644
--- a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/DumpJobTest.java
+++ b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/dump/DumpJobTest.java
@@ -63,13 +63,7 @@ public class DumpJobTest {
 
 	}
 
-//	@Mock
-//	private SparkDumpCommunityProducts dumpCommunityProducts;
-
 	//	private QueryInformationSystem queryInformationSystem;
-
-//	@Mock
-//	private ISLookUpService isLookUpService;
 
 	List communityMap = Arrays
 		.asList(
@@ -128,12 +122,7 @@ public class DumpJobTest {
 			.getOrCreate();
 	}
 
-//	@BeforeEach
-//	public void setUp() throws ISLookUpException {
-//		lenient().when(isLookUpService.quickSearchProfile(XQUERY)).thenReturn(communityMap);
-//		lenient().when(dumpCommunityProducts.getIsLookUpService(MOCK_IS_LOOK_UP_URL)).thenReturn(isLookUpService);
-//
-//	}
+
 
 	@AfterAll
 	public static void afterAll() throws IOException {
@@ -157,7 +146,6 @@ public class DumpJobTest {
 				"-communityMap", new Gson().toJson(map)
 			});
 
-//		dumpCommunityProducts.exec(MOCK_IS_LOOK_UP_URL,Boolean.FALSE, workingDir.toString()+"/dataset",sourcePath,"eu.dnetlib.dhp.schema.oaf.Dataset","eu.dnetlib.dhp.schema.dump.oaf.Dataset");
 
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 
@@ -169,7 +157,6 @@ public class DumpJobTest {
 			.createDataset(tmp.rdd(), Encoders.bean(eu.dnetlib.dhp.schema.dump.oaf.Result.class));
 
 		Assertions.assertEquals(90, verificationDataset.count());
-		// verificationDataset.show(false);
 
 		Assertions
 			.assertTrue(
@@ -199,7 +186,6 @@ public class DumpJobTest {
 
 		Assertions.assertTrue(verificationDataset.filter("type = 'dataset'").count() == 90);
 
-		// verificationDataset.select("instance.type").show(false);
 		//TODO verify value and name of the fields for vocab related value (i.e.
 		// accessright, bestaccessright)
 
@@ -221,8 +207,6 @@ public class DumpJobTest {
 				"-communityMap", new Gson().toJson(map)
 			});
 
-//		dumpCommunityProducts.exec(MOCK_IS_LOOK_UP_URL,Boolean.FALSE, workingDir.toString()+"/dataset",sourcePath,"eu.dnetlib.dhp.schema.oaf.Dataset","eu.dnetlib.dhp.schema.dump.oaf.Dataset");
-
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 
 		JavaRDD tmp = sc
@@ -257,8 +241,6 @@ public class DumpJobTest {
 				"-communityMap", new Gson().toJson(map)
 			});
 
-//		dumpCommunityProducts.exec(MOCK_IS_LOOK_UP_URL,Boolean.FALSE, workingDir.toString()+"/dataset",sourcePath,"eu.dnetlib.dhp.schema.oaf.Dataset","eu.dnetlib.dhp.schema.dump.oaf.Dataset");
-
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 
 		JavaRDD tmp = sc
@@ -293,7 +275,6 @@ public class DumpJobTest {
 				"-communityMap", new Gson().toJson(map)
 			});
 
-//		dumpCommunityProducts.exec(MOCK_IS_LOOK_UP_URL,Boolean.FALSE, workingDir.toString()+"/dataset",sourcePath,"eu.dnetlib.dhp.schema.oaf.Dataset","eu.dnetlib.dhp.schema.dump.oaf.Dataset");
 
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 
@@ -329,8 +310,6 @@ public class DumpJobTest {
 				"-communityMap", new Gson().toJson(map)
 			});
 
-//		dumpCommunityProducts.exec(MOCK_IS_LOOK_UP_URL,Boolean.FALSE, workingDir.toString()+"/dataset",sourcePath,"eu.dnetlib.dhp.schema.oaf.Dataset","eu.dnetlib.dhp.schema.dump.oaf.Dataset");
-
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 
 		JavaRDD tmp = sc