Miriam Baglioni 2020-07-07 18:25:12 +02:00
parent a66aa9bd83
commit 817cddfc52
1 changed file with 1 addition and 22 deletions

@@ -63,13 +63,7 @@ public class DumpJobTest {
 	}
-	// @Mock
-	// private SparkDumpCommunityProducts dumpCommunityProducts;
-	// private QueryInformationSystem queryInformationSystem;
-	// @Mock
-	// private ISLookUpService isLookUpService;
 	List<String> communityMap = Arrays
 		.asList(
@@ -128,12 +122,7 @@ public class DumpJobTest {
 			.getOrCreate();
 	}
-	// @BeforeEach
-	// public void setUp() throws ISLookUpException {
-	// lenient().when(isLookUpService.quickSearchProfile(XQUERY)).thenReturn(communityMap);
-	// lenient().when(dumpCommunityProducts.getIsLookUpService(MOCK_IS_LOOK_UP_URL)).thenReturn(isLookUpService);
-	//
-	// }
 	@AfterAll
 	public static void afterAll() throws IOException {
@@ -157,7 +146,6 @@ public class DumpJobTest {
 				"-communityMap", new Gson().toJson(map)
 			});
-		// dumpCommunityProducts.exec(MOCK_IS_LOOK_UP_URL,Boolean.FALSE, workingDir.toString()+"/dataset",sourcePath,"eu.dnetlib.dhp.schema.oaf.Dataset","eu.dnetlib.dhp.schema.dump.oaf.Dataset");
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
@@ -169,7 +157,6 @@ public class DumpJobTest {
 			.createDataset(tmp.rdd(), Encoders.bean(eu.dnetlib.dhp.schema.dump.oaf.Result.class));
 		Assertions.assertEquals(90, verificationDataset.count());
-		// verificationDataset.show(false);
 		Assertions
 			.assertTrue(
@@ -199,7 +186,6 @@ public class DumpJobTest {
 		Assertions.assertTrue(verificationDataset.filter("type = 'dataset'").count() == 90);
-		// verificationDataset.select("instance.type").show(false);
 		//TODO verify value and name of the fields for vocab related value (i.e. accessright, bestaccessright)
@@ -221,8 +207,6 @@ public class DumpJobTest {
 				"-communityMap", new Gson().toJson(map)
 			});
-		// dumpCommunityProducts.exec(MOCK_IS_LOOK_UP_URL,Boolean.FALSE, workingDir.toString()+"/dataset",sourcePath,"eu.dnetlib.dhp.schema.oaf.Dataset","eu.dnetlib.dhp.schema.dump.oaf.Dataset");
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 		JavaRDD<eu.dnetlib.dhp.schema.dump.oaf.Result> tmp = sc
@@ -257,8 +241,6 @@ public class DumpJobTest {
 				"-communityMap", new Gson().toJson(map)
 			});
-		// dumpCommunityProducts.exec(MOCK_IS_LOOK_UP_URL,Boolean.FALSE, workingDir.toString()+"/dataset",sourcePath,"eu.dnetlib.dhp.schema.oaf.Dataset","eu.dnetlib.dhp.schema.dump.oaf.Dataset");
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 		JavaRDD<eu.dnetlib.dhp.schema.dump.oaf.Result> tmp = sc
@@ -293,7 +275,6 @@ public class DumpJobTest {
 				"-communityMap", new Gson().toJson(map)
 			});
-		// dumpCommunityProducts.exec(MOCK_IS_LOOK_UP_URL,Boolean.FALSE, workingDir.toString()+"/dataset",sourcePath,"eu.dnetlib.dhp.schema.oaf.Dataset","eu.dnetlib.dhp.schema.dump.oaf.Dataset");
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
@@ -329,8 +310,6 @@ public class DumpJobTest {
 				"-communityMap", new Gson().toJson(map)
 			});
-		// dumpCommunityProducts.exec(MOCK_IS_LOOK_UP_URL,Boolean.FALSE, workingDir.toString()+"/dataset",sourcePath,"eu.dnetlib.dhp.schema.oaf.Dataset","eu.dnetlib.dhp.schema.dump.oaf.Dataset");
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 		JavaRDD<eu.dnetlib.dhp.schema.dump.oaf.Result> tmp = sc
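The assertions kept by this commit follow the usual Spark test pattern: wrap the JavaRDD produced by the dump in a typed Dataset via Encoders.bean, then verify record counts with SQL-style filters. Below is a minimal, self-contained sketch of that pattern, assuming a local SparkSession; the ResultBean class, record values, and expected counts are illustrative stand-ins for eu.dnetlib.dhp.schema.dump.oaf.Result and the real test data, not the project's own classes.

	import java.util.Arrays;

	import org.apache.spark.api.java.JavaRDD;
	import org.apache.spark.api.java.JavaSparkContext;
	import org.apache.spark.sql.Dataset;
	import org.apache.spark.sql.Encoders;
	import org.apache.spark.sql.SparkSession;
	import org.junit.jupiter.api.Assertions;

	public class VerificationSketch {

		// Illustrative bean standing in for eu.dnetlib.dhp.schema.dump.oaf.Result.
		// Encoders.bean needs a public class with a no-arg constructor and getters/setters.
		public static class ResultBean {
			private String type;

			public String getType() { return type; }
			public void setType(String type) { this.type = type; }
		}

		public static void main(String[] args) {
			SparkSession spark = SparkSession
				.builder()
				.appName("VerificationSketch")
				.master("local[*]")
				.getOrCreate();

			final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

			// In the real test the RDD is read back from the dump written to the working dir;
			// here two in-memory records stand in for it.
			ResultBean d = new ResultBean();
			d.setType("dataset");
			ResultBean p = new ResultBean();
			p.setType("publication");
			JavaRDD<ResultBean> tmp = sc.parallelize(Arrays.asList(d, p));

			// Wrap the RDD in a typed Dataset and check counts with SQL-style filters,
			// mirroring the assertions left in place by this commit.
			Dataset<ResultBean> verificationDataset = spark
				.createDataset(tmp.rdd(), Encoders.bean(ResultBean.class));

			Assertions.assertEquals(2, verificationDataset.count());
			Assertions.assertTrue(verificationDataset.filter("type = 'dataset'").count() == 1);

			spark.stop();
		}
	}

Only junit-jupiter-api is needed on the classpath for the static Assertions calls; no test runner is required to execute the sketch as a plain main method.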