This commit is contained in:
Miriam Baglioni 2020-05-20 10:29:57 +02:00
parent faed7521bf
commit 5e9c9fa87c
2 changed files with 3 additions and 5 deletions

View File

@@ -73,7 +73,7 @@ public class PrepareProgrammeTest {
"-isSparkSessionManaged", "-isSparkSessionManaged",
Boolean.FALSE.toString(), Boolean.FALSE.toString(),
"-programmePath", "-programmePath",
getClass().getResource("/eu/dnetlib/dhp/actionmanager/whole_programme.json").getPath(), getClass().getResource("/eu/dnetlib/dhp/actionmanager/whole_programme.json.gz").getPath(),
"-outputPath", "-outputPath",
workingDir.toString() + "/preparedProgramme" workingDir.toString() + "/preparedProgramme"
}); });

View File

@@ -9,8 +9,6 @@ import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf; import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Assertions;
@@ -72,7 +70,7 @@ public class SparkUpdateProjectTest {
"-isSparkSessionManaged", "-isSparkSessionManaged",
Boolean.FALSE.toString(), Boolean.FALSE.toString(),
"-programmePath", "-programmePath",
getClass().getResource("/eu/dnetlib/dhp/actionmanager/preparedProgramme_whole.json").getPath(), getClass().getResource("/eu/dnetlib/dhp/actionmanager/preparedProgramme_whole.json.gz").getPath(),
"-projectPath", "-projectPath",
getClass().getResource("/eu/dnetlib/dhp/actionmanager/projects_subset.json").getPath(), getClass().getResource("/eu/dnetlib/dhp/actionmanager/projects_subset.json").getPath(),
"-outputPath", "-outputPath",
@@ -85,7 +83,7 @@ public class SparkUpdateProjectTest {
.textFile(workingDir.toString() + "/actionSet") .textFile(workingDir.toString() + "/actionSet")
.map(item -> OBJECT_MAPPER.readValue(item, Project.class)); .map(item -> OBJECT_MAPPER.readValue(item, Project.class));
Assertions.assertEquals(14, tmp.count()); Assertions.assertEquals(16, tmp.count());
// Dataset<CSVProgramme> verificationDataset = spark.createDataset(tmp.rdd(), Encoders.bean(CSVProgramme.class)); // Dataset<CSVProgramme> verificationDataset = spark.createDataset(tmp.rdd(), Encoders.bean(CSVProgramme.class));
// //