added test for prepare projects step

Miriam Baglioni 2020-05-20 13:53:08 +02:00
parent 5e0e554000
commit 67ba4fde57
5 changed files with 94 additions and 0 deletions


@@ -0,0 +1,94 @@
package eu.dnetlib.dhp.actionmanager.project;

import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme;
import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProject;
import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class PrepareProjectTest {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final ClassLoader cl = PrepareProjectTest.class
.getClassLoader();
private static SparkSession spark;
private static Path workingDir;
private static final Logger log = LoggerFactory
.getLogger(PrepareProjectTest.class);
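
// set up a local Spark session and a temporary working directory shared by all tests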
@BeforeAll
public static void beforeAll() throws IOException {
workingDir = Files
.createTempDirectory(PrepareProjectTest.class.getSimpleName());
log.info("using work dir {}", workingDir);
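// local-mode Spark configuration: UI disabled, warehouse directories under the temp working dir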
SparkConf conf = new SparkConf();
conf.setAppName(PrepareProjectTest.class.getSimpleName());
conf.setMaster("local[*]");
conf.set("spark.driver.host", "localhost");
conf.set("hive.metastore.local", "true");
conf.set("spark.ui.enabled", "false");
conf.set("spark.sql.warehouse.dir", workingDir.toString());
conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
spark = SparkSession
.builder()
.appName(PrepareProjectTest.class.getSimpleName())
.config(conf)
.getOrCreate();
}

@AfterAll
public static void afterAll() throws IOException {
FileUtils.deleteDirectory(workingDir.toFile());
spark.stop();
}
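
// run PrepareProjects on a small subset of projects and verify the prepared output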
@Test
public void numberDistinctProgrammeTest() throws Exception {
PrepareProjects
.main(
new String[] {
"-isSparkSessionManaged",
Boolean.FALSE.toString(),
"-projectPath",
getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/projects_subset.json").getPath(),
"-outputPath",
workingDir.toString() + "/preparedProjects"
});
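// read the prepared projects back from the working directory and parse each JSON line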
final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
JavaRDD<CSVProject> tmp = sc
.textFile(workingDir.toString() + "/preparedProjects")
.map(item -> OBJECT_MAPPER.readValue(item, CSVProject.class));
Assertions.assertEquals(20, tmp.count());
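// verify that no prepared project has an empty id or an empty programme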
Dataset<CSVProject> verificationDataset = spark.createDataset(tmp.rdd(), Encoders.bean(CSVProject.class));
Assertions.assertEquals(0, verificationDataset.filter("length(id) = 0").count());
Assertions.assertEquals(0, verificationDataset.filter("length(programme) = 0").count());
}
}