package eu.dnetlib.dhp.actionmanager.project;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.actionmanager.project.utils.model.CSVProject;
import eu.dnetlib.dhp.actionmanager.project.utils.model.Project;

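/**
 * Tests for the {@link PrepareProjects} preparation step: one test verifies the
 * number and shape of the prepared projects, the other that the projects JSON
 * resource round-trips through Jackson and Spark.
 */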
public class PrepareProjectTest {

	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	private static final ClassLoader cl = PrepareProjectTest.class.getClassLoader();

	private static SparkSession spark;

	private static Path workingDir;

	private static final Logger log = LoggerFactory.getLogger(PrepareProjectTest.class);

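	/**
	 * Creates a local Spark session writing to a temporary working directory;
	 * the directory is removed and the session stopped in {@link #afterAll()}.
	 */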
	@BeforeAll
	public static void beforeAll() throws IOException {
		workingDir = Files.createTempDirectory(PrepareProjectTest.class.getSimpleName());
		log.info("using work dir {}", workingDir);

		SparkConf conf = new SparkConf();
		conf.setAppName(PrepareProjectTest.class.getSimpleName());
		conf.setMaster("local[*]");
		conf.set("spark.driver.host", "localhost");
		conf.set("hive.metastore.local", "true");
		conf.set("spark.ui.enabled", "false");
		conf.set("spark.sql.warehouse.dir", workingDir.toString());
		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

		spark = SparkSession
			.builder()
			.appName(PrepareProjectTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();
	}

	@AfterAll
	public static void afterAll() throws IOException {
		FileUtils.deleteDirectory(workingDir.toFile());
		spark.stop();
	}

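	/**
	 * Runs {@link PrepareProjects#main(String[])} on the projects.json and dbProject
	 * test resources and checks that 8 distinct prepared projects are produced,
	 * none of them with an empty id or programme.
	 */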
	@Test
	void numberDistinctProjectTest() throws Exception {
		PrepareProjects
			.main(
				new String[] {
					"-isSparkSessionManaged",
					Boolean.FALSE.toString(),
					"-projectPath",
					getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/projects.json").getPath(),
					"-outputPath",
					workingDir.toString() + "/preparedProjects",
					"-dbProjectPath",
					getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/dbProject").getPath()
				});

		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

		JavaRDD<CSVProject> tmp = sc
			.textFile(workingDir.toString() + "/preparedProjects")
			.map(item -> OBJECT_MAPPER.readValue(item, CSVProject.class));

		Assertions.assertEquals(8, tmp.count());

		Dataset<CSVProject> verificationDataset = spark.createDataset(tmp.rdd(), Encoders.bean(CSVProject.class));

		Assertions.assertEquals(0, verificationDataset.filter("length(id) = 0").count());
		Assertions.assertEquals(0, verificationDataset.filter("length(programme) = 0").count());
	}

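	/**
	 * Checks that the projects.json resource, a plain (non multiline) JSON array,
	 * can be mapped to {@link Project} beans, serialized back to JSON and
	 * parallelized into a Spark RDD.
	 */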
	@Test
	void readJsonNotMultiline() throws IOException {
		String projects = IOUtils
			.toString(
				PrepareProjectTest.class
					.getResourceAsStream("/eu/dnetlib/dhp/actionmanager/project/projects.json"));

		// the resource is a single JSON array, read in one shot
		List<Project> a = OBJECT_MAPPER.readValue(projects, new TypeReference<List<Project>>() {
		});

		// every project must serialize back to JSON; fail the test instead of
		// swallowing the exception with a stack trace
		a.forEach(p -> {
			try {
				OBJECT_MAPPER.writeValueAsString(p);
			} catch (JsonProcessingException e) {
				Assertions.fail(e.getMessage());
			}
		});

		JavaRDD<Project> b = new JavaSparkContext(spark.sparkContext()).parallelize(a);
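
		// minimal extra check, not in the original test: force the parallelized
		// RDD to materialize, so a distribution failure would surface here
		Assertions.assertEquals(a.size(), b.count());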
	}
}