package eu.dnetlib.dhp.actionmanager.project;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme;
import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProject;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.HdfsSupport;

import scala.Tuple2;
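
/**
 * Spark job that prepares project records for the action manager step: it reads the CSVProject
 * records from projectPath (one JSON object per line), fans out records whose programme field
 * holds several semicolon-separated programme codes into one record per code, and writes the
 * result to outputPath as JSON text.
 * <p>
 * Illustrative example (values are made up, field names as in CSVProject): an input record with
 * id = "894593" and programme = "H2020-EU.1.1.;H2020-EU.1.3." produces two output records, one
 * per programme code.
 */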
public class PrepareProjects {

	private static final Logger log = LoggerFactory.getLogger(PrepareProjects.class);
	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
	private static final HashMap<String, CSVProgramme> programmeMap = new HashMap<>();

	public static void main(String[] args) throws Exception {
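		// load the parameter definitions bundled with the jar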
		String jsonConfiguration = IOUtils
			.toString(
				PrepareProjects.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/actionmanager/project/prepare_project_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);
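
		// decide whether this job creates and closes its own SparkSession (defaults to true)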
		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);

		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String projectPath = parser.get("projectPath");
		log.info("projectPath: {}", projectPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		SparkConf conf = new SparkConf();
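
		// wipe any previous output, then run the preparation step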
		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				removeOutputDir(spark, outputPath);
				exec(spark, projectPath, outputPath);
			});
	}
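
	/**
	 * Removes the given path from HDFS, if present, so the job can write fresh output.
	 */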
	private static void removeOutputDir(SparkSession spark, String path) {
		HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration());
	}
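
	/**
	 * A project can participate in more than one programme: the programme field of the CSV dump
	 * holds a semicolon-separated list of programme codes. This step fans each such record out
	 * into one CSVProject per programme code, so that every output record carries a single code.
	 */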
	private static void exec(SparkSession spark, String projectPath, String outputPath) {
		Dataset<CSVProject> project = readPath(spark, projectPath, CSVProject.class);

		project
			.toJavaRDD()
			.flatMap(p -> {
				List<CSVProject> csvProjectList = new ArrayList<>();
				String[] programme = p.getProgramme().split(";");
				if (programme.length > 1) {
					String id = p.getId();
					for (int i = 0; i < programme.length; i++) {
						CSVProject csvProject = new CSVProject();
						csvProject.setProgramme(programme[i]);
						csvProject.setId(id);
						csvProjectList.add(csvProject);
					}
				} else {
					csvProjectList.add(p);
				}

				return csvProjectList.iterator();
			})
			.map(p -> OBJECT_MAPPER.writeValueAsString(p))
			.saveAsTextFile(outputPath);
	}
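
	/**
	 * Reads a text file of JSON-serialized records from inputPath and maps each line onto the
	 * given bean class, returning a typed Dataset.
	 */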
	public static <R> Dataset<R> readPath(
		SparkSession spark, String inputPath, Class<R> clazz) {
		return spark
			.read()
			.textFile(inputPath)
			.map((MapFunction<String, R>) value -> OBJECT_MAPPER.readValue(value, clazz), Encoders.bean(clazz));
	}
}