forked from D-Net/dnet-hadoop
[DUMP DELTA PROJECTS] refactoring
parent 1d1fe3b151
commit 71744a1f52
eu/dnetlib/dhp/oa/graph/dump/projectssubset/ProjectsSubsetSparkJob.java
@@ -1,80 +1,79 @@
 package eu.dnetlib.dhp.oa.graph.dump.projectssubset;
 
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
 
 import java.io.Serializable;
-import java.util.List;
 import java.util.Objects;
 import java.util.Optional;
-import java.util.stream.Collectors;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.function.FilterFunction;
 import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.SaveMode;
 import org.apache.spark.sql.SparkSession;
-import org.jetbrains.annotations.NotNull;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.oa.graph.dump.Utils;
-import eu.dnetlib.dhp.schema.dump.oaf.community.Funder;
 import eu.dnetlib.dhp.schema.dump.oaf.graph.Project;
 import scala.Tuple2;
 
 public class ProjectsSubsetSparkJob implements Serializable {
 
 	private static final Logger log = LoggerFactory.getLogger(ProjectsSubsetSparkJob.class);
 
 	public static void main(String[] args) throws Exception {
 		String jsonConfiguration = IOUtils
 			.toString(
 				ProjectsSubsetSparkJob.class
 					.getResourceAsStream(
 						"/eu/dnetlib/dhp/oa/graph/dump/project_subset_parameters.json"));
 
 		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
 		parser.parseArgument(args);
 
 		Boolean isSparkSessionManaged = Optional
 			.ofNullable(parser.get("isSparkSessionManaged"))
 			.map(Boolean::valueOf)
 			.orElse(Boolean.TRUE);
 		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
 
 		final String inputPath = parser.get("sourcePath");
 		log.info("inputPath: {}", inputPath);
 
 		final String outputPath = parser.get("outputPath");
 		log.info("outputPath: {}", outputPath);
 
 		final String projectListPath = parser.get("projectListPath");
 		log.info("projectListPath: {}", projectListPath);
 
 		SparkConf conf = new SparkConf();
 
 		runWithSparkSession(
 			conf,
 			isSparkSessionManaged,
 			spark -> {
 				Utils.removeOutputDir(spark, outputPath);
 				getNewProjectList(spark, inputPath, outputPath, projectListPath);
 			});
 	}
 
 	private static void getNewProjectList(SparkSession spark, String inputPath, String outputPath,
 		String projectListPath) {
 
 		Dataset<String> projectList = spark.read().textFile(projectListPath);
-		// projectList.show(false);
 
 		Dataset<Project> projects;
 		projects = Utils.readPath(spark, inputPath, Project.class);
 
 		projects
 			.joinWith(projectList, projects.col("id").equalTo(projectList.col("value")), "left")
 			.map((MapFunction<Tuple2<Project, String>, Project>) t2 -> {
 				if (Optional.ofNullable(t2._2()).isPresent())
 					return null;
 				return t2._1();
 			}, Encoders.bean(Project.class))
 			.filter(Objects::nonNull)
 			.write()
 			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
 			.json(outputPath);
 
 		Utils
 			.readPath(spark, outputPath, Project.class)
 			.map((MapFunction<Project, String>) p -> p.getId(), Encoders.STRING())
 			.write()
 			.mode(SaveMode.Append)
 			.option("compression", "gzip")
 			.text(projectListPath);
 	}
 }
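The core of getNewProjectList is effectively a left-anti join: projects are left-joined against the id list, rows that found a match are mapped to null, and the nulls are filtered away, so only projects not yet in the list survive. A minimal local-mode sketch of the same pattern (class name and toy data are illustrative, not part of the repository):

import java.util.Arrays;
import java.util.Objects;

import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

import scala.Tuple2;

public class LeftJoinDeltaDemo {

	public static void main(String[] args) {
		SparkSession spark = SparkSession
			.builder()
			.master("local[*]")
			.appName("LeftJoinDeltaDemo")
			.getOrCreate();

		// Projects in the current dump, and ids already emitted by a previous run.
		Dataset<String> projects = spark
			.createDataset(Arrays.asList("p1", "p2", "p3"), Encoders.STRING());
		Dataset<String> alreadyListed = spark
			.createDataset(Arrays.asList("p2"), Encoders.STRING());

		// Same pattern as getNewProjectList: left-join on the id, map matched
		// rows to null, filter the nulls out. What remains is the delta.
		Dataset<String> delta = projects
			.joinWith(alreadyListed, projects.col("value").equalTo(alreadyListed.col("value")), "left")
			.map(
				(MapFunction<Tuple2<String, String>, String>) t2 -> t2._2() == null ? t2._1() : null,
				Encoders.STRING())
			.filter((FilterFunction<String>) Objects::nonNull);

		delta.show(); // expected rows: p1, p3
		spark.stop();
	}
}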
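Because the second write appends the dumped ids back to projectListPath with SaveMode.Append, every run enlarges the id list and subsequent runs dump only projects not yet covered, which is what makes the delta dump incremental. A hedged invocation sketch follows; the flag names mirror the parser.get(...) keys seen above, but the real option names live in project_subset_parameters.json, so treat them (and the paths) as hypothetical:

public class ProjectsSubsetLocalRun {

	public static void main(String[] args) throws Exception {
		// All flag names and paths below are illustrative assumptions; the
		// actual option names are defined in project_subset_parameters.json.
		ProjectsSubsetSparkJob
			.main(
				new String[] {
					"-isSparkSessionManaged", Boolean.TRUE.toString(), // let the job manage a local session
					"-sourcePath", "/tmp/graph/project", // serialized Project records to subset
					"-outputPath", "/tmp/dump/project_delta", // where the new projects are written
					"-projectListPath", "/tmp/dump/projectIds" // id list, grows by the dumped ids each run
				});
	}
}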