dnet-hadoop/dhp-workflows/dhp-graph-mapper/src/main/java/eu/dnetlib/dhp/graph/SparkGraphImporterJob.java

package eu.dnetlib.dhp.graph;

import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

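/**
 * Spark job that imports the serialized graph entities into Hive: for each entity type registered
 * in {@link GraphMappingUtils#types} it reads the JSON dump found under {@code sourcePath/<type>}
 * and saves it as a table in the target Hive database, recreating the database first.
 * <p>
 * Expected named arguments (declared in input_graph_parameters.json): {@code master},
 * {@code sourcePath}, {@code hive_db_name} and {@code hive_metastore_uris}.
 */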
public class SparkGraphImporterJob {

    public static void main(String[] args) throws Exception {
        final ArgumentApplicationParser parser = new ArgumentApplicationParser(
                IOUtils.toString(SparkGraphImporterJob.class.getResourceAsStream(
                        "/eu/dnetlib/dhp/graph/input_graph_parameters.json")));
        parser.parseArgument(args);

        new SparkGraphImporterJob().run(parser);
    }
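
    /**
     * Opens a Hive-enabled SparkSession from the parsed arguments and delegates to
     * {@link #runWith(SparkSession, String, String)}; the try-with-resources block guarantees
     * the session is closed once the import completes.
     */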
    private void run(ArgumentApplicationParser parser) {
        try (SparkSession spark = getSparkSession(parser)) {
            final String inputPath = parser.get("sourcePath");
            final String hiveDbName = parser.get("hive_db_name");

            runWith(spark, inputPath, hiveDbName);
        }
    }
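
    /**
     * Performs the actual import: recreates the target Hive database, then maps each entity type
     * registered in {@link GraphMappingUtils#types} to a Dataset of beans and saves it as the
     * Hive table {@code hiveDbName.<entityName>}.
     */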
    // public for testing
    public void runWith(SparkSession spark, String inputPath, String hiveDbName) {
        spark.sql(String.format("DROP DATABASE IF EXISTS %s CASCADE", hiveDbName));
        spark.sql(String.format("CREATE DATABASE IF NOT EXISTS %s", hiveDbName));

        final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

        // Read the input files (one subdirectory per entity type), convert each JSON record into
        // a bean of the corresponding model class and save the resulting Dataset as a Hive table.
        // The ObjectMapper is instantiated inside the lambda because it is not Serializable and
        // would otherwise have to be shipped with the Spark closure.
        GraphMappingUtils.types.forEach((name, clazz) -> spark
                .createDataset(sc.textFile(inputPath + "/" + name)
                        .map(s -> new ObjectMapper().readValue(s, clazz))
                        .rdd(), Encoders.bean(clazz))
                .write()
                .mode(SaveMode.Overwrite)
                .saveAsTable(hiveDbName + "." + name));
    }
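
    /**
     * Builds a Hive-enabled SparkSession configured from the job arguments: both the Spark master
     * and the Hive metastore URIs are read from the parsed parameters.
     */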
    private static SparkSession getSparkSession(ArgumentApplicationParser parser) {
        SparkConf conf = new SparkConf();
        conf.set("hive.metastore.uris", parser.get("hive_metastore_uris"));

        return SparkSession
                .builder()
                .appName(SparkGraphImporterJob.class.getSimpleName())
                .master(parser.get("master"))
                .config(conf)
                .enableHiveSupport()
                .getOrCreate();
    }
}