forked from D-Net/dnet-hadoop
close the SparkSession at the end
parent 25ceec29ab
commit 9af3e904be
@@ -15,13 +15,8 @@ public class SparkGraphImporterJob {
 
 		final ArgumentApplicationParser parser = new ArgumentApplicationParser(IOUtils.toString(SparkGraphImporterJob.class.getResourceAsStream("/eu/dnetlib/dhp/graph/input_graph_parameters.json")));
 		parser.parseArgument(args);
 
-		final SparkSession spark = SparkSession
-				.builder()
-				.appName(SparkGraphImporterJob.class.getSimpleName())
-				.master(parser.get("master"))
-				.config("hive.metastore.uris", parser.get("hive_metastore_uris"))
-				.enableHiveSupport()
-				.getOrCreate();
+		try(SparkSession spark = getSparkSession(parser)) {
+
 		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
 		final String inputPath = parser.get("sourcePath");
 
@@ -39,6 +34,16 @@ public class SparkGraphImporterJob {
 				.mode(SaveMode.Overwrite)
 				.saveAsTable(hiveDbName + "." + name);
 		});
+		}
 	}
 
+	private static SparkSession getSparkSession(ArgumentApplicationParser parser) {
+		return SparkSession
+				.builder()
+				.appName(SparkGraphImporterJob.class.getSimpleName())
+				.master(parser.get("master"))
+				.config("hive.metastore.uris", parser.get("hive_metastore_uris"))
+				.enableHiveSupport()
+				.getOrCreate();
+	}
 }
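Put together, the refactored job reads roughly like the sketch below (a minimal reconstruction from the two hunks above, not the verbatim file: the body that reads sourcePath and writes the Hive tables is elided, and the eu.dnetlib.dhp.application package for ArgumentApplicationParser is an assumption). Since SparkSession implements java.io.Closeable and close() delegates to stop(), the try-with-resources block guarantees the session is shut down even if the import throws.

    // Sketch of SparkGraphImporterJob after this commit (reconstruction, not the verbatim source).
    import org.apache.commons.io.IOUtils;
    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.sql.SparkSession;

    import eu.dnetlib.dhp.application.ArgumentApplicationParser; // assumed package

    public class SparkGraphImporterJob {

        public static void main(String[] args) throws Exception {
            final ArgumentApplicationParser parser = new ArgumentApplicationParser(
                    IOUtils.toString(SparkGraphImporterJob.class
                            .getResourceAsStream("/eu/dnetlib/dhp/graph/input_graph_parameters.json")));
            parser.parseArgument(args);

            // SparkSession is Closeable (close() calls stop()), so try-with-resources
            // stops the session at the end of the block, even on failure.
            try (SparkSession spark = getSparkSession(parser)) {

                final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
                final String inputPath = parser.get("sourcePath");

                // ... read the graph from inputPath and save each dataset as a Hive table
                //     (unchanged body, see the second hunk above) ...
            }
        }

        private static SparkSession getSparkSession(ArgumentApplicationParser parser) {
            return SparkSession
                    .builder()
                    .appName(SparkGraphImporterJob.class.getSimpleName())
                    .master(parser.get("master"))
                    .config("hive.metastore.uris", parser.get("hive_metastore_uris"))
                    .enableHiveSupport()
                    .getOrCreate();
        }
    }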