dnet-hadoop/dhp-workflows/dhp-graph-provision-scholex.../src/main/java/eu/dnetlib/dhp/provision/SparkExtractRelationCount.java

package eu.dnetlib.dhp.provision;

import org.apache.commons.io.IOUtils;
import org.apache.spark.sql.*;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
/**
 * SparkExtractRelationCount is a Spark job that takes the relation dataset as input and, for each item
 * involved in a relation, computes the number of related Datasets, related Publications and related Unknowns.
*/
public class SparkExtractRelationCount {

    public static void main(String[] args) throws Exception {
        final ArgumentApplicationParser parser = new ArgumentApplicationParser(
            IOUtils
                .toString(
                    SparkExtractRelationCount.class
                        .getResourceAsStream(
                            "/eu/dnetlib/dhp/provision/input_related_entities_parameters.json")));
        parser.parseArgument(args);
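        // A hypothetical sketch of the descriptor loaded above, NOT the actual
        // input_related_entities_parameters.json (which is not shown here). The key names below
        // are an assumption about the ArgumentApplicationParser descriptor format, not verified;
        // what is grounded in this class is that the descriptor must cover the three parameters
        // read below: "master", "workingDirPath" and "relationPath". Illustrative example:
        //
        //   [
        //     { "paramName": "m", "paramLongName": "master",         "paramDescription": "the Spark master (local or yarn)", "paramRequired": true },
        //     { "paramName": "w", "paramLongName": "workingDirPath", "paramDescription": "the job working directory",        "paramRequired": true },
        //     { "paramName": "r", "paramLongName": "relationPath",   "paramDescription": "the path of the relation dataset", "paramRequired": true }
        //   ]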
        final SparkSession spark = SparkSession
            .builder()
            .appName(SparkExtractRelationCount.class.getSimpleName())
            .master(parser.get("master"))
            .getOrCreate();

        final String workingDirPath = parser.get("workingDirPath");
        final String relationPath = parser.get("relationPath");
        DatasetJoiner.startJoin(spark, relationPath, workingDirPath + "/relatedItemCount");
    }
}
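
/*
 * A minimal sketch of the aggregation described in the class comment above, NOT the actual
 * DatasetJoiner.startJoin implementation. It only illustrates how the three counters could be
 * computed with Spark SQL, under the hypothetical assumption that each relation record exposes
 * a "source" column and a "targetType" column valued "dataset", "publication" or "unknown";
 * "outputPath" stands for the workingDirPath + "/relatedItemCount" argument passed above.
 *
 *   import static org.apache.spark.sql.functions.*;
 *
 *   // load the relation records and, per source item, count targets by type
 *   Dataset<Row> rel = spark.read().load(relationPath);
 *   rel
 *     .groupBy(col("source"))
 *     .agg(
 *       count(when(col("targetType").equalTo("dataset"), true)).alias("relatedDataset"),
 *       count(when(col("targetType").equalTo("publication"), true)).alias("relatedPublication"),
 *       count(when(col("targetType").equalTo("unknown"), true)).alias("relatedUnknown"))
 *     .write()
 *     .mode(SaveMode.Overwrite)
 *     .save(outputPath);
 */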