BrBETA_dnet-hadoop/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/provision/SparkGenerateScholix.java

package eu.dnetlib.dhp.provision;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.provision.scholix.*;
import eu.dnetlib.dhp.provision.scholix.summary.*;
import eu.dnetlib.dhp.schema.oaf.Relation;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.*;
import scala.Tuple2;
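
/**
 * Spark job that builds the Scholix dataset in two join passes: first it joins
 * every ScholixSummary with the graph Relations whose source id matches the
 * summary id, materializing the intermediate result under
 * {@code workingDirPath/scholix_1}; then it joins those records with the
 * target ScholixResources by identifier and writes the final dataset to
 * {@code workingDirPath/scholix_index}.
 */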
public class SparkGenerateScholix {

    public static void main(String[] args) throws Exception {
        final ArgumentApplicationParser parser = new ArgumentApplicationParser(
                IOUtils.toString(SparkGenerateScholix.class.getResourceAsStream(
                        "/eu/dnetlib/dhp/provision/input_generate_summary_parameters.json")));
        parser.parseArgument(args);

        SparkConf conf = new SparkConf();
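        // Raise shuffle parallelism for the two large joins below.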
        conf.set("spark.sql.shuffle.partitions", "4000");
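        // Kryo serialization is left disabled here; the job relies on the bean encoders below.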
//        conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
//        conf.registerKryoClasses(new Class[]{
//                ScholixSummary.class,
//                CollectedFromType.class,
//                SchemeValue.class,
//                TypedIdentifier.class,
//                Typology.class,
//                Relation.class,
//                Scholix.class,
//                ScholixCollectedFrom.class,
//                ScholixEntityId.class,
//                ScholixIdentifier.class,
//                ScholixRelationship.class,
//                ScholixResource.class
//        });
        final SparkSession spark = SparkSession
                .builder()
                .config(conf)
                .appName(SparkGenerateScholix.class.getSimpleName())
                .master(parser.get("master"))
                .getOrCreate();

        final String graphPath = parser.get("graphPath");
        final String workingDirPath = parser.get("workingDirPath");

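        // Load the pre-computed summaries and the graph relations as typed Datasets.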
        final Dataset<ScholixSummary> scholixSummary = spark.read().load(workingDirPath + "/summary")
                .as(Encoders.bean(ScholixSummary.class));
        final Dataset<Relation> rels = spark.read().load(graphPath + "/relation")
                .as(Encoders.bean(Relation.class));
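
        // First pass: join summaries with relations on summary.id == relation.source,
        // producing Scholix records with the source side populated.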
        final Dataset<Scholix> firstJoin = scholixSummary
                .joinWith(rels, scholixSummary.col("id").equalTo(rels.col("source")))
                .map((MapFunction<Tuple2<ScholixSummary, Relation>, Scholix>) f ->
                        Scholix.generateScholixWithSource(f._1(), f._2()), Encoders.bean(Scholix.class));
        firstJoin.write().mode(SaveMode.Overwrite).save(workingDirPath + "/scholix_1");

        // Read the intermediate result back from disk (breaking the lineage of the
        // first join) and load the target resources for the second join.
        final Dataset<Scholix> scholix_final = spark.read().load(workingDirPath + "/scholix_1")
                .as(Encoders.bean(Scholix.class));
        final Dataset<ScholixResource> target = spark.read().load(workingDirPath + "/scholix_target")
                .as(Encoders.bean(ScholixResource.class));
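
        // Second pass: attach the matching target resource to each record, then
        // finalize the identifier and link-publisher information before writing
        // the dataset that feeds the index.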
        scholix_final.joinWith(target, scholix_final.col("identifier").equalTo(target.col("dnetIdentifier")), "inner")
                .map((MapFunction<Tuple2<Scholix, ScholixResource>, Scholix>) f -> {
                    final Scholix scholix = f._1();
                    final ScholixResource scholixTarget = f._2();
                    scholix.setTarget(scholixTarget);
                    scholix.generateIdentifier();
                    scholix.generatelinkPublisher();
                    return scholix;
                }, Encoders.bean(Scholix.class))
                .repartition(5000)
                .write().mode(SaveMode.Overwrite)
                .save(workingDirPath + "/scholix_index");
    }
}