package eu.dnetlib.dhp.oa.graph.dump.subset;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.util.*;
import java.util.stream.Collectors;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.ForeachFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.oa.model.graph.ResearchCommunity;
import eu.dnetlib.dhp.schema.oaf.*;
import scala.Tuple2;

/**
 * Selects, among the research communities/initiatives read from contextPath, only those that are actually
 * referenced in the context of at least one result (publication, dataset, software, other research product)
 * of the graph, and writes them as compressed JSON to outputPath.
 *
 * @author miriam.baglioni
 * @Date 15/11/22
 */
public class SparkSelectValidContext implements Serializable {
	private static final Logger log = LoggerFactory.getLogger(SparkSelectValidContext.class);

	public static void main(String[] args) throws Exception {
		// read the job parameter definitions bundled with the jar
		String jsonConfiguration = IOUtils
			.toString(
				SparkSelectValidContext.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/dump/input_select_context.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String contextPath = parser.get("contextPath");
		log.info("contextPath: {}", contextPath);

		final String communityMapPath = parser.get("communityMapPath");
		log.info("communityMapPath: {}", communityMapPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		SparkConf conf = new SparkConf();

		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> selectValidContext(spark, inputPath, contextPath, communityMapPath, outputPath));
	}

	private static void selectValidContext(SparkSession spark, String inputPath, String contextPath,
		String communityMapPath, String outputPath) {
		// identifiers of the communities that are allowed to appear in the dump
		List<String> keys = Arrays
			.asList(Utils.getCommunityMap(spark, communityMapPath).keySet().stream().toArray(String[]::new));

		// distinct context identifiers actually used by at least one result of any type
		Dataset<String> context = getFilter(spark, inputPath + "/publication", keys, Publication.class)
			.union(getFilter(spark, inputPath + "/dataset", keys, eu.dnetlib.dhp.schema.oaf.Dataset.class))
			.union(getFilter(spark, inputPath + "/software", keys, Software.class))
			.union(getFilter(spark, inputPath + "/otherresearchproduct", keys, OtherResearchProduct.class))
			.distinct();

		// debug output: prints the selected context identifiers to the executors' stdout
		context.foreach((ForeachFunction<String>) c -> System.out.println(c));

		Dataset<ResearchCommunity> researchCommunity = Utils.readPath(spark, contextPath, ResearchCommunity.class);

		// keep only the research communities whose acronym matches one of the used context identifiers
		researchCommunity
			.joinWith(context, researchCommunity.col("acronym").equalTo(context.col("value")))
			.map(
				(MapFunction<Tuple2<ResearchCommunity, String>, ResearchCommunity>) t2 -> t2._1(),
				Encoders.bean(ResearchCommunity.class))
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(outputPath);
	}

	private static <I extends Result> Dataset<String> getFilter(SparkSession spark, String inputPath,
		List<String> keys, Class<I> inputClazz) {

		// for each result of the given type, emit the community identifiers found in its context,
		// keeping only those matching one of the allowed keys (nulls produced by extract are dropped)
		return Utils
			.readPath(spark, inputPath, inputClazz)
			.filter((FilterFunction<I>) r -> isPresentContext(r))
			.flatMap(
				(FlatMapFunction<I, String>) r -> r
					.getContext()
					.stream()
					.map(c -> extract(c.getId(), keys))
					.collect(Collectors.toList())
					.iterator(),
				Encoders.STRING())
			.filter(Objects::nonNull);
	}

	private static <I extends Result> boolean isPresentContext(I r) {
		return Optional.ofNullable(r.getContext()).isPresent();
	}

	// returns the context id if it matches one of the community keys, either exactly
	// (e.g. "somecommunity") or by its prefix before the first ':' ("somecommunity::subject" -> "somecommunity");
	// returns null when the context does not refer to any of the allowed communities
	private static String extract(String c, List<String> keySet) {
		if (keySet.contains(c))
			return c;
		if (c.contains(":") && keySet.contains(c.substring(0, c.indexOf(":"))))
			return c.substring(0, c.indexOf(":"));
		return null;
	}
}
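
/*
 * Minimal usage sketch (not part of the class, kept as a comment so the file still compiles).
 * The job is presumably launched by the dump workflow, but could also be submitted directly.
 * The argument names below are exactly the ones read by main() above; the jar name, all paths,
 * and the spark-submit form itself are placeholders/assumptions, not taken from the source.
 *
 * spark-submit \
 *   --class eu.dnetlib.dhp.oa.graph.dump.subset.SparkSelectValidContext \
 *   dhp-graph-dump.jar \
 *   --sourcePath /path/to/graph \
 *   --contextPath /path/to/context/entities \
 *   --communityMapPath /path/to/communityMap \
 *   --outputPath /path/to/dump/communities_infrastructures \
 *   --isSparkSessionManaged false
 */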