package eu.dnetlib.dhp.oa.graph.dump.filterentities;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.skgif.Utils;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.oaf.Result;

/**
 * Filters the result entities of the graph (publication, dataset, software, other research
 * product), keeping only the records whose identifier appears in the corresponding
 * {@code <entityType>_ids} parquet files found under {@code filterPath}.
 *
 * @author miriam.baglioni
 * @Date 12/03/24
 */
public class FilterEntities implements Serializable {
	private static final Logger log = LoggerFactory.getLogger(FilterEntities.class);

	public static void main(String[] args) throws Exception {
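		// load the parameter definitions from the classpath resources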
		String jsonConfiguration = IOUtils
			.toString(
				FilterEntities.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/dump/filter_entities_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

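		// unless explicitly set to false, the Spark session lifecycle (creation and
		// closing) is delegated to runWithSparkSession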
		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);

		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String filterPath = parser.get("filterPath");
		log.info("filterPath: {}", filterPath);

		final String workingDir = parser.get("workingDir");
		log.info("workingDir: {}", workingDir);

		SparkConf conf = new SparkConf();

		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				filterEntities(spark, inputPath, filterPath, workingDir);
			});
	}

	private static <R extends Result> void filterEntities(SparkSession spark, String inputPath, String filterPath,
		String workingDir) {
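		// iterate over all graph entity types, processing only the result types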
		ModelSupport.entityTypes.keySet().forEach(e -> {
			if (ModelSupport.isResult(e)) {
				Class<R> resultClazz = ModelSupport.entityTypes.get(e);

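				// the entities of this type, read from the source graph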
				Dataset<R> result = Utils
					.readPath(spark, inputPath + e.name(), resultClazz);

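				// the identifiers selected for the dump, precomputed as parquet files
				// named <entityType>_ids under filterPath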
				Dataset<Row> filterIds = spark.read().parquet(filterPath + e.name() + "_ids");

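				// the left-semi join keeps only the result records whose id appears in
				// filterIds, without adding any column from the right-hand side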
				result
					.join(filterIds, result.col("id").equalTo(filterIds.col("id")), "leftsemi")
					.write()
					.mode(SaveMode.Overwrite)
					.option("compression", "gzip")
					.json(workingDir + e.name());
			}
		});
	}
}