package eu.dnetlib.dhp.oa.graph.dump.eosc;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.*;
import org.apache.spark.sql.types.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.eosc.model.Result;
import eu.dnetlib.dhp.oa.graph.dump.ResultMapper;

/**
 * Selects from the input graph the results to be included in the EOSC dump and maps them to the EOSC result
 * model ({@link eu.dnetlib.dhp.eosc.model.Result}).
 *
 * @author miriam.baglioni
 * @Date 27/07/22
 */
public class SelectEoscResultsJobStep1 implements Serializable {
    private static final Logger log = LoggerFactory.getLogger(SelectEoscResultsJobStep1.class);
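
    // Expected parameters (defined in eosc_select_result_parameters.json): isSparkSessionManaged (optional,
    // defaults to true), sourcePath, outputPath, eoscDatasourceIdsPath, communityMapPath and resultTableName
    // (the fully qualified name of a eu.dnetlib.dhp.schema.oaf.Result subclass).
    //
    // Purely illustrative launch sketch (jar name, paths and option syntax are assumptions; the authoritative
    // definitions live in the parameters file and in the workflow that drives this step):
    //
    //   spark-submit --class eu.dnetlib.dhp.oa.graph.dump.eosc.SelectEoscResultsJobStep1 dump.jar \
    //     --sourcePath /tmp/graph/publication \
    //     --outputPath /tmp/eosc_dump/publication \
    //     --eoscDatasourceIdsPath /tmp/eosc_datasource_ids \
    //     --communityMapPath /tmp/communityMap \
    //     --resultTableName eu.dnetlib.dhp.schema.oaf.Publication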
    public static void main(String[] args) throws Exception {
        String jsonConfiguration = IOUtils
            .toString(
                SelectEoscResultsJobStep1.class
                    .getResourceAsStream(
                        "/eu/dnetlib/dhp/oa/graph/dump/eosc_select_result_parameters.json"));

        final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
        parser.parseArgument(args);

        Boolean isSparkSessionManaged = Optional
            .ofNullable(parser.get("isSparkSessionManaged"))
            .map(Boolean::valueOf)
            .orElse(Boolean.TRUE);
        log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

        final String inputPath = parser.get("sourcePath");
        log.info("inputPath: {}", inputPath);

        final String outputPath = parser.get("outputPath");
        log.info("outputPath: {}", outputPath);

        final String eoscDatasourceIdsPath = parser.get("eoscDatasourceIdsPath");
        log.info("eoscDatasourceIdsPath: {}", eoscDatasourceIdsPath);

        final String communityMapPath = parser.get("communityMapPath");
        log.info("communityMapPath: {}", communityMapPath);

        final String resultClassName = parser.get("resultTableName");
        log.info("resultTableName: {}", resultClassName);

        Class<? extends eu.dnetlib.dhp.schema.oaf.Result> inputClazz = (Class<? extends eu.dnetlib.dhp.schema.oaf.Result>) Class
            .forName(resultClassName);

        SparkConf conf = new SparkConf();

        runWithSparkSession(
            conf,
            isSparkSessionManaged,
            spark -> {
                Utils.removeOutputDir(spark, outputPath);
                selectEoscResults(spark, inputPath, outputPath, inputClazz, communityMapPath, eoscDatasourceIdsPath);
            });
    }

    private static <R extends eu.dnetlib.dhp.schema.oaf.Result> void selectEoscResults(SparkSession spark,
        String inputPath, String outputPath,
        Class<R> inputClazz, String communityMapPath, String eoscDatasourceIdsPath) {
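        // master/duplicate identifiers of the EOSC datasources, read from eoscDatasourceIdsPath; the dataset is
        // collected on the driver as a plain List and handed to the ResultMapper below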
        List<MasterDuplicate> df = Utils
            .readPath(spark, eoscDatasourceIdsPath, MasterDuplicate.class)
            .collectAsList();
        log.info("number of EOSC datasource ids: {}", df.size());
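
        // map of the research communities, used by the ResultMapper when building the dumped records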
        CommunityMap communityMap = Utils.getCommunityMap(spark, communityMapPath);
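
        // keep only results that are not deleted by inference and not invisible, and that are either tagged with
        // the "eosc" context or collected from B2FIND; map them to the EOSC Result model, drop null mappings and
        // write the selection as gzip-compressed JSON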
        Utils
            .readPath(spark, inputPath, inputClazz)
            .filter(
                (FilterFunction<R>) r -> !r.getDataInfo().getDeletedbyinference() && !r.getDataInfo().getInvisible()
                    && (r.getContext().stream().anyMatch(c -> c.getId().equals("eosc")) ||
                        r
                            .getCollectedfrom()
                            .stream()
                            .anyMatch(cf -> cf.getValue().equalsIgnoreCase("B2FIND"))))
            .map(
                (MapFunction<R, Result>) r -> (Result) ResultMapper
                    .map(r, communityMap, df),
                Encoders.bean(Result.class))
            .filter(Objects::nonNull)
            .write()
            .mode(SaveMode.Overwrite)
            .option("compression", "gzip")
            .json(outputPath);
    }

}