package eu.dnetlib.dhp.bulktag;

import static eu.dnetlib.dhp.PropagationConstant.removeOutputDir;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.stream.Collectors;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;

import eu.dnetlib.dhp.api.Utils;
import eu.dnetlib.dhp.api.model.CommunityEntityMap;
import eu.dnetlib.dhp.api.model.EntityCommunities;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.bulktag.community.*;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.oaf.Context;
import eu.dnetlib.dhp.schema.oaf.Datasource;
import eu.dnetlib.dhp.schema.oaf.Project;
import eu.dnetlib.dhp.schema.oaf.Result;
import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
import scala.Tuple2;

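/**
 * Bulk tagging job: enriches results, datasources and projects with the community contexts they are
 * eligible for, according to the community configuration and to the pathMap driving the extraction of
 * the fields to inspect.
 */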
public class SparkBulkTagJob {

	private static final String OPENAIRE_3 = "openaire3.0";
	private static final String OPENAIRE_4 = "openaire-pub_4.0";
	private static final String OPENAIRE_CRIS = "openaire-cris_1.1";
	private static final String OPENAIRE_DATA = "openaire2.0_data";
	private static final String EOSC = "10|openaire____::2e06c1122c7df43765fdcf91080824fa";

	private static final Logger log = LoggerFactory.getLogger(SparkBulkTagJob.class);
	public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
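
	/**
	 * Parses the job arguments, loads the pathMap from HDFS and the community configuration (either the
	 * inline taggingConf or the community API at baseURL), then runs the tagging of results, datasources
	 * and projects.
	 */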
	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				SparkBulkTagJob.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/wf/subworkflows/bulktag/input_bulkTag_parameters.json"));

		log.info(Arrays.toString(args));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		final String baseURL = parser.get("baseURL");
		log.info("baseURL: {}", baseURL);

		final String protoMappingPath = parser.get("pathMap");
		log.info("pathMap: {}", protoMappingPath);

		final String hdfsNameNode = parser.get("nameNode");
		log.info("nameNode: {}", hdfsNameNode);

		// read the pathMap, i.e. the mapping between field names and json paths, from HDFS
		Configuration configuration = new Configuration();
		configuration.set("fs.defaultFS", hdfsNameNode);
		FileSystem fs = FileSystem.get(configuration);

		String temp = IOUtils.toString(fs.open(new Path(protoMappingPath)), StandardCharsets.UTF_8);
		log.info("protoMap: {}", temp);
		ProtoMap protoMap = new Gson().fromJson(temp, ProtoMap.class);

		SparkConf conf = new SparkConf();
		CommunityConfiguration cc;

		String taggingConf = Optional
			.ofNullable(parser.get("taggingConf"))
			.map(String::valueOf)
			.orElse(null);

		if (taggingConf != null) {
			cc = CommunityConfigurationFactory.newInstance(taggingConf);
		} else {
			cc = Utils.getCommunityConfiguration(baseURL);
			log.info(OBJECT_MAPPER.writeValueAsString(cc));
		}

		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				extendCommunityConfigurationForEOSC(spark, inputPath, cc);
				execBulkTag(spark, inputPath, outputPath, protoMap, cc);
				execDatasourceTag(spark, inputPath, outputPath, Utils.getDatasourceCommunities(baseURL));
				execProjectTag(spark, inputPath, outputPath, Utils.getCommunityProjects(baseURL));
			});
	}
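
	/**
	 * Adds to each project the contexts of the communities it is associated to, writing the tagged
	 * projects to the working dir and then copying them back to the input path.
	 */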
	private static void execProjectTag(SparkSession spark, String inputPath, String outputPath,
		CommunityEntityMap communityProjects) {
		Dataset<Project> projects = readPath(spark, inputPath + "project", Project.class);
		Dataset<EntityCommunities> pc = spark
			.createDataset(
				communityProjects
					.keySet()
					.stream()
					.map(k -> EntityCommunities.newInstance(k, communityProjects.get(k)))
					.collect(Collectors.toList()),
				Encoders.bean(EntityCommunities.class));

		projects
			.joinWith(pc, projects.col("id").equalTo(pc.col("entityId")), "left")
			.map((MapFunction<Tuple2<Project, EntityCommunities>, Project>) t2 -> {
				Project ds = t2._1();
				if (t2._2() != null) {
					List<String> context = Optional
						.ofNullable(ds.getContext())
						.map(v -> v.stream().map(c -> c.getId()).collect(Collectors.toList()))
						.orElse(new ArrayList<>());

					if (!Optional.ofNullable(ds.getContext()).isPresent())
						ds.setContext(new ArrayList<>());
					t2._2().getCommunitiesId().forEach(c -> {
						if (!context.contains(c)) {
							Context con = new Context();
							con.setId(c);
							con
								.setDataInfo(
									Arrays
										.asList(
											OafMapperUtils
												.dataInfo(
													false, TaggingConstants.BULKTAG_DATA_INFO_TYPE, true, false,
													OafMapperUtils
														.qualifier(
															TaggingConstants.CLASS_ID_DATASOURCE,
															TaggingConstants.CLASS_NAME_BULKTAG_DATASOURCE,
															ModelConstants.DNET_PROVENANCE_ACTIONS,
															ModelConstants.DNET_PROVENANCE_ACTIONS),
													"1")));
							ds.getContext().add(con);
						}
					});
				}
				return ds;
			}, Encoders.bean(Project.class))
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(outputPath + "project");

		readPath(spark, outputPath + "project", Project.class)
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(inputPath + "project");
	}
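
	/**
	 * Adds to each datasource the contexts of the communities it is associated to, writing the tagged
	 * datasources to the working dir and then copying them back to the input path.
	 */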
	private static void execDatasourceTag(SparkSession spark, String inputPath, String outputPath,
		List<EntityCommunities> datasourceCommunities) {
		Dataset<Datasource> datasource = readPath(spark, inputPath + "datasource", Datasource.class);

		Dataset<EntityCommunities> dc = spark
			.createDataset(datasourceCommunities, Encoders.bean(EntityCommunities.class));

		datasource
			.joinWith(dc, datasource.col("id").equalTo(dc.col("entityId")), "left")
			.map((MapFunction<Tuple2<Datasource, EntityCommunities>, Datasource>) t2 -> {
				Datasource ds = t2._1();
				if (t2._2() != null) {
					List<String> context = Optional
						.ofNullable(ds.getContext())
						.map(v -> v.stream().map(c -> c.getId()).collect(Collectors.toList()))
						.orElse(new ArrayList<>());

					if (!Optional.ofNullable(ds.getContext()).isPresent())
						ds.setContext(new ArrayList<>());

					t2._2().getCommunitiesId().forEach(c -> {
						if (!context.contains(c)) {
							Context con = new Context();
							con.setId(c);
							con
								.setDataInfo(
									Arrays
										.asList(
											OafMapperUtils
												.dataInfo(
													false, TaggingConstants.BULKTAG_DATA_INFO_TYPE, true, false,
													OafMapperUtils
														.qualifier(
															TaggingConstants.CLASS_ID_DATASOURCE,
															TaggingConstants.CLASS_NAME_BULKTAG_DATASOURCE,
															ModelConstants.DNET_PROVENANCE_ACTIONS,
															ModelConstants.DNET_PROVENANCE_ACTIONS),
													"1")));
							ds.getContext().add(con);
						}
					});
				}
				return ds;
			}, Encoders.bean(Datasource.class))
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(outputPath + "datasource");

		readPath(spark, outputPath + "datasource", Datasource.class)
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(inputPath + "datasource");
	}
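
	/**
	 * Registers in the EOSC datasource map of the community configuration every eligible datasource
	 * (see {@link #isOKDatasource}) not yet present, associating it with an empty list of selection
	 * constraints.
	 */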
	private static void extendCommunityConfigurationForEOSC(SparkSession spark, String inputPath,
		CommunityConfiguration cc) {

		Dataset<String> datasources = readPath(
			spark, inputPath + "datasource",
			Datasource.class)
			.filter((FilterFunction<Datasource>) ds -> isOKDatasource(ds))
			.map((MapFunction<Datasource, String>) ds -> ds.getId(), Encoders.STRING());

		Map<String, List<Pair<String, SelectionConstraints>>> dsm = cc.getEoscDatasourceMap();

		for (String ds : datasources.collectAsList()) {
			if (!dsm.containsKey(ds)) {
				dsm.put(ds, new ArrayList<>());
			}
		}
	}
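
	/**
	 * A datasource is eligible for EOSC tagging when its compatibility level is one of the supported
	 * OpenAIRE guidelines and it was collected from the EOSC datasource.
	 */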
	private static boolean isOKDatasource(Datasource ds) {
		final String compatibility = ds.getOpenairecompatibility().getClassid();
		return (compatibility.equalsIgnoreCase(OPENAIRE_3) ||
			compatibility.equalsIgnoreCase(OPENAIRE_4) ||
			compatibility.equalsIgnoreCase(OPENAIRE_CRIS) ||
			compatibility.equalsIgnoreCase(OPENAIRE_DATA)) &&
			ds.getCollectedfrom().stream().anyMatch(cf -> cf.getKey().equals(EOSC));
	}
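
	/**
	 * Applies the tagging criteria to every result type in parallel, writing the enriched records to
	 * the working dir and then copying them back to the input path.
	 */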
private static <R extends Result> void execBulkTag(
|
|
|
|
SparkSession spark,
|
|
|
|
String inputPath,
|
|
|
|
String outputPath,
|
|
|
|
ProtoMap protoMappingParams,
|
|
|
|
CommunityConfiguration communityConfiguration) {
|
|
|
|
|
2024-02-19 16:12:59 +01:00
|
|
|
try {
|
|
|
|
System.out.println(new ObjectMapper().writeValueAsString(protoMappingParams));
|
|
|
|
} catch (JsonProcessingException e) {
|
|
|
|
throw new RuntimeException(e);
|
|
|
|
}
|
2023-10-11 18:17:35 +02:00
|
|
|
ModelSupport.entityTypes
|
|
|
|
.keySet()
|
|
|
|
.parallelStream()
|
2023-12-05 09:08:48 +01:00
|
|
|
.filter(ModelSupport::isResult)
|
2023-10-11 18:17:35 +02:00
|
|
|
.forEach(e -> {
|
|
|
|
removeOutputDir(spark, outputPath + e.name());
|
|
|
|
ResultTagger resultTagger = new ResultTagger();
|
|
|
|
Class<R> resultClazz = ModelSupport.entityTypes.get(e);
|
|
|
|
readPath(spark, inputPath + e.name(), resultClazz)
|
|
|
|
.map(patchResult(), Encoders.bean(resultClazz))
|
|
|
|
.filter(Objects::nonNull)
|
|
|
|
.map(
|
|
|
|
(MapFunction<R, R>) value -> resultTagger
|
|
|
|
.enrichContextCriteria(
|
|
|
|
value, communityConfiguration, protoMappingParams),
|
|
|
|
Encoders.bean(resultClazz))
|
|
|
|
.write()
|
|
|
|
.mode(SaveMode.Overwrite)
|
|
|
|
.option("compression", "gzip")
|
2023-12-09 15:20:11 +01:00
|
|
|
.json(outputPath + e.name());// writing the tagging in the working dir for entity
|
2023-12-07 09:59:52 +01:00
|
|
|
|
2023-12-09 15:20:11 +01:00
|
|
|
readPath(spark, outputPath + e.name(), resultClazz) // copy the tagging in the actual result output path
|
|
|
|
.write()
|
|
|
|
.mode(SaveMode.Overwrite)
|
|
|
|
.option("compression", "gzip")
|
|
|
|
.json(inputPath + e.name());
|
2023-10-11 18:17:35 +02:00
|
|
|
});
|
|
|
|
|
2020-04-30 11:05:17 +02:00
|
|
|
}
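
	/**
	 * Reads a text dataset of JSON-serialized records and maps each line to the given bean class.
	 */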
public static <R> Dataset<R> readPath(
|
|
|
|
SparkSession spark, String inputPath, Class<R> clazz) {
|
2020-04-30 11:05:17 +02:00
|
|
|
return spark
|
|
|
|
.read()
|
2020-05-08 13:08:56 +02:00
|
|
|
.textFile(inputPath)
|
|
|
|
.map((MapFunction<String, R>) value -> OBJECT_MAPPER.readValue(value, clazz), Encoders.bean(clazz));
|
2020-04-30 11:05:17 +02:00
|
|
|
}

	// TODO remove this hack as soon as the values fixed by this method are provided as non-null
private static <R extends Result> MapFunction<R, R> patchResult() {
|
2021-05-14 10:58:12 +02:00
|
|
|
return r -> {
|
2023-10-19 12:13:45 +02:00
|
|
|
if (Objects.isNull(r.getDataInfo())) {
|
|
|
|
r.setDataInfo(OafMapperUtils.dataInfo(false, "", false, false, OafMapperUtils.unknown("", ""), ""));
|
|
|
|
} else if (r.getDataInfo().getDeletedbyinference() == null) {
|
2020-05-26 10:28:35 +02:00
|
|
|
r.getDataInfo().setDeletedbyinference(false);
|
|
|
|
}
|
2023-10-19 12:13:45 +02:00
|
|
|
if (Objects.isNull(r.getContext())) {
|
2020-05-26 10:28:35 +02:00
|
|
|
r.setContext(new ArrayList<>());
|
|
|
|
}
|
|
|
|
return r;
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
2020-04-21 16:03:51 +02:00
|
|
|
}
|