@@ -4,9 +4,23 @@ package eu.dnetlib.dhp.bulktag;
 import static eu.dnetlib.dhp.PropagationConstant.removeOutputDir;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
 
+import java.io.BufferedOutputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.*;
 import java.util.stream.Collectors;
+import java.util.zip.GZIPOutputStream;
+
+import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
+import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
+import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.FilterFunction;
 import org.apache.spark.api.java.function.MapFunction;
@@ -17,17 +31,24 @@ import org.apache.spark.sql.SparkSession;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.gson.Gson;
 
+import eu.dnetlib.dhp.api.Utils;
+import eu.dnetlib.dhp.api.model.CommunityEntityMap;
+import eu.dnetlib.dhp.api.model.EntityCommunities;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.bulktag.community.*;
+import eu.dnetlib.dhp.schema.common.EntityType;
+import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.common.ModelSupport;
+import eu.dnetlib.dhp.schema.oaf.Context;
 import eu.dnetlib.dhp.schema.oaf.Datasource;
+import eu.dnetlib.dhp.schema.oaf.Project;
 import eu.dnetlib.dhp.schema.oaf.Result;
+import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
 import scala.Tuple2;
 
 public class SparkBulkTagJob {
@@ -47,6 +68,7 @@ public class SparkBulkTagJob {
 				.getResourceAsStream(
 					"/eu/dnetlib/dhp/wf/subworkflows/bulktag/input_bulkTag_parameters.json"));
 
+		log.info("args: {}", Arrays.toString(args));
 		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
 		parser.parseArgument(args);
 
@@ -65,8 +87,20 @@
 		final String baseURL = parser.get("baseURL");
 		log.info("baseURL: {}", baseURL);
 
-		ProtoMap protoMappingParams = new Gson().fromJson(parser.get("pathMap"), ProtoMap.class);
-		log.info("pathMap: {}", new Gson().toJson(protoMappingParams));
+		log.info("pathMap: {}", parser.get("pathMap"));
+		String protoMappingPath = parser.get("pathMap");
+
+		final String hdfsNameNode = parser.get("nameNode");
+		log.info("nameNode: {}", hdfsNameNode);
+
+		Configuration configuration = new Configuration();
+		configuration.set("fs.defaultFS", hdfsNameNode);
+		FileSystem fs = FileSystem.get(configuration);
+
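+		// read the path-map JSON from the HDFS location and parse it into a ProtoMap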
+		String temp = IOUtils.toString(fs.open(new Path(protoMappingPath)), StandardCharsets.UTF_8);
+		log.info("protoMap: {}", temp);
+		ProtoMap protoMap = new Gson().fromJson(temp, ProtoMap.class);
+		log.info("pathMap: {}", new Gson().toJson(protoMap));
 
 		SparkConf conf = new SparkConf();
 		CommunityConfiguration cc;
@@ -88,10 +122,130 @@
 			isSparkSessionManaged,
 			spark -> {
 				extendCommunityConfigurationForEOSC(spark, inputPath, cc);
-				execBulkTag(spark, inputPath, outputPath, protoMappingParams, cc);
+				execBulkTag(spark, inputPath, outputPath, protoMap, cc);
+				execDatasourceTag(spark, inputPath, outputPath, Utils.getDatasourceCommunities(baseURL));
+				execProjectTag(spark, inputPath, outputPath, Utils.getCommunityProjects(baseURL));
 			});
 	}
 
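+	/**
+	 * Tags projects with the communities they are associated to, according to the map
+	 * returned by the community API, and copies the tagged records back to the input path.
+	 */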
+	private static void execProjectTag(SparkSession spark, String inputPath, String outputPath,
+		CommunityEntityMap communityProjects) {
+		Dataset<Project> projects = readPath(spark, inputPath + "project", Project.class);
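+		// one EntityCommunities record per project id present in the community map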
+		Dataset<EntityCommunities> pc = spark
+			.createDataset(
+				communityProjects
+					.keySet()
+					.stream()
+					.map(k -> EntityCommunities.newInstance(k, communityProjects.get(k)))
+					.collect(Collectors.toList()),
+				Encoders.bean(EntityCommunities.class));
+
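+		// left-join projects with their communities and add a bulk-tagging context
+		// for each community id not already present on the project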
+		projects
+			.joinWith(pc, projects.col("id").equalTo(pc.col("entityId")), "left")
+			.map((MapFunction<Tuple2<Project, EntityCommunities>, Project>) t2 -> {
+				Project ds = t2._1();
+				if (t2._2() != null) {
+					List<String> context = Optional
+						.ofNullable(ds.getContext())
+						.map(v -> v.stream().map(c -> c.getId()).collect(Collectors.toList()))
+						.orElse(new ArrayList<>());
+
+					if (!Optional.ofNullable(ds.getContext()).isPresent())
+						ds.setContext(new ArrayList<>());
+					t2._2().getCommunitiesId().forEach(c -> {
+						if (!context.contains(c)) {
+							Context con = new Context();
+							con.setId(c);
+							con
+								.setDataInfo(
+									Arrays
+										.asList(
+											OafMapperUtils
+												.dataInfo(
+													false, TaggingConstants.BULKTAG_DATA_INFO_TYPE, true, false,
+													OafMapperUtils
+														.qualifier(
+															TaggingConstants.CLASS_ID_DATASOURCE,
+															TaggingConstants.CLASS_NAME_BULKTAG_DATASOURCE,
+															ModelConstants.DNET_PROVENANCE_ACTIONS,
+															ModelConstants.DNET_PROVENANCE_ACTIONS),
+													"1")));
+							ds.getContext().add(con);
+						}
+					});
+				}
+				return ds;
+			}, Encoders.bean(Project.class))
+			.write()
+			.mode(SaveMode.Overwrite)
+			.option("compression", "gzip")
+			.json(outputPath + "project");
+
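+		// overwrite the input path with the tagged projects so downstream steps read the updated records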
readPath(spark, outputPath + "project", Datasource.class)
|
|
|
|
|
.write()
|
|
|
|
|
.mode(SaveMode.Overwrite)
|
|
|
|
|
.option("compression", "gzip")
|
|
|
|
|
.json(inputPath + "project");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
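+	/**
+	 * Tags datasources with the communities they are associated to, according to the list
+	 * returned by the community API, and copies the tagged records back to the input path.
+	 */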
+	private static void execDatasourceTag(SparkSession spark, String inputPath, String outputPath,
+		List<EntityCommunities> datasourceCommunities) {
+		Dataset<Datasource> datasource = readPath(spark, inputPath + "datasource", Datasource.class);
+
+		Dataset<EntityCommunities> dc = spark
+			.createDataset(datasourceCommunities, Encoders.bean(EntityCommunities.class));
+
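+		// left-join datasources with their communities and add a bulk-tagging context
+		// for each community id not already present on the datasource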
+		datasource
+			.joinWith(dc, datasource.col("id").equalTo(dc.col("entityId")), "left")
+			.map((MapFunction<Tuple2<Datasource, EntityCommunities>, Datasource>) t2 -> {
+				Datasource ds = t2._1();
+				if (t2._2() != null) {
+
+					List<String> context = Optional
+						.ofNullable(ds.getContext())
+						.map(v -> v.stream().map(c -> c.getId()).collect(Collectors.toList()))
+						.orElse(new ArrayList<>());
+
+					if (!Optional.ofNullable(ds.getContext()).isPresent())
+						ds.setContext(new ArrayList<>());
+
+					t2._2().getCommunitiesId().forEach(c -> {
+						if (!context.contains(c)) {
+							Context con = new Context();
+							con.setId(c);
+							con
+								.setDataInfo(
+									Arrays
+										.asList(
+											OafMapperUtils
+												.dataInfo(
+													false, TaggingConstants.BULKTAG_DATA_INFO_TYPE, true, false,
+													OafMapperUtils
+														.qualifier(
+															TaggingConstants.CLASS_ID_DATASOURCE,
+															TaggingConstants.CLASS_NAME_BULKTAG_DATASOURCE,
+															ModelConstants.DNET_PROVENANCE_ACTIONS,
+															ModelConstants.DNET_PROVENANCE_ACTIONS),
+													"1")));
+							ds.getContext().add(con);
+						}
+					});
+				}
+				return ds;
+			}, Encoders.bean(Datasource.class))
+			.write()
+			.mode(SaveMode.Overwrite)
+			.option("compression", "gzip")
+			.json(outputPath + "datasource");
+
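+		// overwrite the input path with the tagged datasources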
readPath(spark, outputPath + "datasource", Datasource.class)
|
|
|
|
|
.write()
|
|
|
|
|
.mode(SaveMode.Overwrite)
|
|
|
|
|
.option("compression", "gzip")
|
|
|
|
|
.json(inputPath + "datasource");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private static void extendCommunityConfigurationForEOSC(SparkSession spark, String inputPath,
|
|
|
|
|
CommunityConfiguration cc) {
|
|
|
|
|
|
|
|
|
@@ -129,6 +283,11 @@ public class SparkBulkTagJob {
 		ProtoMap protoMappingParams,
 		CommunityConfiguration communityConfiguration) {
 
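+		// log the path map (serialized as JSON) before tagging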
+		try {
+			log.info("protoMap: {}", new ObjectMapper().writeValueAsString(protoMappingParams));
+		} catch (JsonProcessingException e) {
+			throw new RuntimeException(e);
+		}
 		ModelSupport.entityTypes
 			.keySet()
 			.parallelStream()