package eu.dnetlib.doiboost.orcid;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.io.Text;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.esotericsoftware.minlog.Log;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.JsonElement;
import com.google.gson.JsonParser;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.schema.orcid.AuthorData;
import eu.dnetlib.doiboost.orcid.model.WorkData;
import scala.Tuple2;

/**
 * Spark job that joins ORCID author summaries with work activities on the
 * ORCID iD and emits, for each DOI, the merged list of its ORCID authors
 * serialized as JSON.
 */
public class SparkGenerateDoiAuthorList {

    public static void main(String[] args) throws Exception {
        Logger logger = LoggerFactory.getLogger(SparkGenerateDoiAuthorList.class);
        logger.info("[ SparkGenerateDoiAuthorList STARTED]");

        final ArgumentApplicationParser parser = new ArgumentApplicationParser(
            IOUtils
                .toString(
                    SparkGenerateDoiAuthorList.class
                        .getResourceAsStream(
                            "/eu/dnetlib/dhp/doiboost/gen_doi_author_list_orcid_parameters.json")));
        parser.parseArgument(args);
        Boolean isSparkSessionManaged = Optional
            .ofNullable(parser.get("isSparkSessionManaged"))
            .map(Boolean::valueOf)
            .orElse(Boolean.TRUE);
        logger.info("isSparkSessionManaged: {}", isSparkSessionManaged);
        final String workingPath = parser.get("workingPath");
        logger.info("workingPath: {}", workingPath);
        final String outputDoiAuthorListPath = parser.get("outputDoiAuthorListPath");
        logger.info("outputDoiAuthorListPath: {}", outputDoiAuthorListPath);

        SparkConf conf = new SparkConf();
        runWithSparkSession(
            conf,
            isSparkSessionManaged,
            spark -> {
                JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

                // Author summaries: one JSON record per ORCID iD
                JavaPairRDD<Text, Text> summariesRDD = sc
                    .sequenceFile(workingPath + "../orcid_summaries/output/authors.seq", Text.class, Text.class);
                Dataset<AuthorData> summariesDataset = spark
                    .createDataset(
                        summariesRDD.map(seq -> loadAuthorFromJson(seq._1(), seq._2())).rdd(),
                        Encoders.bean(AuthorData.class));

                // Work activities: one JSON record per work, carrying its DOI
                JavaPairRDD<Text, Text> activitiesRDD = sc
                    .sequenceFile(workingPath + "/output/*.seq", Text.class, Text.class);
                Dataset<WorkData> activitiesDataset = spark
                    .createDataset(
                        activitiesRDD.map(seq -> loadWorkFromJson(seq._1(), seq._2())).rdd(),
                        Encoders.bean(WorkData.class));

                // Wraps each (doi, author) pair as (doi, singleton author list),
                // so the lists can later be merged per DOI by reduceByKey
                Function<Tuple2<String, AuthorData>, Tuple2<String, List<AuthorData>>> toAuthorListFunction = data -> {
                    try {
                        String doi = data._1();
                        if (doi == null) {
                            return null;
                        }
                        AuthorData author = data._2();
                        if (author == null) {
                            return null;
                        }
                        List<AuthorData> toAuthorList = Arrays.asList(author);
                        return new Tuple2<>(doi, toAuthorList);
                    } catch (Exception e) {
                        Log.error("toAuthorListFunction ERROR", e);
                        return null;
                    }
                };

                // Inner join of works and author summaries on the ORCID iD ("oid")
                JavaRDD<Tuple2<String, List<AuthorData>>> doisRDD = activitiesDataset
                    .joinWith(
                        summariesDataset,
                        activitiesDataset.col("oid").equalTo(summariesDataset.col("oid")),
                        "inner")
                    .map(
                        (MapFunction<Tuple2<WorkData, AuthorData>, Tuple2<String, AuthorData>>) value -> {
                            WorkData w = value._1;
                            AuthorData a = value._2;
                            return new Tuple2<>(w.getDoi(), a);
                        },
                        Encoders.tuple(Encoders.STRING(), Encoders.bean(AuthorData.class)))
                    .filter(Objects::nonNull)
                    .toJavaRDD()
                    .map(toAuthorListFunction);

                // Merge the author lists of each DOI, then serialize each
                // (doi, authors) pair to JSON and save as text
                JavaPairRDD
                    .fromJavaRDD(doisRDD)
                    .reduceByKey((d1, d2) -> {
                        try {
                            if (d1 != null && d2 != null) {
                                Stream<AuthorData> mergedStream = Stream
                                    .concat(
                                        d1.stream(),
                                        d2.stream());
                                List<AuthorData> mergedAuthors = mergedStream.collect(Collectors.toList());
                                return mergedAuthors;
                            }
                            if (d1 != null) {
                                return d1;
                            }
                            if (d2 != null) {
                                return d2;
                            }
                        } catch (Exception e) {
                            Log.error("mergeAuthorsFunction ERROR", e);
                            return null;
                        }
                        return null;
                    })
                    .mapToPair(
                        s -> {
                            ObjectMapper mapper = new ObjectMapper();
                            return new Tuple2<>(s._1(), mapper.writeValueAsString(s._2()));
                        })
                    .repartition(10)
                    .saveAsTextFile(workingPath + outputDoiAuthorListPath);
            });
    }

    private static AuthorData loadAuthorFromJson(Text orcidId, Text json) {
        AuthorData authorData = new AuthorData();
        authorData.setOid(orcidId.toString());
        JsonElement jElement = new JsonParser().parse(json.toString());
        authorData.setName(getJsonValue(jElement, "name"));
        authorData.setSurname(getJsonValue(jElement, "surname"));
        authorData.setCreditName(getJsonValue(jElement, "creditname"));
        return authorData;
    }

    private static WorkData loadWorkFromJson(Text orcidId, Text json) {
        WorkData workData = new WorkData();
        workData.setOid(orcidId.toString());
        JsonElement jElement = new JsonParser().parse(json.toString());
        workData.setDoi(getJsonValue(jElement, "doi"));
        return workData;
    }

    private static String getJsonValue(JsonElement jElement, String property) {
        if (jElement.getAsJsonObject().has(property)) {
            JsonElement name = jElement.getAsJsonObject().get(property);
            if (name != null && !name.isJsonNull()) {
                return name.getAsString();
            }
        }
        return null;
    }
}