package eu.dnetlib.doiboost.orcid
import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
import eu.dnetlib.dhp.application.ArgumentApplicationParser
import eu.dnetlib.dhp.oa.merge.AuthorMerger
import eu.dnetlib.dhp.schema.oaf.Publication
import eu.dnetlib.dhp.schema.orcid.OrcidDOI
import org.apache.commons.io.IOUtils
import org.apache.spark.SparkConf
import org.apache.spark.sql.expressions.Aggregator
import org.apache.spark.sql.{Dataset, Encoder, Encoders, SaveMode, SparkSession}
import org.slf4j.{Logger, LoggerFactory}
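/**
 * Spark job that reads ORCID records serialized as OrcidDOI JSON (one record
 * per line), converts them into OAF Publication entities and merges the
 * publications that share the same identifier.
 */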
object SparkConvertORCIDToOAF {
  val logger: Logger = LoggerFactory.getLogger(SparkConvertORCIDToOAF.getClass)
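  /**
   * Aggregator that folds all tuples sharing the same publication id into a
   * single Publication: fields are merged via mergeFrom and the author lists
   * of both sides are reconciled with AuthorMerger.
   */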
  def getPublicationAggregator(): Aggregator[(String, Publication), Publication, Publication] =
    new Aggregator[(String, Publication), Publication, Publication] {

      override def zero: Publication = new Publication()

      override def reduce(b: Publication, a: (String, Publication)): Publication = {
        b.mergeFrom(a._2)
        b.setAuthor(AuthorMerger.mergeAuthor(a._2.getAuthor, b.getAuthor))
        if (b.getId == null)
          b.setId(a._2.getId)
        b
      }

      override def merge(wx: Publication, wy: Publication): Publication = {
        wx.mergeFrom(wy)
        wx.setAuthor(AuthorMerger.mergeAuthor(wy.getAuthor, wx.getAuthor))
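        // check wy.getId for null before calling nonEmpty, otherwise a record
        // without an id would trigger a NullPointerException here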
        if (wx.getId == null && wy.getId != null && wy.getId.nonEmpty)
          wx.setId(wy.getId)
        wx
      }

      override def finish(reduction: Publication): Publication = reduction

      override def bufferEncoder: Encoder[Publication] =
        Encoders.kryo(classOf[Publication])

      override def outputEncoder: Encoder[Publication] =
        Encoders.kryo(classOf[Publication])
    }
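  /**
   * Converts the ORCID dump at sourcePath into OAF Publications and writes the
   * merged result to targetPath. Kryo encoders are declared explicitly since
   * Publication and OrcidDOI are Java bean classes that Spark's implicit
   * encoder derivation does not cover.
   */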
  def run(spark: SparkSession, sourcePath: String, targetPath: String): Unit = {
    implicit val mapEncoderPubs: Encoder[Publication] = Encoders.kryo[Publication]
    implicit val mapOrcid: Encoder[OrcidDOI] = Encoders.kryo[OrcidDOI]
    implicit val tupleForJoinEncoder: Encoder[(String, Publication)] = Encoders.tuple(Encoders.STRING, mapEncoderPubs)
    val mapper = new ObjectMapper()
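    // the original call to getDeserializationConfig.withFeatures discarded its
    // result (DeserializationConfig is immutable), so it had no effect;
    // configure the feature on the mapper itself instead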
    mapper.enable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
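    // parse one OrcidDOI record per line of the input text file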
    val dataset: Dataset[OrcidDOI] = spark.createDataset(spark.sparkContext.textFile(sourcePath).map(s => mapper.readValue(s, classOf[OrcidDOI])))
    logger.info("Converting ORCID to OAF")
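    // convert each record to OAF, drop failed conversions, then group by
    // publication id and merge duplicates with the aggregator defined above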
    dataset.map(o => ORCIDToOAF.convertTOOAF(o)).filter(p => p != null)
      .map(d => (d.getId, d))
      .groupByKey(_._1)(Encoders.STRING)
      .agg(getPublicationAggregator().toColumn)
      .map(p => p._2)
      .write.mode(SaveMode.Overwrite).save(targetPath)
  }
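  /**
   * Entry point: reads master, sourcePath and targetPath from the arguments
   * declared in convert_map_to_oaf_params.json and runs the conversion.
   */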
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
    val parser = new ArgumentApplicationParser(IOUtils.toString(SparkConvertORCIDToOAF.getClass.getResourceAsStream("/eu/dnetlib/dhp/doiboost/convert_map_to_oaf_params.json")))
    parser.parseArgument(args)
    val spark: SparkSession =
      SparkSession
        .builder()
        .config(conf)
        .appName(getClass.getSimpleName)
        .master(parser.get("master"))
        .getOrCreate()

    val sourcePath = parser.get("sourcePath")
    val targetPath = parser.get("targetPath")
    run(spark, sourcePath, targetPath)
  }
}