package eu.dnetlib.doiboost.crossref

import eu.dnetlib.dhp.application.ArgumentApplicationParser
import eu.dnetlib.dhp.schema.oaf
import eu.dnetlib.dhp.schema.oaf.{Oaf, Publication, Relation, Dataset => OafDataset}
import org.apache.commons.io.IOUtils
import org.apache.hadoop.io.{IntWritable, Text}
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Dataset, Encoder, Encoders, SaveMode, SparkSession}
import org.slf4j.{Logger, LoggerFactory}

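// Lightweight holder for a single Crossref reference entry (author and first page)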
case class Reference(author: String, firstPage: String)

object SparkMapDumpIntoOAF {
  def main(args: Array[String]): Unit = {

    val logger: Logger = LoggerFactory.getLogger(SparkMapDumpIntoOAF.getClass)
    val conf: SparkConf = new SparkConf()
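    // Parse the job arguments (e.g. sourcePath, targetPath, master) declared in the bundled parameter definition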
    val parser = new ArgumentApplicationParser(IOUtils.toString(SparkMapDumpIntoOAF.getClass.getResourceAsStream("/eu/dnetlib/dhp/doiboost/convert_map_to_oaf_params.json")))
    parser.parseArgument(args)

    val spark: SparkSession =
      SparkSession
        .builder()
        .config(conf)
        .appName(SparkMapDumpIntoOAF.getClass.getSimpleName)
        .master(parser.get("master")).getOrCreate()

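    // Kryo encoders for the OAF model classes produced and read back below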
    implicit val oafEncoder: Encoder[Oaf] = Encoders.kryo[Oaf]
    implicit val mapEncoderPubs: Encoder[Publication] = Encoders.kryo[Publication]
    implicit val mapEncoderRelations: Encoder[Relation] = Encoders.kryo[Relation]
    implicit val mapEncoderDatasets: Encoder[oaf.Dataset] = Encoders.kryo[OafDataset]

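    // Note: sc is currently referenced only by the legacy RDD-based code commented out below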
    val sc = spark.sparkContext
    val targetPath = parser.get("targetPath")

    import spark.implicits._

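    // Convert each Crossref record (JSON payload wrapped in CrossrefDT) into zero or more OAF entities
    // and persist the mixed result as a single intermediate dataset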
    spark.read.load(parser.get("sourcePath")).as[CrossrefDT]
      .flatMap(k => Crossref2Oaf.convert(k.json))
      .filter(o => o != null)
      .write.mode(SaveMode.Overwrite).save(s"$targetPath/mixObject")

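    // Re-read the mixed output and split it into typed results: publications, relations and datasets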
    val ds: Dataset[Oaf] = spark.read.load(s"$targetPath/mixObject").as[Oaf]

    ds.filter(o => o.isInstanceOf[Publication]).map(o => o.asInstanceOf[Publication]).write.mode(SaveMode.Overwrite).save(s"$targetPath/publication")
    ds.filter(o => o.isInstanceOf[Relation]).map(o => o.asInstanceOf[Relation]).write.mode(SaveMode.Overwrite).save(s"$targetPath/relation")
    ds.filter(o => o.isInstanceOf[OafDataset]).map(o => o.asInstanceOf[OafDataset]).write.mode(SaveMode.Overwrite).save(s"$targetPath/dataset")

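    // Previous RDD-based implementation, kept for reference: it deduplicated publications and datasets by id
    // (preferring the most recent lastupdatetimestamp) and relations by source::target before writing them out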
    //    sc.sequenceFile(parser.get("sourcePath"), classOf[IntWritable], classOf[Text])
    //      .map(k => k._2.toString).map(CrossrefImporter.decompressBlob)
    //      .flatMap(k => Crossref2Oaf.convert(k)).saveAsObjectFile(s"${targetPath}/mixObject")
    //
    //    val inputRDD = sc.objectFile[Oaf](s"${targetPath}/mixObject").filter(p => p != null)
    //
    //    val distinctPubs: RDD[Publication] = inputRDD.filter(k => k != null && k.isInstanceOf[Publication])
    //      .map(k => k.asInstanceOf[Publication]).map { p: Publication => Tuple2(p.getId, p) }.reduceByKey { case (p1: Publication, p2: Publication) =>
    //        var r = if (p1 == null) p2 else p1
    //        if (p1 != null && p2 != null) {
    //          if (p1.getLastupdatetimestamp != null && p2.getLastupdatetimestamp != null) {
    //            if (p1.getLastupdatetimestamp < p2.getLastupdatetimestamp)
    //              r = p2
    //            else
    //              r = p1
    //          } else {
    //            r = if (p1.getLastupdatetimestamp == null) p2 else p1
    //          }
    //        }
    //        r
    //      }.map(_._2)
    //
    //    val pubs: Dataset[Publication] = spark.createDataset(distinctPubs)
    //    pubs.write.mode(SaveMode.Overwrite).save(s"${targetPath}/publication")
    //
    //    val distincDatasets: RDD[OafDataset] = inputRDD.filter(k => k != null && k.isInstanceOf[OafDataset])
    //      .map(k => k.asInstanceOf[OafDataset]).map(p => Tuple2(p.getId, p)).reduceByKey { case (p1: OafDataset, p2: OafDataset) =>
    //        var r = if (p1 == null) p2 else p1
    //        if (p1 != null && p2 != null) {
    //          if (p1.getLastupdatetimestamp != null && p2.getLastupdatetimestamp != null) {
    //            if (p1.getLastupdatetimestamp < p2.getLastupdatetimestamp)
    //              r = p2
    //            else
    //              r = p1
    //          } else {
    //            r = if (p1.getLastupdatetimestamp == null) p2 else p1
    //          }
    //        }
    //        r
    //      }.map(_._2)
    //
    //    spark.createDataset(distincDatasets).write.mode(SaveMode.Overwrite).save(s"${targetPath}/dataset")
    //
    //    val distinctRels = inputRDD.filter(k => k != null && k.isInstanceOf[Relation])
    //      .map(k => k.asInstanceOf[Relation]).map(r => (s"${r.getSource}::${r.getTarget}", r))
    //      .reduceByKey { case (p1: Relation, p2: Relation) =>
    //        if (p1 == null) p2 else p1
    //      }.map(_._2)
    //
    //    val rels: Dataset[Relation] = spark.createDataset(distinctRels)
    //    rels.write.mode(SaveMode.Overwrite).save(s"${targetPath}/relations")
  }
}