dnet-hadoop/dhp-workflows/dhp-doiboost/src/main/java/eu/dnetlib/doiboost/crossref/SparkMapDumpIntoOAF.scala


package eu.dnetlib.doiboost.crossref

import eu.dnetlib.dhp.application.ArgumentApplicationParser
import eu.dnetlib.dhp.schema.oaf
import eu.dnetlib.dhp.schema.oaf.{Oaf, Publication, Relation, Result}
import org.apache.commons.io.IOUtils
import org.apache.hadoop.io.{IntWritable, Text}
import org.apache.spark.SparkConf
import org.apache.spark.sql.{Dataset, Encoder, Encoders, SaveMode, SparkSession}
import org.slf4j.{Logger, LoggerFactory}
case class Reference(author: String, firstPage: String) {}
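
/**
 * Spark job that maps a dump of Crossref records into the OAF graph model:
 * each compressed blob is decompressed, converted with Crossref2Oaf, and the
 * resulting Publications, Datasets and Relations are written under targetPath.
 *
 * Expected parameters (keys declared in convert_map_to_oaf_params.json):
 * master, sourcePath (sequence file of compressed Crossref JSON blobs), targetPath.
 */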
object SparkMapDumpIntoOAF {

  def main(args: Array[String]): Unit = {
    val logger: Logger = LoggerFactory.getLogger(SparkMapDumpIntoOAF.getClass)
    val conf: SparkConf = new SparkConf()
    val parser = new ArgumentApplicationParser(
      IOUtils.toString(
        SparkMapDumpIntoOAF.getClass
          .getResourceAsStream("/eu/dnetlib/dhp/doiboost/convert_map_to_oaf_params.json")))
    parser.parseArgument(args)

    val spark: SparkSession =
      SparkSession
        .builder()
        .config(conf)
        .appName(SparkMapDumpIntoOAF.getClass.getSimpleName)
        .master(parser.get("master"))
        .getOrCreate()
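    // Bean encoders needed by createDataset for the OAF model classes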
    implicit val mapEncoderPubs: Encoder[Publication] = Encoders.bean(classOf[Publication])
    implicit val mapEncoderRelations: Encoder[Relation] = Encoders.bean(classOf[Relation])
    implicit val mapEncoderDatasets: Encoder[oaf.Dataset] = Encoders.bean(classOf[eu.dnetlib.dhp.schema.oaf.Dataset])

    val sc = spark.sparkContext
    val targetPath = parser.get("targetPath")
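    // Step 1: read the Crossref dump (compressed blobs in a sequence file),
    // convert each record to OAF entities and stage them as an object file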
    sc.sequenceFile(parser.get("sourcePath"), classOf[IntWritable], classOf[Text])
      .map(k => k._2.toString)
      .map(CrossrefImporter.decompressBlob)
      .flatMap(k => Crossref2Oaf.convert(k))
      .saveAsObjectFile(s"$targetPath/mixObject")
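
    // Step 2: reload the mixed OAF objects and write each entity type to its own output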
    val inputRDD = sc.objectFile[Oaf](s"$targetPath/mixObject").filter(p => p != null)

    val pubs: Dataset[Publication] = spark.createDataset(
      inputRDD.filter(k => k != null && k.isInstanceOf[Publication])
        .map(k => k.asInstanceOf[Publication]))
    pubs.write.mode(SaveMode.Overwrite).save(s"$targetPath/publication")

    val ds: Dataset[eu.dnetlib.dhp.schema.oaf.Dataset] = spark.createDataset(
      inputRDD.filter(k => k != null && k.isInstanceOf[eu.dnetlib.dhp.schema.oaf.Dataset])
        .map(k => k.asInstanceOf[eu.dnetlib.dhp.schema.oaf.Dataset]))
    ds.write.mode(SaveMode.Overwrite).save(s"$targetPath/dataset")

    val rels: Dataset[Relation] = spark.createDataset(
      inputRDD.filter(k => k != null && k.isInstanceOf[Relation])
        .map(k => k.asInstanceOf[Relation]))
    rels.write.mode(SaveMode.Overwrite).save(s"$targetPath/relations")
  }
}