package eu.dnetlib.dhp.sx.graph

import com.fasterxml.jackson.databind.ObjectMapper
import eu.dnetlib.dhp.application.ArgumentApplicationParser
import eu.dnetlib.dhp.schema.oaf.{OtherResearchProduct, Publication, Relation, Result, Software, Dataset => OafDataset}
import org.apache.commons.io.IOUtils
import org.apache.commons.lang3.StringUtils
import org.apache.spark.SparkConf
import org.apache.spark.sql.{Encoder, Encoders, SaveMode, SparkSession}
import org.slf4j.{Logger, LoggerFactory}

import scala.collection.JavaConverters._

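/** Converts the line-oriented JSON dump of the OAF graph into typed Spark Datasets
  * serialized with Kryo: one folder per result type under targetPath/entities, plus
  * targetPath/relation for the relations.
  *
  * Illustrative invocation (jar name and parameter values are examples, not the
  * canonical ones):
  *
  *   spark-submit --class eu.dnetlib.dhp.sx.graph.SparkConvertRDDtoDataset dhp-graph-mapper.jar \
  *     --master yarn --sourcePath /tmp/graph_raw --targetPath /tmp/graph_ds --filterRelation citation
  */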
object SparkConvertRDDtoDataset {

  def main(args: Array[String]): Unit = {

    val log: Logger = LoggerFactory.getLogger(getClass)
    val conf: SparkConf = new SparkConf()
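    // The CLI contract (master, sourcePath, targetPath, filterRelation) is defined in the
    // JSON descriptor packaged with the application.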
    val parser = new ArgumentApplicationParser(
      IOUtils.toString(
        getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/convert_dataset_json_params.json")
      )
    )
    parser.parseArgument(args)

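    // The master is read from the parsed arguments rather than inherited from spark-submit,
    // so the job can also run locally (e.g. with master = local[*]).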
    val spark: SparkSession =
      SparkSession
        .builder()
        .config(conf)
        .appName(getClass.getSimpleName)
        .master(parser.get("master"))
        .getOrCreate()

    val sourcePath = parser.get("sourcePath")
    log.info(s"sourcePath -> $sourcePath")
    val targetPath = parser.get("targetPath")
    log.info(s"targetPath -> $targetPath")

    val filterRelation = parser.get("filterRelation")
    log.info(s"filterRelation -> $filterRelation")

    val entityPath = s"$targetPath/entities"
    val relPath = s"$targetPath/relation"
    val mapper = new ObjectMapper()
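    // The OAF model classes are plain Java beans, so Kryo encoders are needed in place of
    // Spark's default bean/product encoders.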
    implicit val datasetEncoder: Encoder[OafDataset] = Encoders.kryo(classOf[OafDataset])
    implicit val publicationEncoder: Encoder[Publication] = Encoders.kryo(classOf[Publication])
    implicit val relationEncoder: Encoder[Relation] = Encoders.kryo(classOf[Relation])
    implicit val orpEncoder: Encoder[OtherResearchProduct] = Encoders.kryo(classOf[OtherResearchProduct])
    implicit val softwareEncoder: Encoder[Software] = Encoders.kryo(classOf[Software])

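    // Every entity type follows the same pattern: read the line-oriented JSON dump, parse
    // it with Jackson, drop records flagged as deleted by inference, and persist the result
    // as a Kryo-encoded Dataset.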
    log.info("Converting dataset")
    val rddDataset = spark.sparkContext
      .textFile(s"$sourcePath/dataset")
      .map(s => mapper.readValue(s, classOf[OafDataset]))
      .filter(r => r.getDataInfo != null && r.getDataInfo.getDeletedbyinference == false)
    spark
      .createDataset(rddDataset)
      .as[OafDataset]
      .write
      .mode(SaveMode.Overwrite)
      .save(s"$entityPath/dataset")

    log.info("Converting publication")
    val rddPublication = spark.sparkContext
      .textFile(s"$sourcePath/publication")
      .map(s => mapper.readValue(s, classOf[Publication]))
      .filter(r => r.getDataInfo != null && r.getDataInfo.getDeletedbyinference == false)
    spark
      .createDataset(rddPublication)
      .as[Publication]
      .write
      .mode(SaveMode.Overwrite)
      .save(s"$entityPath/publication")

    log.info("Converting software")
    val rddSoftware = spark.sparkContext
      .textFile(s"$sourcePath/software")
      .map(s => mapper.readValue(s, classOf[Software]))
      .filter(r => r.getDataInfo != null && r.getDataInfo.getDeletedbyinference == false)
    spark
      .createDataset(rddSoftware)
      .as[Software]
      .write
      .mode(SaveMode.Overwrite)
      .save(s"$entityPath/software")

    log.info("Converting otherresearchproduct")
    val rddOtherResearchProduct = spark.sparkContext
      .textFile(s"$sourcePath/otherresearchproduct")
      .map(s => mapper.readValue(s, classOf[OtherResearchProduct]))
      .filter(r => r.getDataInfo != null && r.getDataInfo.getDeletedbyinference == false)
    spark
      .createDataset(rddOtherResearchProduct)
      .as[OtherResearchProduct]
      .write
      .mode(SaveMode.Overwrite)
      .save(s"$entityPath/otherresearchproduct")

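    // Relations are handled in two modes: when filterRelation is set, only relations of
    // that subRelType are kept; otherwise all relations are kept except the semantic
    // classes produced by deduplication and document-similarity matching.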
    log.info("Converting Relation")

    if (StringUtils.isNotBlank(filterRelation)) {
      val rddRelation = spark.sparkContext
        .textFile(s"$sourcePath/relation")
        .map(s => mapper.readValue(s, classOf[Relation]))
        .filter(r => r.getDataInfo != null && r.getDataInfo.getDeletedbyinference == false)
        // keep only result-to-result links ("50" is the id prefix of result entities)
        .filter(r => r.getSource.startsWith("50") && r.getTarget.startsWith("50"))
        // filter out OpenCitations relations
        .filter(r =>
          r.getCollectedfrom != null && r.getCollectedfrom.size() > 0 && !r.getCollectedfrom.asScala
            .exists(k => "opencitations".equalsIgnoreCase(k.getValue))
        )
        .filter(r => r.getSubRelType != null && r.getSubRelType.equalsIgnoreCase(filterRelation))
      spark.createDataset(rddRelation).as[Relation].write.mode(SaveMode.Overwrite).save(relPath)
    } else {
      // Semantic classes produced by deduplication (merges/ismergedin) and by the
      // document-similarity matching; these must not reach the converted graph.
      val relationSemanticFilter = List(
        "merges",
        "ismergedin",
        "HasAmongTopNSimilarDocuments",
        "IsAmongTopNSimilarDocuments"
      )

      val rddRelation = spark.sparkContext
        .textFile(s"$sourcePath/relation")
        .map(s => mapper.readValue(s, classOf[Relation]))
        .filter(r => r.getDataInfo != null && r.getDataInfo.getDeletedbyinference == false)
        .filter(r => r.getSource.startsWith("50") && r.getTarget.startsWith("50"))
        // filter out OpenCitations relations
        .filter(r =>
          r.getCollectedfrom != null && r.getCollectedfrom.size() > 0 && !r.getCollectedfrom.asScala
            .exists(k => "opencitations".equalsIgnoreCase(k.getValue))
        )
        .filter(r => !relationSemanticFilter.exists(k => k.equalsIgnoreCase(r.getRelClass)))
      spark.createDataset(rddRelation).as[Relation].write.mode(SaveMode.Overwrite).save(relPath)
    }
  }
}