package eu.dnetlib.dhp.datacite

import com.fasterxml.jackson.databind.ObjectMapper
import eu.dnetlib.dhp.application.AbstractScalaApplication
import eu.dnetlib.dhp.collection.CollectionUtils.fixRelations
import eu.dnetlib.dhp.common.Constants.{MDSTORE_DATA_PATH, MDSTORE_SIZE_PATH}
import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup
import eu.dnetlib.dhp.schema.mdstore.{MDStoreVersion, MetadataRecord}
import eu.dnetlib.dhp.schema.oaf.Oaf
import eu.dnetlib.dhp.utils.DHPUtils.writeHdfsFile
import eu.dnetlib.dhp.utils.ISLookupClientFactory
import org.apache.spark.sql.{Encoder, Encoders, SaveMode, SparkSession}
import org.slf4j.{Logger, LoggerFactory}

class GenerateDataciteDatasetSpark(propertyPath: String, args: Array[String], log: Logger)
    extends AbstractScalaApplication(propertyPath, args, log) {

  /** All Spark applications run this method,
    * where the whole logic of the Spark node is defined.
    */
  override def run(): Unit = {
    val sourcePath = parser.get("sourcePath")
    log.info(s"sourcePath is '$sourcePath'")
    val exportLinks = "true".equalsIgnoreCase(parser.get("exportLinks"))
    log.info(s"exportLinks is '$exportLinks'")
    val isLookupUrl: String = parser.get("isLookupUrl")
    log.info(s"isLookupUrl is '$isLookupUrl'")
    val isLookupService = ISLookupClientFactory.getLookUpService(isLookupUrl)
    val vocabularies = VocabularyGroup.loadVocsFromIS(isLookupService)
    require(vocabularies != null)

    val mdstoreOutputVersion = parser.get("mdstoreOutputVersion")
    log.info(s"mdstoreOutputVersion is '$mdstoreOutputVersion'")
    val mapper = new ObjectMapper()
    val cleanedMdStoreVersion = mapper.readValue(mdstoreOutputVersion, classOf[MDStoreVersion])
    val outputBasePath = cleanedMdStoreVersion.getHdfsPath
    log.info(s"outputBasePath is '$outputBasePath'")
    val targetPath = s"$outputBasePath/$MDSTORE_DATA_PATH"
    log.info(s"targetPath is '$targetPath'")

    generateDataciteDataset(sourcePath, exportLinks, vocabularies, targetPath, spark)

    reportTotalSize(targetPath, outputBasePath)
  }

  /** For working with an MDStore we need to store the size of the
    * current dataset in a file on HDFS.
    *
    * @param targetPath     the path of the generated dataset
    * @param outputBasePath the base path of the MDStore version on HDFS
    */
  def reportTotalSize(targetPath: String, outputBasePath: String): Unit = {
    val total_items = spark.read.load(targetPath).count()
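    // The count is persisted as plain text under MDSTORE_SIZE_PATH, where the
    // MDStore manager is expected to read back the size of this version.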
    writeHdfsFile(spark.sparkContext.hadoopConfiguration, s"$total_items", outputBasePath + MDSTORE_SIZE_PATH)
  }

  /** Generate the transformed and cleaned OAF Dataset from the native one.
    *
    * @param sourcePath   the sourcePath of the native Dataset in JSON/Datacite format
    * @param exportLinks  if true, unresolved links are also generated
    * @param vocabularies the vocabularies used for cleaning
    * @param targetPath   the targetPath of the result Dataset
    * @param spark        the active SparkSession
    */
  def generateDataciteDataset(sourcePath: String, exportLinks: Boolean, vocabularies: VocabularyGroup, targetPath: String, spark: SparkSession): Unit = {
    require(spark != null)
    import spark.implicits._
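
    // Oaf and MetadataRecord are plain Java beans rather than Scala case classes,
    // so explicit Kryo encoders are needed for the Dataset operations below.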
    implicit val mrEncoder: Encoder[MetadataRecord] = Encoders.kryo[MetadataRecord]
    implicit val resEncoder: Encoder[Oaf] = Encoders.kryo[Oaf]
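
    // Pipeline sketch: load the native Datacite records, keep only the active ones,
    // map each one to zero or more OAF entities (plus unresolved links when
    // exportLinks is enabled), normalize the relations and overwrite the target path.
    // DataciteType is assumed to expose at least the isActive, json and timestamp
    // fields used below.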
    spark.read
      .load(sourcePath)
      .as[DataciteType]
      .filter(d => d.isActive)
      .flatMap(d => DataciteToOAFTransformation.generateOAF(d.json, d.timestamp, d.timestamp, vocabularies, exportLinks))
      .filter(d => d != null)
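      // fixRelations is expected to complement each Relation with its inverse and
      // to pass the other OAF entities through unchanged.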
      .flatMap(i => fixRelations(i))
      .filter(i => i != null)
      .write.mode(SaveMode.Overwrite)
      .save(targetPath)
  }
}

object GenerateDataciteDatasetSpark {

  val log: Logger = LoggerFactory.getLogger(GenerateDataciteDatasetSpark.getClass)

  def main(args: Array[String]): Unit = {
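    // The property path points to the JSON descriptor of the expected CLI
    // arguments; initialize() is expected to parse them and set up the
    // SparkSession before run() executes.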
    new GenerateDataciteDatasetSpark("/eu/dnetlib/dhp/datacite/generate_dataset_params.json", args, log)
      .initialize()
      .run()
  }
}