package eu.dnetlib.dhp.sx.bio

import eu.dnetlib.dhp.application.ArgumentApplicationParser
import eu.dnetlib.dhp.collection.CollectionUtils
import eu.dnetlib.dhp.schema.oaf.Oaf
import eu.dnetlib.dhp.sx.bio.BioDBToOAF.ScholixResolved
import org.apache.commons.io.IOUtils
import org.apache.spark.SparkConf
import org.apache.spark.sql.{Encoder, Encoders, SparkSession}
import org.slf4j.{Logger, LoggerFactory}

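/** Spark job that transforms a dump of a supported bio-entity database
  * (UNIPROT, PDB, SCHOLIX, CROSSREF_LINKS) into OAF records and saves the
  * resulting dataset to targetPath via CollectionUtils.saveDataset.
  */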
object SparkTransformBioDatabaseToOAF {

  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
    val log: Logger = LoggerFactory.getLogger(getClass)

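    // Load the argument definitions from the JSON resource bundled with the module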
    val parser = new ArgumentApplicationParser(
      IOUtils.toString(
        getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/bio/ebi/bio_to_oaf_params.json")
      )
    )
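
    // Read the job parameters and log their values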
    parser.parseArgument(args)
    val database: String = parser.get("database")
    log.info("database: {}", database)
    val dbPath: String = parser.get("dbPath")
    log.info("dbPath: {}", dbPath)
    val targetPath: String = parser.get("targetPath")
    log.info("targetPath: {}", targetPath)
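
    // Build the Spark session; the master URL is supplied on the command line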
    val spark: SparkSession =
      SparkSession
        .builder()
        .config(conf)
        .appName(getClass.getSimpleName)
        .master(parser.get("master"))
        .getOrCreate()
    val sc = spark.sparkContext
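
    // Encode the Oaf class hierarchy with Kryo; spark.implicits supplies the
    // encoder for the ScholixResolved case class used below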
    implicit val resultEncoder: Encoder[Oaf] = Encoders.kryo(classOf[Oaf])
    import spark.implicits._
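
    // Dispatch on the database type; an unrecognised value fails with a scala.MatchError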
    database.toUpperCase() match {
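      // UNIPROT: one record per input line, flatMapped since a record can yield several OAF entities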
      case "UNIPROT" =>
        CollectionUtils.saveDataset(
          spark.createDataset(sc.textFile(dbPath).flatMap(i => BioDBToOAF.uniprotToOAF(i))),
          targetPath
        )
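      // PDB: line-oriented dump, also flatMapped to zero or more OAF records per entry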
      case "PDB" =>
        CollectionUtils.saveDataset(
          spark.createDataset(sc.textFile(dbPath).flatMap(i => BioDBToOAF.pdbTOOaf(i))),
          targetPath
        )
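      // SCHOLIX: a Spark dataset of resolved Scholix entries, read with the
      // default datasource (parquet unless configured otherwise)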
      case "SCHOLIX" =>
        CollectionUtils.saveDataset(
          spark.read.load(dbPath).as[ScholixResolved].map(i => BioDBToOAF.scholixResolvedToOAF(i)),
          targetPath
        )
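      // CROSSREF_LINKS: exactly one OAF record per input line, hence map instead of flatMap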
      case "CROSSREF_LINKS" =>
        CollectionUtils.saveDataset(
          spark.createDataset(sc.textFile(dbPath).map(i => BioDBToOAF.crossrefLinksToOaf(i))),
          targetPath
        )
    }
  }
}