2020-05-22 15:15:09 +02:00
|
|
|
package eu.dnetlib.doiboost.orcid
|
|
|
|
|
2020-12-23 16:59:52 +01:00
|
|
|
import com.fasterxml.jackson.databind.ObjectMapper
|
2020-12-07 19:59:33 +01:00
|
|
|
import eu.dnetlib.dhp.schema.oaf.Publication
|
2021-04-13 17:47:43 +02:00
|
|
|
import org.apache.spark.sql.{Dataset, Encoder, Encoders, SparkSession}
|
2020-06-09 18:07:14 +02:00
|
|
|
import org.junit.jupiter.api.Assertions._
|
2020-05-22 15:15:09 +02:00
|
|
|
import org.junit.jupiter.api.Test
|
2021-04-13 17:47:43 +02:00
|
|
|
import org.junit.jupiter.api.io.TempDir
|
2020-05-22 15:15:09 +02:00
|
|
|
import org.slf4j.{Logger, LoggerFactory}
|
|
|
|
|
2021-04-13 17:47:43 +02:00
|
|
|
import java.nio.file.Path
|
2020-05-22 15:15:09 +02:00
|
|
|
import scala.io.Source
|
|
|
|
|
|
|
|
/**
 * Unit tests for the ORCID → OAF (OpenAIRE data model) mapping.
 *
 * Covers two stages of the pipeline:
 *  - line-level extraction of values from the raw ORCID dump (testExtractData)
 *  - the full Spark conversion from ORCID datasets to OAF Publications (testOAFConvert)
 */
class MappingORCIDToOAFTest {

  // Diagnostic logger; named after the mapper under test so log lines are attributable.
  val logger: Logger = LoggerFactory.getLogger(ORCIDToOAF.getClass)

  // Shared Jackson mapper used to pretty-print converted records for inspection.
  val mapper = new ObjectMapper()

  /**
   * Each line of the "dataOutput" fixture must yield a non-null value from
   * ORCIDToOAF.extractValueFromInputString, i.e. no record in the sample is dropped.
   */
  @Test
  def testExtractData():Unit ={
    val json = Source.fromInputStream(getClass.getResourceAsStream("dataOutput")).mkString
    assertNotNull(json)
    assertFalse(json.isEmpty)
    json.lines.foreach(s => {
      assertNotNull(ORCIDToOAF.extractValueFromInputString(s))
    })
  }

  /**
   * End-to-end conversion: runs SparkConvertORCIDToOAF over the bundled ORCID
   * datasets and checks that every ORCID work with authors produced exactly one
   * OAF Publication.
   *
   * @param testDir JUnit-managed temporary directory for the working/output paths;
   *                cleaned up automatically after the test.
   */
  @Test
  def testOAFConvert(@TempDir testDir: Path):Unit ={
    val sourcePath: String = getClass.getResource("/eu/dnetlib/doiboost/orcid/datasets").getPath
    val targetPath: String = s"${testDir.toString}/output/orcidPublication"
    val workingPath = s"${testDir.toString}/wp/"

    val spark: SparkSession =
      SparkSession
        .builder()
        .appName(getClass.getSimpleName)
        .master("local[*]")
        .getOrCreate()
    try {
      // Publication is a Java bean without a product encoder, so kryo is required.
      implicit val mapEncoderPubs: Encoder[Publication] = Encoders.kryo[Publication]
      import spark.implicits._

      SparkConvertORCIDToOAF.run(spark, sourcePath, workingPath, targetPath)

      // Count of intermediate ORCID works (with resolved authors) ...
      val oA = spark.read.load(s"$workingPath/orcidworksWithAuthor").as[ORCIDItem].count()

      // ... must equal the count of final Publications: the conversion is 1:1.
      val p: Dataset[Publication] = spark.read.load(targetPath).as[Publication]
      assertEquals(oA, p.count())

      // Log one converted record for manual inspection of the mapping output.
      logger.info(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(p.first()))
    } finally {
      // Always release the local Spark context so later tests in the same JVM
      // can create their own session.
      spark.stop()
    }
  }

}
|