forked from D-Net/dnet-hadoop
code formatting
parent 0727f0ef48
commit f62c4e05cd
@@ -255,7 +255,6 @@ public class PMArticle implements Serializable {
 		return grants;
 	}
 
-
 	public String getPmcId() {
 		return pmcId;
 	}
@@ -56,13 +56,11 @@ object PubMedToOaf {
       null
   }
 
 
   def createOriginalOpenaireId(article: PMArticle): String = {
     if (StringUtils.isNotEmpty(article.getPmcId)) {
       val md5 = DHPUtils.md5(s"$OAI_HEADER${article.getPmcId.replace("PMC", "")}")
       s"$OLD_PMC_PREFIX$md5"
-    }
-    else
+    } else
       null
-
   }
@@ -150,9 +148,7 @@ object PubMedToOaf {
       dataInfo
     )
 
-
-    if (StringUtils.isNotBlank(article.getPmcId))
-    {
+    if (StringUtils.isNotBlank(article.getPmcId)) {
       pidList += OafMapperUtils.structuredProperty(
         article.getPmcId,
         PidType.pmc.toString,
@@ -48,9 +48,6 @@ class DataciteToOAFTest extends AbstractVocabularyTest {
 
   }
 
-
-
-
   @Test
   def testConvert(): Unit = {
 
@@ -76,11 +73,8 @@ class DataciteToOAFTest extends AbstractVocabularyTest {
 
     assertEquals(100, nativeSize)
 
-    val result: Dataset[String] = spark.read.text(targetPath).as[String].map(DataciteUtilityTest.convertToOAF)(Encoders.STRING)
+    val result: Dataset[String] =
+      spark.read.text(targetPath).as[String].map(DataciteUtilityTest.convertToOAF)(Encoders.STRING)
 
-
-
-
-
     result
       .groupBy(col("value").alias("class"))
@@ -10,7 +10,6 @@ object DataciteUtilityTest {
     implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats
     lazy val json = parse(input)
 
-
    val isRelation: String = (json \\ "source").extractOrElse("NULL")
 
    if (isRelation != "NULL") {
@@ -76,7 +76,6 @@ class BioScholixTest extends AbstractVocabularyTest {
 
   }
 
-
   private def checkPMArticle(article: PMArticle): Unit = {
     assertNotNull(article.getPmid)
     assertNotNull(article.getTitle)
@@ -90,12 +89,13 @@ class BioScholixTest extends AbstractVocabularyTest {
 
   @Test
   def testParsingPubmedXML(): Unit = {
-    val xml = new XMLEventReader(Source.fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed.xml")))
+    val xml = new XMLEventReader(
+      Source.fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed.xml"))
+    )
     val parser = new PMParser(xml)
     parser.foreach(checkPMArticle)
   }
 
-
   private def checkPubmedPublication(o: Oaf): Unit = {
     assertTrue(o.isInstanceOf[Publication])
     val p: Publication = o.asInstanceOf[Publication]
@@ -112,22 +112,20 @@ class BioScholixTest extends AbstractVocabularyTest {
     assertNotNull(p.getOriginalId)
     p.getOriginalId.asScala.foreach(oId => assertNotNull(oId))
 
-    val hasPMC = p.getInstance().asScala.exists(i => i.getPid.asScala.exists(pid => pid.getQualifier.getClassid.equalsIgnoreCase(PidType.pmc.toString)))
+    val hasPMC = p
+      .getInstance()
+      .asScala
+      .exists(i => i.getPid.asScala.exists(pid => pid.getQualifier.getClassid.equalsIgnoreCase(PidType.pmc.toString)))
 
     if (hasPMC) {
       assertTrue(p.getOriginalId.asScala.exists(oId => oId.startsWith("od_______267::")))
     }
   }
 
-
   @Test
   def testPubmedOriginalID(): Unit = {
     val article: PMArticle = new PMArticle
 
-
-
     article.setPmid("1234")
 
-
-
     article.setTitle("a Title")
@@ -146,7 +144,6 @@ class BioScholixTest extends AbstractVocabularyTest {
 
     // VERIFY ORIGINAL ID GENERATE IN OLD WAY USING PMC IDENTIFIER EXISTS
 
-
    val oldOpenaireID = "od_______267::0000072375bc0e68fa09d4e6b7658248"
 
    val hasOldOpenAIREID = publication.getOriginalId.asScala.exists(o => o.equalsIgnoreCase(oldOpenaireID))
@@ -154,22 +151,18 @@ class BioScholixTest extends AbstractVocabularyTest {
     assertTrue(hasOldOpenAIREID)
   }
 
-
   @Test
   def testPubmedMapping(): Unit = {
 
-    val xml = new XMLEventReader(Source.fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed.xml")))
+    val xml = new XMLEventReader(
+      Source.fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed.xml"))
+    )
     val parser = new PMParser(xml)
     val results = ListBuffer[Oaf]()
     parser.foreach(x => results += PubMedToOaf.convert(x, vocabularies))
 
-
-
-
     results.foreach(checkPubmedPublication)
 
-
-
   }
 
   @Test