From f62c4e05cd239830628154dc09fb7d386f135201 Mon Sep 17 00:00:00 2001
From: Claudio Atzori
Date: Fri, 29 Jul 2022 11:56:01 +0200
Subject: [PATCH] code formatting

---
 .../dnetlib/dhp/sx/bio/pubmed/PMArticle.java  |  1 -
 .../dhp/sx/bio/pubmed/PubMedToOaf.scala       | 40 +++++++--------
 .../dhp/datacite/DataciteToOAFTest.scala      | 10 +---
 .../dhp/datacite/DataciteUtilityTest.scala    | 11 ++--
 .../dnetlib/dhp/sx/bio/BioScholixTest.scala   | 51 ++++++++-----------
 .../raw/MigrateDbEntitiesApplicationTest.java | 12 ++---
 6 files changed, 53 insertions(+), 72 deletions(-)

diff --git a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/sx/bio/pubmed/PMArticle.java b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/sx/bio/pubmed/PMArticle.java
index 9287a8cdd..3fb814606 100644
--- a/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/sx/bio/pubmed/PMArticle.java
+++ b/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/sx/bio/pubmed/PMArticle.java
@@ -255,7 +255,6 @@ public class PMArticle implements Serializable {
 		return grants;
 	}
 
-
 	public String getPmcId() {
 		return pmcId;
 	}
diff --git a/dhp-workflows/dhp-aggregation/src/main/scala/eu/dnetlib/dhp/sx/bio/pubmed/PubMedToOaf.scala b/dhp-workflows/dhp-aggregation/src/main/scala/eu/dnetlib/dhp/sx/bio/pubmed/PubMedToOaf.scala
index 24a1fa62b..42bafc93e 100644
--- a/dhp-workflows/dhp-aggregation/src/main/scala/eu/dnetlib/dhp/sx/bio/pubmed/PubMedToOaf.scala
+++ b/dhp-workflows/dhp-aggregation/src/main/scala/eu/dnetlib/dhp/sx/bio/pubmed/PubMedToOaf.scala
@@ -56,13 +56,11 @@ object PubMedToOaf {
       null
   }
 
-
-  def createOriginalOpenaireId(article:PMArticle) :String = {
+  def createOriginalOpenaireId(article: PMArticle): String = {
     if (StringUtils.isNotEmpty(article.getPmcId)) {
-      val md5 = DHPUtils.md5(s"$OAI_HEADER${article.getPmcId.replace("PMC","")}")
+      val md5 = DHPUtils.md5(s"$OAI_HEADER${article.getPmcId.replace("PMC", "")}")
       s"$OLD_PMC_PREFIX$md5"
-    }
-    else
+    } else
       null
   }
 
@@ -142,26 +140,24 @@
     val pidList = ListBuffer[StructuredProperty]()
 
     pidList += OafMapperUtils.structuredProperty(
-        article.getPmid,
-        PidType.pmid.toString,
-        PidType.pmid.toString,
+      article.getPmid,
+      PidType.pmid.toString,
+      PidType.pmid.toString,
+      ModelConstants.DNET_PID_TYPES,
+      ModelConstants.DNET_PID_TYPES,
+      dataInfo
+    )
+
+    if (StringUtils.isNotBlank(article.getPmcId)) {
+      pidList += OafMapperUtils.structuredProperty(
+        article.getPmcId,
+        PidType.pmc.toString,
+        PidType.pmc.toString,
         ModelConstants.DNET_PID_TYPES,
         ModelConstants.DNET_PID_TYPES,
         dataInfo
       )
-
-
-    if (StringUtils.isNotBlank(article.getPmcId))
-    {
-      pidList += OafMapperUtils.structuredProperty(
-        article.getPmcId,
-        PidType.pmc.toString,
-        PidType.pmc.toString,
-        ModelConstants.DNET_PID_TYPES,
-        ModelConstants.DNET_PID_TYPES,
-        dataInfo
-      )
-    }
+    }
 
     if (pidList == null) return null
 
@@ -297,7 +293,7 @@
     if (StringUtils.isNotEmpty(article.getPmcId)) {
       val originalIDS = ListBuffer[String]()
       originalIDS += createOriginalOpenaireId(article)
-      pidList.map(s => s.getValue).foreach(p =>originalIDS += p)
+      pidList.map(s => s.getValue).foreach(p => originalIDS += p)
       result.setOriginalId(originalIDS.asJava)
     } else
       result.setOriginalId(pidList.map(s => s.getValue).asJava)
diff --git a/dhp-workflows/dhp-aggregation/src/test/scala/eu/dnetlib/dhp/datacite/DataciteToOAFTest.scala b/dhp-workflows/dhp-aggregation/src/test/scala/eu/dnetlib/dhp/datacite/DataciteToOAFTest.scala
index 68230b477..48da049da 100644
--- a/dhp-workflows/dhp-aggregation/src/test/scala/eu/dnetlib/dhp/datacite/DataciteToOAFTest.scala
+++ b/dhp-workflows/dhp-aggregation/src/test/scala/eu/dnetlib/dhp/datacite/DataciteToOAFTest.scala
@@ -48,9 +48,6 @@ class DataciteToOAFTest extends AbstractVocabularyTest {
 
   }
 
-
-
-
   @Test
   def testConvert(): Unit = {
 
@@ -76,11 +73,8 @@ class DataciteToOAFTest extends AbstractVocabularyTest {
 
     assertEquals(100, nativeSize)
 
-    val result: Dataset[String] = spark.read.text(targetPath).as[String].map(DataciteUtilityTest.convertToOAF)(Encoders.STRING)
-
-
-
-
+    val result: Dataset[String] =
+      spark.read.text(targetPath).as[String].map(DataciteUtilityTest.convertToOAF)(Encoders.STRING)
 
     result
       .groupBy(col("value").alias("class"))
diff --git a/dhp-workflows/dhp-aggregation/src/test/scala/eu/dnetlib/dhp/datacite/DataciteUtilityTest.scala b/dhp-workflows/dhp-aggregation/src/test/scala/eu/dnetlib/dhp/datacite/DataciteUtilityTest.scala
index 04d3c4a58..942e0958e 100644
--- a/dhp-workflows/dhp-aggregation/src/test/scala/eu/dnetlib/dhp/datacite/DataciteUtilityTest.scala
+++ b/dhp-workflows/dhp-aggregation/src/test/scala/eu/dnetlib/dhp/datacite/DataciteUtilityTest.scala
@@ -6,25 +6,24 @@ import org.json4s.jackson.JsonMethods.parse
 
 object DataciteUtilityTest {
 
-  def convertToOAF(input:String) : String = {
+  def convertToOAF(input: String): String = {
     implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats
     lazy val json = parse(input)
 
-
-    val isRelation:String = (json \\ "source").extractOrElse("NULL")
+    val isRelation: String = (json \\ "source").extractOrElse("NULL")
 
     if (isRelation != "NULL") {
       return "Relation"
     }
 
     val iType: List[String] = for {
-      JObject(instance) <- json \\ "instance"
+      JObject(instance)                             <- json \\ "instance"
       JField("instancetype", JObject(instancetype)) <- instance
-      JField("classname", JString(classname)) <- instancetype
+      JField("classname", JString(classname))       <- instancetype
     } yield classname
 
-    val l:String =iType.head.toLowerCase()
+    val l: String = iType.head.toLowerCase()
     l
   }
 
diff --git a/dhp-workflows/dhp-aggregation/src/test/scala/eu/dnetlib/dhp/sx/bio/BioScholixTest.scala b/dhp-workflows/dhp-aggregation/src/test/scala/eu/dnetlib/dhp/sx/bio/BioScholixTest.scala
index b021e5e07..827d23e72 100644
--- a/dhp-workflows/dhp-aggregation/src/test/scala/eu/dnetlib/dhp/sx/bio/BioScholixTest.scala
+++ b/dhp-workflows/dhp-aggregation/src/test/scala/eu/dnetlib/dhp/sx/bio/BioScholixTest.scala
@@ -76,12 +76,11 @@ class BioScholixTest extends AbstractVocabularyTest {
 
   }
 
-
-  private def checkPMArticle(article:PMArticle): Unit = {
+  private def checkPMArticle(article: PMArticle): Unit = {
     assertNotNull(article.getPmid)
     assertNotNull(article.getTitle)
     assertNotNull(article.getAuthors)
-    article.getAuthors.asScala.foreach{a =>
+    article.getAuthors.asScala.foreach { a =>
       assertNotNull(a)
       assertNotNull(a.getFullName)
     }
@@ -89,20 +88,21 @@ class BioScholixTest extends AbstractVocabularyTest {
   }
 
   @Test
-  def testParsingPubmedXML():Unit = {
-    val xml = new XMLEventReader(Source.fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed.xml")))
+  def testParsingPubmedXML(): Unit = {
+    val xml = new XMLEventReader(
+      Source.fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed.xml"))
+    )
     val parser = new PMParser(xml)
     parser.foreach(checkPMArticle)
   }
 
-
-  private def checkPubmedPublication(o:Oaf): Unit = {
+  private def checkPubmedPublication(o: Oaf): Unit = {
     assertTrue(o.isInstanceOf[Publication])
-    val p:Publication = o.asInstanceOf[Publication]
+    val p: Publication = o.asInstanceOf[Publication]
     assertNotNull(p.getId)
     assertNotNull(p.getTitle)
-    p.getTitle.asScala.foreach(t =>assertNotNull(t.getValue))
-    p.getAuthor.asScala.foreach(a =>assertNotNull(a.getFullname))
+    p.getTitle.asScala.foreach(t => assertNotNull(t.getValue))
+    p.getAuthor.asScala.foreach(a => assertNotNull(a.getFullname))
     assertNotNull(p.getInstance())
     p.getInstance().asScala.foreach { i =>
       assertNotNull(i.getCollectedfrom)
@@ -112,28 +112,26 @@
     assertNotNull(p.getOriginalId)
     p.getOriginalId.asScala.foreach(oId => assertNotNull(oId))
 
-
-    val hasPMC = p.getInstance().asScala.exists(i => i.getPid.asScala.exists(pid => pid.getQualifier.getClassid.equalsIgnoreCase(PidType.pmc.toString)))
-
-
+    val hasPMC = p
+      .getInstance()
+      .asScala
+      .exists(i => i.getPid.asScala.exists(pid => pid.getQualifier.getClassid.equalsIgnoreCase(PidType.pmc.toString)))
 
     if (hasPMC) {
       assertTrue(p.getOriginalId.asScala.exists(oId => oId.startsWith("od_______267::")))
     }
   }
 
-
   @Test
-  def testPubmedOriginalID():Unit = {
-    val article:PMArticle = new PMArticle
-
+  def testPubmedOriginalID(): Unit = {
+    val article: PMArticle = new PMArticle
     article.setPmid("1234")
     article.setTitle("a Title")
 
     // VERIFY PUBLICATION IS NOT NULL
-    article.getPublicationTypes.add( new PMSubject("article",null, null))
+    article.getPublicationTypes.add(new PMSubject("article", null, null))
 
     var publication = PubMedToOaf.convert(article, vocabularies).asInstanceOf[Publication]
     assertNotNull(publication)
     assertEquals("50|pmid________::81dc9bdb52d04dc20036dbd8313ed055", publication.getId)
 
@@ -146,30 +144,25 @@
 
     // VERIFY ORIGINAL ID GENERATE IN OLD WAY USING PMC IDENTIFIER EXISTS
 
-
-    val oldOpenaireID ="od_______267::0000072375bc0e68fa09d4e6b7658248"
+    val oldOpenaireID = "od_______267::0000072375bc0e68fa09d4e6b7658248"
 
     val hasOldOpenAIREID = publication.getOriginalId.asScala.exists(o => o.equalsIgnoreCase(oldOpenaireID))
 
     assertTrue(hasOldOpenAIREID)
 
   }
 
-
   @Test
-  def testPubmedMapping() :Unit = {
-    val xml = new XMLEventReader(Source.fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed.xml")))
+  def testPubmedMapping(): Unit = {
+    val xml = new XMLEventReader(
+      Source.fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed.xml"))
+    )
     val parser = new PMParser(xml)
     val results = ListBuffer[Oaf]()
     parser.foreach(x => results += PubMedToOaf.convert(x, vocabularies))
-
-
-
     results.foreach(checkPubmedPublication)
-
-
   }
 
   @Test
diff --git a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/raw/MigrateDbEntitiesApplicationTest.java b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/raw/MigrateDbEntitiesApplicationTest.java
index 06947103a..069edc5a6 100644
--- a/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/raw/MigrateDbEntitiesApplicationTest.java
+++ b/dhp-workflows/dhp-graph-mapper/src/test/java/eu/dnetlib/dhp/oa/graph/raw/MigrateDbEntitiesApplicationTest.java
@@ -162,7 +162,7 @@ class MigrateDbEntitiesApplicationTest {
 			.stream()
 			.map(Qualifier::getSchemeid)
 			.collect(Collectors.toCollection(HashSet::new));
-		assertEquals(1,cpSchemeId.size());
+		assertEquals(1, cpSchemeId.size());
 		assertTrue(cpSchemeId.contains("eosc:contentpolicies"));
 		HashSet cpSchemeName = ds
 			.getContentpolicies()
@@ -289,16 +289,16 @@
 
 		checkProperty(r1, "contribution", "436754.0");
 		checkProperty(r2, "contribution", "436754.0");
-		checkProperty(r1, "currency","EUR");
+		checkProperty(r1, "currency", "EUR");
 		checkProperty(r2, "currency", "EUR");
 	}
 
 	private void checkProperty(Relation r, String property, String value) {
 		final List p = r
-				.getProperties()
-				.stream()
-				.filter(kv -> kv.getKey().equals(property))
-				.collect(Collectors.toList());
+			.getProperties()
+			.stream()
+			.filter(kv -> kv.getKey().equals(property))
+			.collect(Collectors.toList());
 		assertFalse(p.isEmpty());
 		assertEquals(1, p.size());
 		assertEquals(value, p.get(0).getValue());