code formatting

Claudio Atzori 2023-02-13 16:25:47 +01:00
parent 554df257ab
commit 9a03f71db1
9 changed files with 61 additions and 48 deletions
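
The same mechanical change repeats across the files below: Java imports are regrouped, and Scala one-liners of the form linesWithSeparators.map(l =>l.stripLineEnd) are reflowed to one call per line. As a minimal, self-contained sketch of that idiom (sample input and object name are hypothetical, not from this repository): linesWithSeparators yields each line of a String with its terminator and stripLineEnd drops it, which avoids the StringOps.lines method that was deprecated after JDK 11 added String.lines().

object LinesIdiomSketch {
  def main(args: Array[String]): Unit = {
    val text = "first\nsecond\nthird\n" // hypothetical sample input
    // Same shape as the reformatted call chains in this commit:
    val lines: List[String] = text.linesWithSeparators
      .map(l => l.stripLineEnd)
      .toList
    assert(lines == List("first", "second", "third"))
    println(lines.mkString(", "))
  }
}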

View File

@@ -13,6 +13,8 @@ import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.spark.api.java.function.MapFunction;
+import org.apache.spark.sql.Encoders;
 import com.github.sisyphsu.dateparser.DateParserUtils;
 import com.google.common.collect.Lists;
@@ -23,8 +25,6 @@ import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.common.ModelSupport;
 import eu.dnetlib.dhp.schema.oaf.*;
 import me.xuender.unidecode.Unidecode;
-import org.apache.spark.api.java.function.MapFunction;
-import org.apache.spark.sql.Encoders;
 public class GraphCleaningFunctions extends CleaningFunctions {

View File

@@ -27,7 +27,8 @@ object SparkCreateBaselineDataFrame {
   def requestBaseLineUpdatePage(maxFile: String): List[(String, String)] = {
     val data = requestPage("https://ftp.ncbi.nlm.nih.gov/pubmed/updatefiles/")
-    val result = data.linesWithSeparators.map(l =>l.stripLineEnd)
+    val result = data.linesWithSeparators
+      .map(l => l.stripLineEnd)
       .filter(l => l.startsWith("<a href="))
       .map { l =>
         val end = l.lastIndexOf("\">")

View File

@@ -63,7 +63,9 @@ class BioScholixTest extends AbstractVocabularyTest {
     val records: String = Source
       .fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed_dump"))
      .mkString
-    val r: List[Oaf] = records.linesWithSeparators.map(l =>l.stripLineEnd).toList
+    val r: List[Oaf] = records.linesWithSeparators
+      .map(l => l.stripLineEnd)
+      .toList
       .map(s => mapper.readValue(s, classOf[PMArticle]))
       .map(a => PubMedToOaf.convert(a, vocabularies))
     assertEquals(10, r.size)
@@ -175,7 +177,8 @@ class BioScholixTest extends AbstractVocabularyTest {
       .mkString
     records.linesWithSeparators.map(l => l.stripLineEnd).foreach(s => assertTrue(s.nonEmpty))
-    val result: List[Oaf] = records.linesWithSeparators.map(l =>l.stripLineEnd).toList.flatMap(o => BioDBToOAF.pdbTOOaf(o))
+    val result: List[Oaf] =
+      records.linesWithSeparators.map(l => l.stripLineEnd).toList.flatMap(o => BioDBToOAF.pdbTOOaf(o))
     assertTrue(result.nonEmpty)
     result.foreach(r => assertNotNull(r))
@@ -196,7 +199,8 @@ class BioScholixTest extends AbstractVocabularyTest {
       .mkString
     records.linesWithSeparators.map(l => l.stripLineEnd).foreach(s => assertTrue(s.nonEmpty))
-    val result: List[Oaf] = records.linesWithSeparators.map(l =>l.stripLineEnd).toList.flatMap(o => BioDBToOAF.uniprotToOAF(o))
+    val result: List[Oaf] =
+      records.linesWithSeparators.map(l => l.stripLineEnd).toList.flatMap(o => BioDBToOAF.uniprotToOAF(o))
     assertTrue(result.nonEmpty)
     result.foreach(r => assertNotNull(r))
@@ -241,7 +245,8 @@ class BioScholixTest extends AbstractVocabularyTest {
       .mkString
     records.linesWithSeparators.map(l => l.stripLineEnd).foreach(s => assertTrue(s.nonEmpty))
-    val result: List[Oaf] = records.linesWithSeparators.map(l =>l.stripLineEnd).map(s => BioDBToOAF.crossrefLinksToOaf(s)).toList
+    val result: List[Oaf] =
+      records.linesWithSeparators.map(l => l.stripLineEnd).map(s => BioDBToOAF.crossrefLinksToOaf(s)).toList
     assertNotNull(result)
     assertTrue(result.nonEmpty)
@@ -280,10 +285,13 @@ class BioScholixTest extends AbstractVocabularyTest {
     implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats
-    val l: List[ScholixResolved] = records.linesWithSeparators.map(l =>l.stripLineEnd).map { input =>
+    val l: List[ScholixResolved] = records.linesWithSeparators
+      .map(l => l.stripLineEnd)
+      .map { input =>
       lazy val json = parse(input)
       json.extract[ScholixResolved]
-    }.toList
+    }
+      .toList
     val result: List[Oaf] = l.map(s => BioDBToOAF.scholixResolvedToOAF(s))

View File

@@ -25,7 +25,9 @@ class MappingORCIDToOAFTest {
       .mkString
     assertNotNull(json)
     assertFalse(json.isEmpty)
-    json.linesWithSeparators.map(l =>l.stripLineEnd).foreach(s => {
+    json.linesWithSeparators
+      .map(l => l.stripLineEnd)
+      .foreach(s => {
       assertNotNull(ORCIDToOAF.extractValueFromInputString(s))
     })
   }

View File

@@ -5,7 +5,6 @@ import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import eu.dnetlib.dhp.schema.oaf.Dataset;
 import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
@@ -27,6 +26,7 @@ import org.slf4j.LoggerFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import eu.dnetlib.dhp.oa.graph.clean.country.CleanCountrySparkJob;
+import eu.dnetlib.dhp.schema.oaf.Dataset;
 import eu.dnetlib.dhp.schema.oaf.Publication;
 public class CleanCountryTest {
@@ -185,7 +185,6 @@
         Assertions.assertEquals(0, tmp.first().getCountry().size());
     }
 }

View File

@@ -53,7 +53,8 @@ class ResolveEntitiesTest extends Serializable {
   def generateUpdates(spark: SparkSession): Unit = {
     val template = Source.fromInputStream(this.getClass.getResourceAsStream("updates")).mkString
-    val pids: List[String] = template.linesWithSeparators.map(l =>l.stripLineEnd)
+    val pids: List[String] = template.linesWithSeparators
+      .map(l => l.stripLineEnd)
       .map { id =>
         val r = new Result
         r.setId(id.toLowerCase.trim)
@@ -264,7 +265,8 @@
       Source
         .fromInputStream(this.getClass.getResourceAsStream(s"publication"))
         .mkString
-        .linesWithSeparators.map(l =>l.stripLineEnd)
+        .linesWithSeparators
+        .map(l => l.stripLineEnd)
         .next(),
       classOf[Publication]
     )

View File

@@ -69,7 +69,8 @@ class ScholixGraphTest extends AbstractVocabularyTest {
         getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/merge_result_scholix")
       )
       .mkString
-    val result: List[(Relation, ScholixSummary)] = inputRelations.linesWithSeparators.map(l =>l.stripLineEnd)
+    val result: List[(Relation, ScholixSummary)] = inputRelations.linesWithSeparators
+      .map(l => l.stripLineEnd)
       .sliding(2)
       .map(s => (s.head, s(1)))
       .map(p => (mapper.readValue(p._1, classOf[Relation]), mapper.readValue(p._2, classOf[ScholixSummary])))