forked from D-Net/dnet-hadoop
updated the lines function to its implementation linesWithSeparators.map(l => l.stripLineEnd); this forces the Scala compiler to resolve the pipeline as Scala code rather than as the java.lang.String.lines() pipeline
This commit is contained in:
parent
f910b7379d
commit
91c70b15a5
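
For context, a minimal sketch of the ambiguity the commit message describes (not part of this commit; assumes Scala 2.13 on JDK 11+, where java.lang.String gained its own lines() method):

// On JDK 11+, `data.lines` resolves to java.lang.String.lines(), which returns
// a java.util.stream.Stream[String] instead of a Scala Iterator[String], so the
// rest of the pipeline is no longer Scala collection code.
// `linesWithSeparators` exists only on scala.collection.StringOps, so calling it
// forces the implicit conversion and keeps the pipeline in Scala; stripLineEnd
// then removes the line terminators that linesWithSeparators preserves.
val data = "a\nb\nc"
val fixed: List[String] = data.linesWithSeparators.map(l => l.stripLineEnd).toList
assert(fixed == List("a", "b", "c"))

This is why each call site below replaces `lines` with `linesWithSeparators.map(l => l.stripLineEnd)` instead of leaving method resolution to the JDK in use.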
@@ -27,7 +27,7 @@ object SparkCreateBaselineDataFrame {
   def requestBaseLineUpdatePage(maxFile: String): List[(String, String)] = {
     val data = requestPage("https://ftp.ncbi.nlm.nih.gov/pubmed/updatefiles/")
 
-    val result = data.lines
+    val result = data.linesWithSeparators.map(l => l.stripLineEnd)
       .filter(l => l.startsWith("<a href="))
       .map { l =>
         val end = l.lastIndexOf("\">")

@@ -63,7 +63,7 @@ class BioScholixTest extends AbstractVocabularyTest {
     val records: String = Source
       .fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed_dump"))
       .mkString
-    val r: List[Oaf] = records.lines.toList
+    val r: List[Oaf] = records.linesWithSeparators.map(l => l.stripLineEnd).toList
       .map(s => mapper.readValue(s, classOf[PMArticle]))
       .map(a => PubMedToOaf.convert(a, vocabularies))
     assertEquals(10, r.size)

@@ -173,9 +173,9 @@ class BioScholixTest extends AbstractVocabularyTest {
     val records: String = Source
       .fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pdb_dump"))
       .mkString
-    records.lines.foreach(s => assertTrue(s.nonEmpty))
+    records.linesWithSeparators.map(l => l.stripLineEnd).foreach(s => assertTrue(s.nonEmpty))
 
-    val result: List[Oaf] = records.lines.toList.flatMap(o => BioDBToOAF.pdbTOOaf(o))
+    val result: List[Oaf] = records.linesWithSeparators.map(l => l.stripLineEnd).toList.flatMap(o => BioDBToOAF.pdbTOOaf(o))
 
     assertTrue(result.nonEmpty)
     result.foreach(r => assertNotNull(r))

@@ -194,9 +194,9 @@ class BioScholixTest extends AbstractVocabularyTest {
     val records: String = Source
       .fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/uniprot_dump"))
       .mkString
-    records.lines.foreach(s => assertTrue(s.nonEmpty))
+    records.linesWithSeparators.map(l => l.stripLineEnd).foreach(s => assertTrue(s.nonEmpty))
 
-    val result: List[Oaf] = records.lines.toList.flatMap(o => BioDBToOAF.uniprotToOAF(o))
+    val result: List[Oaf] = records.linesWithSeparators.map(l => l.stripLineEnd).toList.flatMap(o => BioDBToOAF.uniprotToOAF(o))
 
     assertTrue(result.nonEmpty)
     result.foreach(r => assertNotNull(r))

@@ -239,9 +239,9 @@ class BioScholixTest extends AbstractVocabularyTest {
     val records: String = Source
       .fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/crossref_links"))
       .mkString
-    records.lines.foreach(s => assertTrue(s.nonEmpty))
+    records.linesWithSeparators.map(l => l.stripLineEnd).foreach(s => assertTrue(s.nonEmpty))
 
-    val result: List[Oaf] = records.lines.map(s => BioDBToOAF.crossrefLinksToOaf(s)).toList
+    val result: List[Oaf] = records.linesWithSeparators.map(l => l.stripLineEnd).map(s => BioDBToOAF.crossrefLinksToOaf(s)).toList
 
     assertNotNull(result)
     assertTrue(result.nonEmpty)

@@ -276,11 +276,11 @@ class BioScholixTest extends AbstractVocabularyTest {
         getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/scholix_resolved")
       )
       .mkString
-    records.lines.foreach(s => assertTrue(s.nonEmpty))
+    records.linesWithSeparators.map(l => l.stripLineEnd).foreach(s => assertTrue(s.nonEmpty))
 
     implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats
 
-    val l: List[ScholixResolved] = records.lines.map { input =>
+    val l: List[ScholixResolved] = records.linesWithSeparators.map(l => l.stripLineEnd).map { input =>
       lazy val json = parse(input)
       json.extract[ScholixResolved]
     }.toList

@@ -31,13 +31,13 @@ class CrossrefMappingTest {
       .fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/doiboost/crossref/funder_doi"))
       .mkString
 
-    for (line <- funder_doi.lines) {
+    for (line <- funder_doi.linesWithSeparators.map(l => l.stripLineEnd)) {
       val json = template.replace("%s", line)
       val resultList: List[Oaf] = Crossref2Oaf.convert(json)
       assertTrue(resultList.nonEmpty)
       checkRelation(resultList)
     }
-    for (line <- funder_name.lines) {
+    for (line <- funder_name.linesWithSeparators.map(l => l.stripLineEnd)) {
       val json = template.replace("%s", line)
       val resultList: List[Oaf] = Crossref2Oaf.convert(json)
       assertTrue(resultList.nonEmpty)

@@ -25,7 +25,7 @@ class MappingORCIDToOAFTest {
       .mkString
     assertNotNull(json)
     assertFalse(json.isEmpty)
-    json.lines.foreach(s => {
+    json.linesWithSeparators.map(l => l.stripLineEnd).foreach(s => {
       assertNotNull(ORCIDToOAF.extractValueFromInputString(s))
     })
   }

@@ -22,7 +22,7 @@ class UnpayWallMappingTest {
       .mkString
 
     var i: Int = 0
-    for (line <- Ilist.lines) {
+    for (line <- Ilist.linesWithSeparators.map(l => l.stripLineEnd)) {
       val p = UnpayWallToOAF.convertToOAF(line)
 
       if (p != null) {

@@ -43,7 +43,7 @@ class UnpayWallMappingTest {
       i = i + 1
     }
 
-    val l = Ilist.lines.next()
+    val l = Ilist.linesWithSeparators.map(l => l.stripLineEnd).next()
 
     val item = UnpayWallToOAF.convertToOAF(l)
 

@@ -53,7 +53,7 @@ class ResolveEntitiesTest extends Serializable {
   def generateUpdates(spark: SparkSession): Unit = {
     val template = Source.fromInputStream(this.getClass.getResourceAsStream("updates")).mkString
 
-    val pids: List[String] = template.lines
+    val pids: List[String] = template.linesWithSeparators.map(l => l.stripLineEnd)
       .map { id =>
         val r = new Result
         r.setId(id.toLowerCase.trim)

@@ -127,7 +127,7 @@ class ResolveEntitiesTest extends Serializable {
     entities.foreach { e =>
       val template = Source.fromInputStream(this.getClass.getResourceAsStream(s"$e")).mkString
       spark
-        .createDataset(spark.sparkContext.parallelize(template.lines.toList))
+        .createDataset(spark.sparkContext.parallelize(template.linesWithSeparators.map(l => l.stripLineEnd).toList))
         .as[String]
         .write
         .option("compression", "gzip")

@@ -264,7 +264,7 @@ class ResolveEntitiesTest extends Serializable {
       Source
         .fromInputStream(this.getClass.getResourceAsStream(s"publication"))
         .mkString
-        .lines
+        .linesWithSeparators.map(l => l.stripLineEnd)
         .next(),
       classOf[Publication]
     )

@@ -47,7 +47,7 @@ class ScholixGraphTest extends AbstractVocabularyTest {
     val inputRelations = Source
       .fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/oaf_to_summary"))
       .mkString
-    val items = inputRelations.lines.toList
+    val items = inputRelations.linesWithSeparators.map(l => l.stripLineEnd).toList
     assertNotNull(items)
     items.foreach(i => assertTrue(i.nonEmpty))
     val result =

@@ -69,7 +69,7 @@ class ScholixGraphTest extends AbstractVocabularyTest {
         getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/merge_result_scholix")
       )
       .mkString
-    val result: List[(Relation, ScholixSummary)] = inputRelations.lines
+    val result: List[(Relation, ScholixSummary)] = inputRelations.linesWithSeparators.map(l => l.stripLineEnd)
      .sliding(2)
      .map(s => (s.head, s(1)))
      .map(p => (mapper.readValue(p._1, classOf[Relation]), mapper.readValue(p._2, classOf[ScholixSummary])))