forked from D-Net/dnet-hadoop
Merge remote-tracking branch 'origin/beta' into beta
commit 118c1fc3b3

@@ -13,6 +13,8 @@ import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.spark.api.java.function.MapFunction;
+import org.apache.spark.sql.Encoders;
 
 import com.github.sisyphsu.dateparser.DateParserUtils;
 import com.google.common.collect.Lists;
@@ -23,8 +25,6 @@ import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.common.ModelSupport;
 import eu.dnetlib.dhp.schema.oaf.*;
 import me.xuender.unidecode.Unidecode;
-import org.apache.spark.api.java.function.MapFunction;
-import org.apache.spark.sql.Encoders;
 
 public class GraphCleaningFunctions extends CleaningFunctions {
 

@@ -27,7 +27,8 @@ object SparkCreateBaselineDataFrame {
   def requestBaseLineUpdatePage(maxFile: String): List[(String, String)] = {
     val data = requestPage("https://ftp.ncbi.nlm.nih.gov/pubmed/updatefiles/")
 
-    val result = data.linesWithSeparators.map(l =>l.stripLineEnd)
+    val result = data.linesWithSeparators
+      .map(l => l.stripLineEnd)
       .filter(l => l.startsWith("<a href="))
       .map { l =>
         val end = l.lastIndexOf("\">")

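Most of the Scala changes in this commit are the same mechanical cleanup: the one-line `linesWithSeparators.map(l =>l.stripLineEnd)` chain gets a space after `=>` and one call per line. For context, a self-contained sketch of the idiom itself (the sample listing line is made up): `linesWithSeparators` yields a Scala Iterator[String] with the terminators still attached, and `stripLineEnd` removes them, presumably the pattern of choice because JDK 11 added `String.lines()`, which returns a Java Stream and shadows the old Scala extension.

    object LinesIdiomSketch {
      def main(args: Array[String]): Unit = {
        // Hypothetical fragment of the NCBI update-files listing parsed above.
        val data =
          "<a href=\"pubmed22n0001.xml.gz\">pubmed22n0001.xml.gz</a>\r\n" +
            "2021-12-01 08:00  215M\n"

        val links = data.linesWithSeparators // Iterator[String], terminators kept
          .map(l => l.stripLineEnd)          // drops the trailing \n or \r\n
          .filter(l => l.startsWith("<a href="))
          .toList

        links.foreach(println)
      }
    }
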
@@ -63,7 +63,9 @@ class BioScholixTest extends AbstractVocabularyTest {
     val records: String = Source
       .fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed_dump"))
       .mkString
-    val r: List[Oaf] = records.linesWithSeparators.map(l =>l.stripLineEnd).toList
+    val r: List[Oaf] = records.linesWithSeparators
+      .map(l => l.stripLineEnd)
+      .toList
       .map(s => mapper.readValue(s, classOf[PMArticle]))
       .map(a => PubMedToOaf.convert(a, vocabularies))
     assertEquals(10, r.size)

@@ -173,9 +175,10 @@ class BioScholixTest extends AbstractVocabularyTest {
     val records: String = Source
       .fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pdb_dump"))
       .mkString
-    records.linesWithSeparators.map(l =>l.stripLineEnd).foreach(s => assertTrue(s.nonEmpty))
+    records.linesWithSeparators.map(l => l.stripLineEnd).foreach(s => assertTrue(s.nonEmpty))
 
-    val result: List[Oaf] = records.linesWithSeparators.map(l =>l.stripLineEnd).toList.flatMap(o => BioDBToOAF.pdbTOOaf(o))
+    val result: List[Oaf] =
+      records.linesWithSeparators.map(l => l.stripLineEnd).toList.flatMap(o => BioDBToOAF.pdbTOOaf(o))
 
     assertTrue(result.nonEmpty)
     result.foreach(r => assertNotNull(r))

@@ -194,9 +197,10 @@ class BioScholixTest extends AbstractVocabularyTest {
     val records: String = Source
       .fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/uniprot_dump"))
       .mkString
-    records.linesWithSeparators.map(l =>l.stripLineEnd).foreach(s => assertTrue(s.nonEmpty))
+    records.linesWithSeparators.map(l => l.stripLineEnd).foreach(s => assertTrue(s.nonEmpty))
 
-    val result: List[Oaf] = records.linesWithSeparators.map(l =>l.stripLineEnd).toList.flatMap(o => BioDBToOAF.uniprotToOAF(o))
+    val result: List[Oaf] =
+      records.linesWithSeparators.map(l => l.stripLineEnd).toList.flatMap(o => BioDBToOAF.uniprotToOAF(o))
 
     assertTrue(result.nonEmpty)
     result.foreach(r => assertNotNull(r))

@@ -239,9 +243,10 @@ class BioScholixTest extends AbstractVocabularyTest {
     val records: String = Source
       .fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/crossref_links"))
       .mkString
-    records.linesWithSeparators.map(l =>l.stripLineEnd).foreach(s => assertTrue(s.nonEmpty))
+    records.linesWithSeparators.map(l => l.stripLineEnd).foreach(s => assertTrue(s.nonEmpty))
 
-    val result: List[Oaf] = records.linesWithSeparators.map(l =>l.stripLineEnd).map(s => BioDBToOAF.crossrefLinksToOaf(s)).toList
+    val result: List[Oaf] =
+      records.linesWithSeparators.map(l => l.stripLineEnd).map(s => BioDBToOAF.crossrefLinksToOaf(s)).toList
 
     assertNotNull(result)
     assertTrue(result.nonEmpty)

@@ -276,14 +281,17 @@ class BioScholixTest extends AbstractVocabularyTest {
         getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/scholix_resolved")
       )
       .mkString
-    records.linesWithSeparators.map(l =>l.stripLineEnd).foreach(s => assertTrue(s.nonEmpty))
+    records.linesWithSeparators.map(l => l.stripLineEnd).foreach(s => assertTrue(s.nonEmpty))
 
     implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats
 
-    val l: List[ScholixResolved] = records.linesWithSeparators.map(l =>l.stripLineEnd).map { input =>
-      lazy val json = parse(input)
-      json.extract[ScholixResolved]
-    }.toList
+    val l: List[ScholixResolved] = records.linesWithSeparators
+      .map(l => l.stripLineEnd)
+      .map { input =>
+        lazy val json = parse(input)
+        json.extract[ScholixResolved]
+      }
+      .toList
 
     val result: List[Oaf] = l.map(s => BioDBToOAF.scholixResolvedToOAF(s))
 

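The scholix_resolved hunk above is again purely a reformatting of the json4s deserialization loop. A minimal standalone sketch of that pattern, with a hypothetical ScholixId case class standing in for ScholixResolved:

    object Json4sSketch {
      import org.json4s.DefaultFormats
      import org.json4s.jackson.JsonMethods.parse

      case class ScholixId(identifier: String, schema: String) // hypothetical shape

      implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats

      def main(args: Array[String]): Unit = {
        val records = """{"identifier":"10.1000/xyz","schema":"doi"}"""
        val parsed: List[ScholixId] = records.linesWithSeparators
          .map(l => l.stripLineEnd)
          .map { input =>
            lazy val json = parse(input) // parsed only if extraction is forced
            json.extract[ScholixId]
          }
          .toList
        assert(parsed.head.identifier == "10.1000/xyz")
      }
    }
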
@@ -37,12 +37,24 @@ public class SubscriptionUtils {
     }
 
     public static boolean verifyDateRange(final long date, final String min, final String max) {
+
+        long from = 0;
+        long to = Long.MAX_VALUE;
+
         try {
-            return date >= DateUtils.parseDate(min, "yyyy-MM-dd").getTime()
-                && date < DateUtils.parseDate(max, "yyyy-MM-dd").getTime() + ONE_DAY;
+            from = min != null ? DateUtils.parseDate(min, "yyyy-MM-dd").getTime() : 0;
         } catch (final ParseException e) {
-            return false;
+            from = 0;
         }
+
+        try {
+            to = max != null ? DateUtils.parseDate(max, "yyyy-MM-dd").getTime() + ONE_DAY : Long.MAX_VALUE;
+        } catch (final ParseException e) {
+            to = Long.MAX_VALUE;
+        }
+
+        return date >= from && date < to;
+
     }
 
     public static boolean verifyExact(final String s1, final String s2) {

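This is the main behavioral change in the diff: verifyDateRange previously failed the whole check when either bound was missing or unparsable, while the new version parses each bound independently and treats a null or unparsable value as an open end of the range (the test hunk below exercises exactly these cases). A Scala sketch of the same logic, not the project's code, with SimpleDateFormat standing in for commons-lang's DateUtils.parseDate:

    import java.text.SimpleDateFormat
    import scala.util.Try

    object DateRangeSketch {
      private val OneDay = 24L * 60 * 60 * 1000

      // None when the bound is null or does not parse as yyyy-MM-dd.
      private def parse(s: String): Option[Long] =
        Option(s).flatMap(v => Try(new SimpleDateFormat("yyyy-MM-dd").parse(v).getTime).toOption)

      def verifyDateRange(date: Long, min: String, max: String): Boolean = {
        val from = parse(min).getOrElse(0L)                            // open lower bound
        val to = parse(max).map(_ + OneDay).getOrElse(Long.MaxValue)   // open upper bound
        date >= from && date < to
      }

      def main(args: Array[String]): Unit = {
        val date = new SimpleDateFormat("yyyy-MM-dd").parse("2010-06-15").getTime
        assert(verifyDateRange(date, "2010-01-01", null))   // missing upper bound is open
        assert(verifyDateRange(date, "NULL", "2011-01-01")) // unparsable lower bound is open
        assert(!verifyDateRange(date, "2020-01-01", null))
      }
    }
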
@@ -41,6 +41,18 @@ public class SubscriptionUtilsTest {
 
         assertTrue(SubscriptionUtils.verifyDateRange(date, "2010-01-01", "2011-01-01"));
         assertFalse(SubscriptionUtils.verifyDateRange(date, "2020-01-01", "2021-01-01"));
+
+        assertTrue(SubscriptionUtils.verifyDateRange(date, "2010-01-01", "NULL"));
+        assertTrue(SubscriptionUtils.verifyDateRange(date, "2010-01-01", null));
+        assertTrue(SubscriptionUtils.verifyDateRange(date, "NULL", "2011-01-01"));
+        assertTrue(SubscriptionUtils.verifyDateRange(date, null, "2011-01-01"));
+        assertTrue(SubscriptionUtils.verifyDateRange(date, "NULL", "NULL"));
+        assertTrue(SubscriptionUtils.verifyDateRange(date, null, null));
+
+        assertFalse(SubscriptionUtils.verifyDateRange(date, "2020-01-01", null));
+        assertFalse(SubscriptionUtils.verifyDateRange(date, "2020-01-01", "NULL"));
+        assertFalse(SubscriptionUtils.verifyDateRange(date, null, "2005-01-01"));
+        assertFalse(SubscriptionUtils.verifyDateRange(date, "NULL", "2005-01-01"));
     }
 
     @Test

@@ -36,13 +36,13 @@ class CrossrefMappingTest {
       .fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/doiboost/crossref/funder_doi"))
       .mkString
 
-    for (line <- funder_doi.linesWithSeparators.map(l =>l.stripLineEnd)) {
+    for (line <- funder_doi.linesWithSeparators.map(l => l.stripLineEnd)) {
       val json = template.replace("%s", line)
       val resultList: List[Oaf] = Crossref2Oaf.convert(json)
       assertTrue(resultList.nonEmpty)
       checkRelation(resultList)
     }
-    for (line <- funder_name.linesWithSeparators.map(l =>l.stripLineEnd)) {
+    for (line <- funder_name.linesWithSeparators.map(l => l.stripLineEnd)) {
       val json = template.replace("%s", line)
       val resultList: List[Oaf] = Crossref2Oaf.convert(json)
       assertTrue(resultList.nonEmpty)

@@ -25,9 +25,11 @@ class MappingORCIDToOAFTest {
       .mkString
     assertNotNull(json)
     assertFalse(json.isEmpty)
-    json.linesWithSeparators.map(l =>l.stripLineEnd).foreach(s => {
-      assertNotNull(ORCIDToOAF.extractValueFromInputString(s))
-    })
+    json.linesWithSeparators
+      .map(l => l.stripLineEnd)
+      .foreach(s => {
+        assertNotNull(ORCIDToOAF.extractValueFromInputString(s))
+      })
   }
 
   @Test

@@ -22,7 +22,7 @@ class UnpayWallMappingTest {
       .mkString
 
     var i: Int = 0
-    for (line <- Ilist.linesWithSeparators.map(l =>l.stripLineEnd)) {
+    for (line <- Ilist.linesWithSeparators.map(l => l.stripLineEnd)) {
       val p = UnpayWallToOAF.convertToOAF(line)
 
       if (p != null) {

@@ -43,7 +43,7 @@ class UnpayWallMappingTest {
       i = i + 1
     }
 
-    val l = Ilist.linesWithSeparators.map(l =>l.stripLineEnd).next()
+    val l = Ilist.linesWithSeparators.map(l => l.stripLineEnd).next()
 
     val item = UnpayWallToOAF.convertToOAF(l)
 

@@ -5,7 +5,6 @@ import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
 
-import eu.dnetlib.dhp.schema.oaf.Dataset;
 import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;

@@ -27,6 +26,7 @@ import org.slf4j.LoggerFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;
 
 import eu.dnetlib.dhp.oa.graph.clean.country.CleanCountrySparkJob;
+import eu.dnetlib.dhp.schema.oaf.Dataset;
 import eu.dnetlib.dhp.schema.oaf.Publication;
 
 public class CleanCountryTest {

@@ -151,41 +151,40 @@ public class CleanCountryTest {
     @Test
     public void testDatasetClean() throws Exception {
         final String sourcePath = getClass()
             .getResource("/eu/dnetlib/dhp/oa/graph/clean/dataset_clean_country.json")
             .getPath();
 
         spark
             .read()
             .textFile(sourcePath)
             .map(
                 (MapFunction<String, Dataset>) r -> OBJECT_MAPPER.readValue(r, Dataset.class),
                 Encoders.bean(Dataset.class))
             .write()
             .json(workingDir.toString() + "/dataset");
 
         CleanCountrySparkJob.main(new String[] {
             "--isSparkSessionManaged", Boolean.FALSE.toString(),
             "--inputPath", workingDir.toString() + "/dataset",
             "-graphTableClassName", Dataset.class.getCanonicalName(),
             "-workingDir", workingDir.toString() + "/working",
             "-country", "NL",
             "-verifyParam", "10.17632",
             "-collectedfrom", "NARCIS",
             "-hostedBy", getClass()
                 .getResource("/eu/dnetlib/dhp/oa/graph/clean/hostedBy")
                 .getPath()
         });
 
         final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
         JavaRDD<Dataset> tmp = sc
             .textFile(workingDir.toString() + "/dataset")
             .map(item -> OBJECT_MAPPER.readValue(item, Dataset.class));
 
         Assertions.assertEquals(1, tmp.count());
 
         Assertions.assertEquals(0, tmp.first().getCountry().size());
 
-
     }
 
 }

@@ -53,7 +53,8 @@ class ResolveEntitiesTest extends Serializable {
   def generateUpdates(spark: SparkSession): Unit = {
     val template = Source.fromInputStream(this.getClass.getResourceAsStream("updates")).mkString
 
-    val pids: List[String] = template.linesWithSeparators.map(l =>l.stripLineEnd)
+    val pids: List[String] = template.linesWithSeparators
+      .map(l => l.stripLineEnd)
       .map { id =>
         val r = new Result
         r.setId(id.toLowerCase.trim)

@@ -127,7 +128,7 @@ class ResolveEntitiesTest extends Serializable {
     entities.foreach { e =>
       val template = Source.fromInputStream(this.getClass.getResourceAsStream(s"$e")).mkString
       spark
-        .createDataset(spark.sparkContext.parallelize(template.linesWithSeparators.map(l =>l.stripLineEnd).toList))
+        .createDataset(spark.sparkContext.parallelize(template.linesWithSeparators.map(l => l.stripLineEnd).toList))
         .as[String]
         .write
         .option("compression", "gzip")

@@ -264,7 +265,8 @@ class ResolveEntitiesTest extends Serializable {
         Source
           .fromInputStream(this.getClass.getResourceAsStream(s"publication"))
           .mkString
-          .linesWithSeparators.map(l =>l.stripLineEnd)
+          .linesWithSeparators
+          .map(l => l.stripLineEnd)
           .next(),
         classOf[Publication]
       )

@@ -47,7 +47,7 @@ class ScholixGraphTest extends AbstractVocabularyTest {
     val inputRelations = Source
       .fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/oaf_to_summary"))
       .mkString
-    val items = inputRelations.linesWithSeparators.map(l =>l.stripLineEnd).toList
+    val items = inputRelations.linesWithSeparators.map(l => l.stripLineEnd).toList
     assertNotNull(items)
     items.foreach(i => assertTrue(i.nonEmpty))
     val result =

@@ -69,7 +69,8 @@ class ScholixGraphTest extends AbstractVocabularyTest {
         getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/merge_result_scholix")
       )
       .mkString
-    val result: List[(Relation, ScholixSummary)] = inputRelations.linesWithSeparators.map(l =>l.stripLineEnd)
+    val result: List[(Relation, ScholixSummary)] = inputRelations.linesWithSeparators
+      .map(l => l.stripLineEnd)
       .sliding(2)
       .map(s => (s.head, s(1)))
       .map(p => (mapper.readValue(p._1, classOf[Relation]), mapper.readValue(p._2, classOf[ScholixSummary])))

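A small aside on the last hunk: `sliding(2)` produces overlapping windows of size 2 with step 1, so every line is paired with its successor rather than the input being split into disjoint (relation, summary) pairs. Illustrated with made-up values:

    val lines = Iterator("rel-1", "summary-1", "rel-2")
    val pairs = lines.sliding(2).map(s => (s.head, s(1))).toList
    // pairs == List(("rel-1", "summary-1"), ("summary-1", "rel-2"))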