[stats wf] indicators across stats dbs & updates in the org ids #248

Closed
dimitris.pierrakos wants to merge 1742 commits from beta into beta2master_sept_2022
7 changed files with 39 additions and 35 deletions
Showing only changes of commit 8920932dd8


@@ -31,13 +31,13 @@ class CrossrefMappingTest {
.fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/doiboost/crossref/funder_doi"))
.mkString
- for (line <- funder_doi.linesWithSeparators.map(l =>l.stripLineEnd)) {
+ for (line <- funder_doi.linesWithSeparators.map(l => l.stripLineEnd)) {
val json = template.replace("%s", line)
val resultList: List[Oaf] = Crossref2Oaf.convert(json)
assertTrue(resultList.nonEmpty)
checkRelation(resultList)
}
- for (line <- funder_name.linesWithSeparators.map(l =>l.stripLineEnd)) {
+ for (line <- funder_name.linesWithSeparators.map(l => l.stripLineEnd)) {
val json = template.replace("%s", line)
val resultList: List[Oaf] = Crossref2Oaf.convert(json)
assertTrue(resultList.nonEmpty)
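
A note on the recurring change in this commit: it is purely a formatting fix, turning map(l =>l.stripLineEnd) into map(l => l.stripLineEnd). The underlying idiom, linesWithSeparators followed by stripLineEnd, is the Scala 2.13 replacement for the deprecated StringOps.lines, which collides with the String.lines method added in JDK 11. A minimal, self-contained sketch of the idiom (object name illustrative):

object LinesIdiomSketch {
  def main(args: Array[String]): Unit = {
    val text = "first\nsecond\r\nthird"
    // linesWithSeparators keeps the line terminators; stripLineEnd drops them,
    // so together they split lines without touching the deprecated .lines
    val lines = text.linesWithSeparators.map(l => l.stripLineEnd).toList
    assert(lines == List("first", "second", "third"))
  }
}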


@@ -25,9 +25,11 @@ class MappingORCIDToOAFTest {
.mkString
assertNotNull(json)
assertFalse(json.isEmpty)
- json.linesWithSeparators.map(l =>l.stripLineEnd).foreach(s => {
- assertNotNull(ORCIDToOAF.extractValueFromInputString(s))
- })
+ json.linesWithSeparators
+ .map(l => l.stripLineEnd)
+ .foreach(s => {
+ assertNotNull(ORCIDToOAF.extractValueFromInputString(s))
+ })
}
@Test


@@ -22,7 +22,7 @@ class UnpayWallMappingTest {
.mkString
var i: Int = 0
- for (line <- Ilist.linesWithSeparators.map(l =>l.stripLineEnd)) {
+ for (line <- Ilist.linesWithSeparators.map(l => l.stripLineEnd)) {
val p = UnpayWallToOAF.convertToOAF(line)
if (p != null) {
@@ -43,7 +43,7 @@ class UnpayWallMappingTest {
i = i + 1
}
- val l = Ilist.linesWithSeparators.map(l =>l.stripLineEnd).next()
+ val l = Ilist.linesWithSeparators.map(l => l.stripLineEnd).next()
val item = UnpayWallToOAF.convertToOAF(l)


@@ -114,7 +114,7 @@ public class MigrateMongoMdstoresApplication extends AbstractMigrationApplicatio
}
/**
- * This method is responsible to synch only the mongoMDStore that changed since last time
+ * This method is responsible for syncing only the MDStores that have changed since the last run
* @param mdFormat the MDStore's format
* @param mdLayout the MDStore's layout
* @param mdInterpretation the MDStore's interpretation
@@ -164,7 +164,7 @@ public class MigrateMongoMdstoresApplication extends AbstractMigrationApplicatio
}
/**
- *This methos store into hdfs all the mongo record of a single mdstore
+ * This method stores all the MongoDB records of a single MDStore into one HDFS file
*
* @param mdFormat the MDStore's format
* @param mdLayout the MDStore's layout
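
For context, below is a minimal sketch of the behaviour the Javadoc above describes: dumping all records of one MDStore into a single HDFS file. The class and method names, and the assumption that the records arrive as an Iterable of strings, are illustrative only; the MongoDB side of the real implementation is omitted.

import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class MdStoreDumpSketch {

    /** Hypothetical helper: writes every record of one MDStore to a single HDFS file, one per line. */
    public static void dumpToHdfs(final Iterable<String> records, final String hdfsUri,
        final String outputPath) throws IOException {
        final Configuration conf = new Configuration();
        conf.set("fs.defaultFS", hdfsUri);
        try (FileSystem fs = FileSystem.get(conf);
            BufferedWriter writer = new BufferedWriter(
                new OutputStreamWriter(fs.create(new Path(outputPath), true), StandardCharsets.UTF_8))) {
            for (final String record : records) {
                writer.write(record); // one MongoDB record, e.g. an XML metadata blob
                writer.newLine();
            }
        }
    }
}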


@@ -5,7 +5,6 @@ import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
- import eu.dnetlib.dhp.schema.oaf.Dataset;
import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
@@ -27,6 +26,7 @@ import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.oa.graph.clean.country.CleanCountrySparkJob;
+ import eu.dnetlib.dhp.schema.oaf.Dataset;
import eu.dnetlib.dhp.schema.oaf.Publication;
public class CleanCountryTest {
@@ -151,41 +151,40 @@ public class CleanCountryTest {
@Test
public void testDatasetClean() throws Exception {
final String sourcePath = getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/clean/dataset_clean_country.json")
.getPath();
spark
.read()
.textFile(sourcePath)
.map(
(MapFunction<String, Dataset>) r -> OBJECT_MAPPER.readValue(r, Dataset.class),
Encoders.bean(Dataset.class))
.write()
.json(workingDir.toString() + "/dataset");
CleanCountrySparkJob.main(new String[] {
"--isSparkSessionManaged", Boolean.FALSE.toString(),
"--inputPath", workingDir.toString() + "/dataset",
"-graphTableClassName", Dataset.class.getCanonicalName(),
"-workingDir", workingDir.toString() + "/working",
"-country", "NL",
"-verifyParam", "10.17632",
"-collectedfrom", "NARCIS",
"-hostedBy", getClass()
"--isSparkSessionManaged", Boolean.FALSE.toString(),
"--inputPath", workingDir.toString() + "/dataset",
"-graphTableClassName", Dataset.class.getCanonicalName(),
"-workingDir", workingDir.toString() + "/working",
"-country", "NL",
"-verifyParam", "10.17632",
"-collectedfrom", "NARCIS",
"-hostedBy", getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/clean/hostedBy")
.getPath()
});
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
JavaRDD<Dataset> tmp = sc
.textFile(workingDir.toString() + "/dataset")
.map(item -> OBJECT_MAPPER.readValue(item, Dataset.class));
Assertions.assertEquals(1, tmp.count());
Assertions.assertEquals(0, tmp.first().getCountry().size());
}
}


@@ -53,7 +53,8 @@ class ResolveEntitiesTest extends Serializable {
def generateUpdates(spark: SparkSession): Unit = {
val template = Source.fromInputStream(this.getClass.getResourceAsStream("updates")).mkString
- val pids: List[String] = template.linesWithSeparators.map(l =>l.stripLineEnd)
+ val pids: List[String] = template.linesWithSeparators
+ .map(l => l.stripLineEnd)
.map { id =>
val r = new Result
r.setId(id.toLowerCase.trim)
@@ -127,7 +128,7 @@ class ResolveEntitiesTest extends Serializable {
entities.foreach { e =>
val template = Source.fromInputStream(this.getClass.getResourceAsStream(s"$e")).mkString
spark
- .createDataset(spark.sparkContext.parallelize(template.linesWithSeparators.map(l =>l.stripLineEnd).toList))
+ .createDataset(spark.sparkContext.parallelize(template.linesWithSeparators.map(l => l.stripLineEnd).toList))
.as[String]
.write
.option("compression", "gzip")
@@ -264,7 +265,8 @@ class ResolveEntitiesTest extends Serializable {
Source
.fromInputStream(this.getClass.getResourceAsStream(s"publication"))
.mkString
- .linesWithSeparators.map(l =>l.stripLineEnd)
+ .linesWithSeparators
+ .map(l => l.stripLineEnd)
.next(),
classOf[Publication]
)


@@ -47,7 +47,7 @@ class ScholixGraphTest extends AbstractVocabularyTest {
val inputRelations = Source
.fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/oaf_to_summary"))
.mkString
- val items = inputRelations.linesWithSeparators.map(l =>l.stripLineEnd).toList
+ val items = inputRelations.linesWithSeparators.map(l => l.stripLineEnd).toList
assertNotNull(items)
items.foreach(i => assertTrue(i.nonEmpty))
val result =
@@ -69,7 +69,8 @@ class ScholixGraphTest extends AbstractVocabularyTest {
getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/merge_result_scholix")
)
.mkString
- val result: List[(Relation, ScholixSummary)] = inputRelations.linesWithSeparators.map(l =>l.stripLineEnd)
+ val result: List[(Relation, ScholixSummary)] = inputRelations.linesWithSeparators
+ .map(l => l.stripLineEnd)
.sliding(2)
.map(s => (s.head, s(1)))
.map(p => (mapper.readValue(p._1, classOf[Relation]), mapper.readValue(p._2, classOf[ScholixSummary])))
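
One detail worth flagging in the pairing code above: sliding(2) uses a step of 1 and therefore yields overlapping windows, pairing every line with its successor. If the merge_result_scholix fixture strictly alternates relation and summary lines, grouped(2) would produce the disjoint pairs instead. A minimal sketch of the difference (data illustrative):

object PairingSketch {
  def main(args: Array[String]): Unit = {
    val lines = List("rel1", "sum1", "rel2", "sum2")
    // sliding(2): overlapping windows -> (rel1,sum1), (sum1,rel2), (rel2,sum2)
    val overlapping = lines.sliding(2).map(s => (s.head, s(1))).toList
    // grouped(2): disjoint windows -> (rel1,sum1), (rel2,sum2)
    val disjoint = lines.grouped(2).map(s => (s.head, s(1))).toList
    assert(overlapping.size == 3 && disjoint.size == 2)
  }
}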