[stats wf] indicators across stats dbs & updates in the org ids #248

Closed
dimitris.pierrakos wants to merge 1742 commits from beta into beta2master_sept_2022
7 changed files with 39 additions and 35 deletions
Showing only changes of commit 8920932dd8


@@ -31,13 +31,13 @@ class CrossrefMappingTest {
       .fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/doiboost/crossref/funder_doi"))
       .mkString
-    for (line <- funder_doi.linesWithSeparators.map(l =>l.stripLineEnd)) {
+    for (line <- funder_doi.linesWithSeparators.map(l => l.stripLineEnd)) {
       val json = template.replace("%s", line)
       val resultList: List[Oaf] = Crossref2Oaf.convert(json)
       assertTrue(resultList.nonEmpty)
       checkRelation(resultList)
     }
-    for (line <- funder_name.linesWithSeparators.map(l =>l.stripLineEnd)) {
+    for (line <- funder_name.linesWithSeparators.map(l => l.stripLineEnd)) {
       val json = template.replace("%s", line)
       val resultList: List[Oaf] = Crossref2Oaf.convert(json)
       assertTrue(resultList.nonEmpty)
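
In this first test the change is purely cosmetic: a space is added after the arrow in l =>l.stripLineEnd. The same standard-library idiom recurs in most of the files below, so for reference here is a self-contained sketch of what it does; the template and input values are invented for illustration and are not the test resources used by CrossrefMappingTest.

// Sketch only: the linesWithSeparators / stripLineEnd idiom reformatted in this commit.
// linesWithSeparators keeps the trailing "\n" or "\r\n" on every element, so each line
// is stripped before being substituted into a one-record JSON template.
object LinesIdiomSketch {
  def main(args: Array[String]): Unit = {
    val template = """{"funder_doi": "%s"}"""                      // hypothetical template
    val resource = "10.13039/100000001\n10.13039/501100000780\r\n" // hypothetical resource content
    resource.linesWithSeparators
      .map(l => l.stripLineEnd)
      .map(line => template.replace("%s", line))
      .foreach(println)                                            // one JSON record per input line
  }
}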


@@ -25,9 +25,11 @@ class MappingORCIDToOAFTest {
       .mkString
     assertNotNull(json)
     assertFalse(json.isEmpty)
-    json.linesWithSeparators.map(l =>l.stripLineEnd).foreach(s => {
-      assertNotNull(ORCIDToOAF.extractValueFromInputString(s))
-    })
+    json.linesWithSeparators
+      .map(l => l.stripLineEnd)
+      .foreach(s => {
+        assertNotNull(ORCIDToOAF.extractValueFromInputString(s))
+      })
   }

   @Test


@@ -22,7 +22,7 @@ class UnpayWallMappingTest {
       .mkString
     var i: Int = 0
-    for (line <- Ilist.linesWithSeparators.map(l =>l.stripLineEnd)) {
+    for (line <- Ilist.linesWithSeparators.map(l => l.stripLineEnd)) {
       val p = UnpayWallToOAF.convertToOAF(line)
       if (p != null) {
@@ -43,7 +43,7 @@ class UnpayWallMappingTest {
       i = i + 1
     }
-    val l = Ilist.linesWithSeparators.map(l =>l.stripLineEnd).next()
+    val l = Ilist.linesWithSeparators.map(l => l.stripLineEnd).next()
     val item = UnpayWallToOAF.convertToOAF(l)


@@ -114,7 +114,7 @@ public class MigrateMongoMdstoresApplication extends AbstractMigrationApplicatio
   }
   /**
-   * This method is responsible to synch only the mongoMDStore that changed since last time
+   * This method is responsible to sync only the stores that have been changed since last time
    * @param mdFormat the MDStore's format
    * @param mdLayout the MDStore'slayout
    * @param mdInterpretation the MDStore's interpretation
@@ -164,7 +164,7 @@ public class MigrateMongoMdstoresApplication extends AbstractMigrationApplicatio
   }
   /**
-   *This methos store into hdfs all the mongo record of a single mdstore
+   *This method store into hdfs all the MONGO record of a single mdstore into the HDFS File
    *
    * @param mdFormat the MDStore's format
    * @param mdLayout the MDStore'slayout
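
The updated Javadoc describes the incremental behaviour only in prose: a store is re-synchronised when it has changed since the previous run. Below is a minimal sketch of that selection rule; every name in it (MdStoreInfo, lastUpdate, lastSyncTimestamp, storesToSync) is hypothetical and is not the API of MigrateMongoMdstoresApplication, whose implementation is not shown in this diff.

// Sketch only: "sync only the stores that have been changed since last time",
// expressed as a timestamp filter over hypothetical store metadata.
object IncrementalSyncSketch {

  final case class MdStoreInfo(
    id: String,
    format: String,          // corresponds to mdFormat in the Javadoc above
    layout: String,          // corresponds to mdLayout
    interpretation: String,  // corresponds to mdInterpretation
    lastUpdate: Long         // epoch millis of the store's last modification
  )

  // Keep only the stores matching the requested format/layout/interpretation
  // whose last modification is newer than the previous synchronisation.
  def storesToSync(
    all: Seq[MdStoreInfo],
    mdFormat: String,
    mdLayout: String,
    mdInterpretation: String,
    lastSyncTimestamp: Long
  ): Seq[MdStoreInfo] =
    all.filter { s =>
      s.format == mdFormat &&
      s.layout == mdLayout &&
      s.interpretation == mdInterpretation &&
      s.lastUpdate > lastSyncTimestamp
    }
}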


@@ -5,7 +5,6 @@ import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import eu.dnetlib.dhp.schema.oaf.Dataset;
 import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
@@ -27,6 +26,7 @@ import org.slf4j.LoggerFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import eu.dnetlib.dhp.oa.graph.clean.country.CleanCountrySparkJob;
+import eu.dnetlib.dhp.schema.oaf.Dataset;
 import eu.dnetlib.dhp.schema.oaf.Publication;
 public class CleanCountryTest {
@@ -151,41 +151,40 @@ public class CleanCountryTest {
   @Test
   public void testDatasetClean() throws Exception {
     final String sourcePath = getClass()
       .getResource("/eu/dnetlib/dhp/oa/graph/clean/dataset_clean_country.json")
       .getPath();
     spark
       .read()
       .textFile(sourcePath)
       .map(
         (MapFunction<String, Dataset>) r -> OBJECT_MAPPER.readValue(r, Dataset.class),
         Encoders.bean(Dataset.class))
       .write()
       .json(workingDir.toString() + "/dataset");
     CleanCountrySparkJob.main(new String[] {
       "--isSparkSessionManaged", Boolean.FALSE.toString(),
       "--inputPath", workingDir.toString() + "/dataset",
       "-graphTableClassName", Dataset.class.getCanonicalName(),
       "-workingDir", workingDir.toString() + "/working",
       "-country", "NL",
       "-verifyParam", "10.17632",
       "-collectedfrom", "NARCIS",
       "-hostedBy", getClass()
         .getResource("/eu/dnetlib/dhp/oa/graph/clean/hostedBy")
         .getPath()
     });
     final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
     JavaRDD<Dataset> tmp = sc
       .textFile(workingDir.toString() + "/dataset")
       .map(item -> OBJECT_MAPPER.readValue(item, Dataset.class));
     Assertions.assertEquals(1, tmp.count());
     Assertions.assertEquals(0, tmp.first().getCountry().size());
   }
 }


@@ -53,7 +53,8 @@ class ResolveEntitiesTest extends Serializable {
   def generateUpdates(spark: SparkSession): Unit = {
     val template = Source.fromInputStream(this.getClass.getResourceAsStream("updates")).mkString
-    val pids: List[String] = template.linesWithSeparators.map(l =>l.stripLineEnd)
+    val pids: List[String] = template.linesWithSeparators
+      .map(l => l.stripLineEnd)
       .map { id =>
         val r = new Result
         r.setId(id.toLowerCase.trim)
@@ -127,7 +128,7 @@ class ResolveEntitiesTest extends Serializable {
     entities.foreach { e =>
       val template = Source.fromInputStream(this.getClass.getResourceAsStream(s"$e")).mkString
       spark
-        .createDataset(spark.sparkContext.parallelize(template.linesWithSeparators.map(l =>l.stripLineEnd).toList))
+        .createDataset(spark.sparkContext.parallelize(template.linesWithSeparators.map(l => l.stripLineEnd).toList))
         .as[String]
         .write
         .option("compression", "gzip")
@@ -264,7 +265,8 @@ class ResolveEntitiesTest extends Serializable {
       Source
         .fromInputStream(this.getClass.getResourceAsStream(s"publication"))
         .mkString
-        .linesWithSeparators.map(l =>l.stripLineEnd)
+        .linesWithSeparators
+        .map(l => l.stripLineEnd)
         .next(),
       classOf[Publication]
     )


@@ -47,7 +47,7 @@ class ScholixGraphTest extends AbstractVocabularyTest {
     val inputRelations = Source
       .fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/oaf_to_summary"))
      .mkString
-    val items = inputRelations.linesWithSeparators.map(l =>l.stripLineEnd).toList
+    val items = inputRelations.linesWithSeparators.map(l => l.stripLineEnd).toList
     assertNotNull(items)
     items.foreach(i => assertTrue(i.nonEmpty))
     val result =
@@ -69,7 +69,8 @@ class ScholixGraphTest extends AbstractVocabularyTest {
         getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/merge_result_scholix")
       )
       .mkString
-    val result: List[(Relation, ScholixSummary)] = inputRelations.linesWithSeparators.map(l =>l.stripLineEnd)
+    val result: List[(Relation, ScholixSummary)] = inputRelations.linesWithSeparators
+      .map(l => l.stripLineEnd)
       .sliding(2)
       .map(s => (s.head, s(1)))
       .map(p => (mapper.readValue(p._1, classOf[Relation]), mapper.readValue(p._2, classOf[ScholixSummary])))