
[EOSC BulkTag] refactoring

Miriam Baglioni 2022-09-20 16:13:18 +02:00
parent 840465958b
commit 869e129288
2 changed files with 110 additions and 108 deletions


@@ -97,25 +97,29 @@ public class SparkEoscBulkTag implements Serializable {
 			});
 	}
 
-	private static void selectCompliantDatasources(SparkSession spark, String inputPath, String workingPath, String datasourceMapPath) {
+	private static void selectCompliantDatasources(SparkSession spark, String inputPath, String workingPath,
+		String datasourceMapPath) {
 		Dataset<Datasource> datasources = readPath(spark, inputPath + "datasource", Datasource.class)
 			.filter((FilterFunction<Datasource>) ds -> {
 				final String compatibility = ds.getOpenairecompatibility().getClassid();
 				return compatibility.equalsIgnoreCase(OPENAIRE_3) ||
 					compatibility.equalsIgnoreCase(OPENAIRE_4) ||
 					compatibility.equalsIgnoreCase(OPENAIRE_CRIS) ||
 					compatibility.equalsIgnoreCase(OPENAIRE_DATA);
 			});
 
 		Dataset<DatasourceMaster> datasourceMaster = readPath(spark, datasourceMapPath, DatasourceMaster.class);
 
-		datasources.joinWith(datasourceMaster, datasources.col("id").equalTo(datasourceMaster.col("master")), "left")
-			.map((MapFunction<Tuple2<Datasource, DatasourceMaster>, DatasourceMaster>) t2 -> t2._2(), Encoders.bean(DatasourceMaster.class))
-			.filter(Objects::nonNull)
-			.write()
-			.mode(SaveMode.Overwrite)
-			.option("compression", "gzip")
-			.json(workingPath + "datasource");
+		datasources
+			.joinWith(datasourceMaster, datasources.col("id").equalTo(datasourceMaster.col("master")), "left")
+			.map(
+				(MapFunction<Tuple2<Datasource, DatasourceMaster>, DatasourceMaster>) t2 -> t2._2(),
+				Encoders.bean(DatasourceMaster.class))
+			.filter(Objects::nonNull)
+			.write()
+			.mode(SaveMode.Overwrite)
+			.option("compression", "gzip")
+			.json(workingPath + "datasource");
 	}
 
 	private static <R extends Result> void execBulkTag(
@@ -130,7 +134,7 @@ public class SparkEoscBulkTag implements Serializable {
 			.collectAsList();
 
 		readPath(spark, inputPath + resultType, resultClazz)
 			.map(
 				(MapFunction<R, R>) value -> enrich(value, hostedByList),
 				Encoders.bean(resultClazz))
 			.write()
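
For context, both hunks above rely on a readPath helper that is not part of this commit. Below is a minimal sketch of the shape such a helper typically has in this codebase, assuming newline-delimited JSON input and a shared Jackson ObjectMapper; the exact signature, class name, and location are assumptions, since the diff does not show them.

	// Sketch only: readPath(...) is called in the hunks above but not defined in this diff.
	// Assumed pattern: read newline-delimited JSON and map each line to a bean-encoded Dataset.
	import com.fasterxml.jackson.databind.ObjectMapper;

	import org.apache.spark.api.java.function.MapFunction;
	import org.apache.spark.sql.Dataset;
	import org.apache.spark.sql.Encoders;
	import org.apache.spark.sql.SparkSession;

	public class ReadPathSketch {

		private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

		// Reads a directory of newline-delimited JSON and deserializes each line into clazz,
		// returning a typed Dataset backed by a bean encoder.
		public static <R> Dataset<R> readPath(SparkSession spark, String inputPath, Class<R> clazz) {
			return spark
				.read()
				.textFile(inputPath)
				.map(
					(MapFunction<String, R>) value -> OBJECT_MAPPER.readValue(value, clazz),
					Encoders.bean(clazz));
		}
	}

With a helper of this shape, both the datasource input and the result input can be loaded as typed Datasets and written back with bean encoders, which is what selectCompliantDatasources and execBulkTag do above.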


@@ -6,8 +6,6 @@ import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.List;
 
-import eu.dnetlib.dhp.bulktag.eosc.DatasourceMaster;
-import eu.dnetlib.dhp.schema.oaf.*;
 import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
@@ -29,7 +27,9 @@ import org.slf4j.LoggerFactory;
 */
 import com.fasterxml.jackson.databind.ObjectMapper;
 
+import eu.dnetlib.dhp.bulktag.eosc.DatasourceMaster;
 import eu.dnetlib.dhp.bulktag.eosc.SparkEoscBulkTag;
+import eu.dnetlib.dhp.schema.oaf.*;
 
 //"50|475c1990cbb2::0fecfb874d9395aa69d2f4d7cd1acbea" has instance hostedby eosc (cris)
 //"50|475c1990cbb2::3185cd5d8a2b0a06bb9b23ef11748eb1" has instance hostedby eosc (zenodo)
@@ -74,103 +74,105 @@ public class EOSCContextTaggingTest {
 	}
 
 	@Test
-	void EoscContextTagTest() throws Exception{
+	void EoscContextTagTest() throws Exception {
 
 		spark
 			.read()
 			.textFile(getClass().getResource("/eu/dnetlib/dhp/bulktag/eosc/datasource/datasource_1").getPath())
 			.map(
 				(MapFunction<String, Datasource>) value -> OBJECT_MAPPER.readValue(value, Datasource.class),
 				Encoders.bean(Datasource.class))
 			.write()
 			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
 			.json(workingDir.toString() + "/input/datasource");
 
 		spark
 			.read()
 			.textFile(getClass().getResource("/eu/dnetlib/dhp/bulktag/eosc/dataset/dataset_10.json").getPath())
 			.map(
 				(MapFunction<String, Dataset>) value -> OBJECT_MAPPER.readValue(value, Dataset.class),
 				Encoders.bean(Dataset.class))
 			.write()
 			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
 			.json(workingDir.toString() + "/input/dataset");
 
 		SparkEoscBulkTag
 			.main(
 				new String[] {
 					"-isSparkSessionManaged", Boolean.FALSE.toString(),
 					"-sourcePath",
 					workingDir.toString() + "/input/",
 					"-workingPath", workingDir.toString() + "/working/",
 					"-datasourceMapPath",
 					getClass()
 						.getResource("/eu/dnetlib/dhp/bulktag/eosc/datasourceMasterAssociation/datasourceMaster")
 						.getPath(),
 					"-resultTableName", "eu.dnetlib.dhp.schema.oaf.Dataset",
 					"-resultType", "dataset"
 				});
 
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 
-		Assertions.assertEquals(2, sc
-			.textFile(workingDir.toString() + "/working/datasource")
-			.map(item -> OBJECT_MAPPER.readValue(item, DatasourceMaster.class)).count());
+		Assertions
+			.assertEquals(
+				2, sc
+					.textFile(workingDir.toString() + "/working/datasource")
+					.map(item -> OBJECT_MAPPER.readValue(item, DatasourceMaster.class))
+					.count());
 
 		JavaRDD<Dataset> tmp = sc
 			.textFile(workingDir.toString() + "/input/dataset")
 			.map(item -> OBJECT_MAPPER.readValue(item, Dataset.class));
 
 		Assertions.assertEquals(10, tmp.count());
 
 		Assertions
 			.assertEquals(
 				2,
 				tmp
 					.filter(
 						s -> s.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
 					.count());
 
 		Assertions
 			.assertEquals(
 				1,
 				tmp
 					.filter(
 						d -> d.getId().equals("50|475c1990cbb2::0fecfb874d9395aa69d2f4d7cd1acbea")
 							&&
 							d.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
 					.count());
 
 		Assertions
 			.assertEquals(
 				1,
 				tmp
 					.filter(
 						d -> d.getId().equals("50|475c1990cbb2::3185cd5d8a2b0a06bb9b23ef11748eb1")
 							&&
 							d.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
 					.count());
 
 		Assertions
 			.assertEquals(
 				0,
 				tmp
 					.filter(
 						d -> d.getId().equals("50|475c1990cbb2::449f28eefccf9f70c04ad70d61e041c7")
 							&&
 							d.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
 					.count());
 
 		Assertions
 			.assertEquals(
 				0,
 				tmp
 					.filter(
 						d -> d.getId().equals("50|475c1990cbb2::3894c94123e96df8a21249957cf160cb")
 							&&
 							d.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
 					.count());
 	}
 
 	@Test
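
The test above exercises enrich(value, hostedByList) only indirectly, through the expected counts: datasets with an instance hosted by one of the two compliant datasources gain a context with id "eosc", the others do not. Below is a minimal sketch of the kind of enrichment those expectations imply, assuming hostedByList carries the ids of the compliant datasources; the class name EnrichSketch, the null guards, and the omission of a DataInfo on the context are illustrative assumptions, not the method from this commit.

	// Sketch only: enrich(...) is called by execBulkTag but its body is not part of this diff.
	import java.util.ArrayList;
	import java.util.List;

	import eu.dnetlib.dhp.schema.oaf.Context;
	import eu.dnetlib.dhp.schema.oaf.Result;

	public class EnrichSketch {

		public static <R extends Result> R enrich(R value, List<String> hostedByList) {
			// results without instances cannot be hosted by a compliant datasource
			if (value.getInstance() == null) {
				return value;
			}
			boolean hostedByEosc = value
				.getInstance()
				.stream()
				.anyMatch(i -> i.getHostedby() != null && hostedByList.contains(i.getHostedby().getKey()));
			if (!hostedByEosc) {
				return value;
			}
			if (value.getContext() == null) {
				value.setContext(new ArrayList<>());
			}
			// add the "eosc" context only once
			if (value.getContext().stream().noneMatch(c -> "eosc".equals(c.getId()))) {
				Context eosc = new Context();
				eosc.setId("eosc");
				value.getContext().add(eosc);
			}
			return value;
		}
	}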
@@ -187,17 +189,16 @@ public class EOSCContextTaggingTest {
 			.option("compression", "gzip")
 			.json(workingDir.toString() + "/input/dataset");
 
 		spark
 			.read()
 			.textFile(getClass().getResource("/eu/dnetlib/dhp/bulktag/eosc/datasource/datasource").getPath())
 			.map(
 				(MapFunction<String, Datasource>) value -> OBJECT_MAPPER.readValue(value, Datasource.class),
 				Encoders.bean(Datasource.class))
 			.write()
 			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
 			.json(workingDir.toString() + "/input/datasource");
 
 		SparkEoscBulkTag
 			.main(
@@ -211,13 +212,11 @@ public class EOSCContextTaggingTest {
 						.getResource("/eu/dnetlib/dhp/bulktag/eosc/datasourceMasterAssociation/datasourceMaster")
 						.getPath(),
 					"-resultTableName", "eu.dnetlib.dhp.schema.oaf.Dataset",
 					"-resultType", "dataset"
 				});
 
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 
 		JavaRDD<Dataset> tmp = sc
 			.textFile(workingDir.toString() + "/input/dataset")
 			.map(item -> OBJECT_MAPPER.readValue(item, Dataset.class));
@@ -232,7 +231,6 @@ public class EOSCContextTaggingTest {
 						s -> s.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
 					.count());
 	}
 
 }
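
DatasourceMaster is imported from eu.dnetlib.dhp.bulktag.eosc, but its source is not included in this diff. The following minimal bean sketch is compatible with the join on col("master") in selectCompliantDatasources and with the Jackson round-trip in the tests; the datasource field is an assumption about the other side of the association.

	// Sketch only: a bean shape consistent with how DatasourceMaster is used above.
	import java.io.Serializable;

	public class DatasourceMasterSketch implements Serializable {

		private String datasource; // assumed: id of the associated (duplicate) datasource record
		private String master; // joined against datasources.col("id") in selectCompliantDatasources

		public String getDatasource() {
			return datasource;
		}

		public void setDatasource(String datasource) {
			this.datasource = datasource;
		}

		public String getMaster() {
			return master;
		}

		public void setMaster(String master) {
			this.master = master;
		}
	}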