forked from D-Net/dnet-hadoop
[EOSC BulkTag] refactoring
parent 840465958b
commit 869e129288
@@ -97,7 +97,8 @@ public class SparkEoscBulkTag implements Serializable {
                 });
     }

-    private static void selectCompliantDatasources(SparkSession spark, String inputPath, String workingPath, String datasourceMapPath) {
+    private static void selectCompliantDatasources(SparkSession spark, String inputPath, String workingPath,
+        String datasourceMapPath) {
         Dataset<Datasource> datasources = readPath(spark, inputPath + "datasource", Datasource.class)
             .filter((FilterFunction<Datasource>) ds -> {
                 final String compatibility = ds.getOpenairecompatibility().getClassid();
@@ -109,8 +110,11 @@ public class SparkEoscBulkTag implements Serializable {

         Dataset<DatasourceMaster> datasourceMaster = readPath(spark, datasourceMapPath, DatasourceMaster.class);

-        datasources.joinWith(datasourceMaster, datasources.col("id").equalTo(datasourceMaster.col("master")), "left")
-            .map((MapFunction<Tuple2<Datasource, DatasourceMaster>, DatasourceMaster>) t2 -> t2._2(), Encoders.bean(DatasourceMaster.class) )
+        datasources
+            .joinWith(datasourceMaster, datasources.col("id").equalTo(datasourceMaster.col("master")), "left")
+            .map(
+                (MapFunction<Tuple2<Datasource, DatasourceMaster>, DatasourceMaster>) t2 -> t2._2(),
+                Encoders.bean(DatasourceMaster.class))
             .filter(Objects::nonNull)
             .write()
             .mode(SaveMode.Overwrite)
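The two hunks above reshape selectCompliantDatasources into a single fluent chain: a typed left joinWith between the datasources and the master mapping, a map that projects out the right side of each pair with a bean encoder, and a nonNull filter that drops datasources without a master before writing. A minimal self-contained sketch of that pattern, using hypothetical Ds/Master beans and a /tmp output path rather than the dnet-hadoop model classes and working paths:

    import java.io.Serializable;
    import java.util.Arrays;
    import java.util.Objects;

    import org.apache.spark.api.java.function.MapFunction;
    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Encoders;
    import org.apache.spark.sql.SaveMode;
    import org.apache.spark.sql.SparkSession;

    import scala.Tuple2;

    // Sketch only: Ds and Master are simplified stand-ins for the project's
    // Datasource and DatasourceMaster beans.
    public class JoinSketch {

        public static class Ds implements Serializable {
            private String id;
            public String getId() { return id; }
            public void setId(String id) { this.id = id; }
        }

        public static class Master implements Serializable {
            private String master;
            public String getMaster() { return master; }
            public void setMaster(String master) { this.master = master; }
        }

        public static void main(String[] args) {
            SparkSession spark = SparkSession.builder().master("local[*]").getOrCreate();

            Ds matched = new Ds();
            matched.setId("ds1");
            Ds unmatched = new Ds();
            unmatched.setId("ds2");
            Master m = new Master();
            m.setMaster("ds1");

            Dataset<Ds> datasources = spark.createDataset(Arrays.asList(matched, unmatched), Encoders.bean(Ds.class));
            Dataset<Master> masters = spark.createDataset(Arrays.asList(m), Encoders.bean(Master.class));

            // the left join keeps every datasource; rows without a master get a
            // null right side, which map surfaces and the nonNull filter drops
            datasources
                .joinWith(masters, datasources.col("id").equalTo(masters.col("master")), "left")
                .map((MapFunction<Tuple2<Ds, Master>, Master>) t2 -> t2._2(), Encoders.bean(Master.class))
                .filter(Objects::nonNull)
                .write()
                .mode(SaveMode.Overwrite)
                .json("/tmp/join_sketch");

            spark.stop();
        }
    }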
@@ -6,8 +6,6 @@ import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.List;

-import eu.dnetlib.dhp.bulktag.eosc.DatasourceMaster;
-import eu.dnetlib.dhp.schema.oaf.*;
 import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
@@ -29,7 +27,9 @@ import org.slf4j.LoggerFactory;
 */
 import com.fasterxml.jackson.databind.ObjectMapper;

+import eu.dnetlib.dhp.bulktag.eosc.DatasourceMaster;
 import eu.dnetlib.dhp.bulktag.eosc.SparkEoscBulkTag;
+import eu.dnetlib.dhp.schema.oaf.*;

 //"50|475c1990cbb2::0fecfb874d9395aa69d2f4d7cd1acbea" has instance hostedby eosc (cris)
 //"50|475c1990cbb2::3185cd5d8a2b0a06bb9b23ef11748eb1" has instance hostedby eosc (zenodo)
@@ -74,7 +74,7 @@ public class EOSCContextTaggingTest {
     }

     @Test
-    void EoscContextTagTest() throws Exception{
+    void EoscContextTagTest() throws Exception {

         spark
             .read()
@@ -114,10 +114,12 @@ public class EOSCContextTaggingTest {
             });
         final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

-        Assertions.assertEquals(2, sc
-            .textFile(workingDir.toString() + "/working/datasource")
-            .map(item -> OBJECT_MAPPER.readValue(item, DatasourceMaster.class)).count());
+        Assertions
+            .assertEquals(
+                2, sc
+                    .textFile(workingDir.toString() + "/working/datasource")
+                    .map(item -> OBJECT_MAPPER.readValue(item, DatasourceMaster.class))
+                    .count());

         JavaRDD<Dataset> tmp = sc
             .textFile(workingDir.toString() + "/input/dataset")
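The reflowed assertion above is a standard Spark test idiom: read the job's JSON-lines output back with textFile, deserialize each line with Jackson, and compare the record count. A compact sketch of the same idiom, with a placeholder Pojo and a /tmp path standing in for DatasourceMaster and the test's working directory:

    import java.util.Arrays;

    import com.fasterxml.jackson.databind.ObjectMapper;

    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.sql.SparkSession;
    import org.junit.jupiter.api.Assertions;
    import org.junit.jupiter.api.Test;

    // Sketch only: Pojo and the /tmp path are placeholders, not the
    // project's real model class or working directory.
    public class CountAssertionSketch {

        private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

        public static class Pojo {
            public String id;
        }

        @Test
        void countsJsonRecords() {
            SparkSession spark = SparkSession.builder().master("local[*]").getOrCreate();
            JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

            // stand-in for the job output: one JSON record per line
            sc
                .parallelize(Arrays.asList("{\"id\":\"a\"}", "{\"id\":\"b\"}"))
                .saveAsTextFile("/tmp/count_sketch");

            // each line is one serialized record, so the RDD count after
            // deserialization equals the number of records written
            Assertions
                .assertEquals(
                    2, sc
                        .textFile("/tmp/count_sketch")
                        .map(item -> OBJECT_MAPPER.readValue(item, Pojo.class))
                        .count());

            spark.stop();
        }
    }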
@@ -187,7 +189,6 @@ public class EOSCContextTaggingTest {
             .option("compression", "gzip")
             .json(workingDir.toString() + "/input/dataset");

-
         spark
             .read()
             .textFile(getClass().getResource("/eu/dnetlib/dhp/bulktag/eosc/datasource/datasource").getPath())
@@ -216,8 +217,6 @@ public class EOSCContextTaggingTest {

         final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

-
-
         JavaRDD<Dataset> tmp = sc
             .textFile(workingDir.toString() + "/input/dataset")
             .map(item -> OBJECT_MAPPER.readValue(item, Dataset.class));
@@ -232,7 +231,6 @@ public class EOSCContextTaggingTest {
                     s -> s.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
                 .count());

-
     }

 }
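The closing assertion counts tagged results by checking each record's context list for the eosc id with stream().anyMatch(...). A plain-Java sketch of that predicate, with simplified stand-ins for the OAF context classes:

    import java.util.Arrays;
    import java.util.List;

    // Sketch only: Ctx and TaggedResult are simplified stand-ins for the
    // OAF Context and Result classes used by the test.
    public class ContextPredicateSketch {

        static class Ctx {
            private final String id;
            Ctx(String id) { this.id = id; }
            String getId() { return id; }
        }

        static class TaggedResult {
            private final List<Ctx> context;
            TaggedResult(List<Ctx> context) { this.context = context; }
            List<Ctx> getContext() { return context; }
        }

        public static void main(String[] args) {
            List<TaggedResult> records = Arrays.asList(
                new TaggedResult(Arrays.asList(new Ctx("eosc"))),
                new TaggedResult(Arrays.asList(new Ctx("other"))));

            // count records whose context list carries the "eosc" id,
            // mirroring the predicate in the assertion above
            long tagged = records
                .stream()
                .filter(s -> s.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
                .count();

            System.out.println(tagged); // prints 1
        }
    }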