forked from D-Net/dnet-hadoop
[EOSC context TAG] refactoring after compilation
This commit is contained in:
parent ffb0ce3fb9
commit 06a95daf60
@@ -93,14 +93,10 @@ public class SparkEoscBulkTag implements Serializable {
			String datasourceMapPath,
			Class<R> resultClazz) {


		List<String> hostedByList = readPath(spark, datasourceMapPath, DatasourceMaster.class)
			.map((MapFunction<DatasourceMaster, String>) dm -> dm.getMaster(), Encoders.STRING())
			.collectAsList();




		readPath(spark, inputPath, resultClazz)
			.map(patchResult(), Encoders.bean(resultClazz))
			.filter(Objects::nonNull)

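The hunk above (from SparkEoscBulkTag, per the hunk header) follows a common Spark pattern: read a small datasource-to-master mapping, collect one field to a driver-side list, then map over the result dataset and keep only the records that were actually patched. Below is a minimal, self-contained sketch of that pattern; the bean classes (DsMaster, Result), the paths, and the String-valued context field are illustrative stand-ins, not the project's DatasourceMaster / OAF classes or its readPath / patchResult helpers.

import java.io.Serializable;
import java.util.List;
import java.util.Objects;

import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

public class CollectAndTagSketch {

	// Hypothetical beans standing in for DatasourceMaster and the OAF result types.
	public static class DsMaster implements Serializable {
		private String master;
		public String getMaster() { return master; }
		public void setMaster(String master) { this.master = master; }
	}

	public static class Result implements Serializable {
		private String id;
		private String hostedBy;
		private String context;
		public String getId() { return id; }
		public void setId(String id) { this.id = id; }
		public String getHostedBy() { return hostedBy; }
		public void setHostedBy(String hostedBy) { this.hostedBy = hostedBy; }
		public String getContext() { return context; }
		public void setContext(String context) { this.context = context; }
	}

	public static void main(String[] args) {
		SparkSession spark = SparkSession
			.builder()
			.appName("CollectAndTagSketch")
			.master("local[*]")
			.getOrCreate();

		// Small mapping dataset: collect the "master" datasource ids to the driver.
		List<String> hostedByList = spark
			.read()
			.json("/tmp/datasourceMaster") // illustrative path
			.as(Encoders.bean(DsMaster.class))
			.map((MapFunction<DsMaster, String>) DsMaster::getMaster, Encoders.STRING())
			.collectAsList();

		// Larger result dataset: tag records hosted by one of those ids, drop the rest.
		Dataset<Result> tagged = spark
			.read()
			.json("/tmp/results") // illustrative path
			.as(Encoders.bean(Result.class))
			.map((MapFunction<Result, Result>) r -> {
				if (hostedByList.contains(r.getHostedBy())) {
					r.setContext("eosc");
					return r;
				}
				return null;
			}, Encoders.bean(Result.class))
			.filter((FilterFunction<Result>) Objects::nonNull);

		tagged
			.write()
			.mode("overwrite")
			.option("compression", "gzip")
			.json("/tmp/tagged");

		spark.stop();
	}
}

Collecting the mapping with collectAsList() only works because the datasource-to-master map is small enough to fit on the driver; for a large mapping a broadcast variable or a join would be the safer choice.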
@@ -1,16 +1,11 @@

package eu.dnetlib.dhp.bulktag;
/**
 * @author miriam.baglioni
 * @Date 22/07/22
 */

import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.bulktag.eosc.SparkEoscBulkTag;
import eu.dnetlib.dhp.schema.oaf.Dataset;
import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct;
import eu.dnetlib.dhp.schema.oaf.Software;
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;

@@ -26,10 +21,17 @@ import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
/**
 * @author miriam.baglioni
 * @Date 22/07/22
 */
import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.bulktag.eosc.SparkEoscBulkTag;
import eu.dnetlib.dhp.schema.oaf.Dataset;
import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct;
import eu.dnetlib.dhp.schema.oaf.Software;
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;

//"50|475c1990cbb2::0fecfb874d9395aa69d2f4d7cd1acbea" has instance hostedby eosc
//"50|475c1990cbb2::3185cd5d8a2b0a06bb9b23ef11748eb1" has instance hostedby eosc

@@ -87,7 +89,6 @@ public class EOSCContextTaggingTest {
			.option("compression", "gzip")
			.json(workingDir.toString() + "/input/dataset");


		SparkEoscBulkTag
			.main(
				new String[] {

@@ -95,7 +96,10 @@ public class EOSCContextTaggingTest {
					"-sourcePath",
					workingDir.toString() + "/input/dataset",
					"-workingPath", workingDir.toString() + "/working/dataset",
-					"-datasourceMapPath", getClass().getResource("/eu/dnetlib/dhp/bulktag/eosc/datasourceMasterAssociation/datasourceMaster").getPath(),
+					"-datasourceMapPath",
+					getClass()
+						.getResource("/eu/dnetlib/dhp/bulktag/eosc/datasourceMasterAssociation/datasourceMaster")
+						.getPath(),
					"-resultTableName", "eu.dnetlib.dhp.schema.oaf.Dataset"
				});

@@ -116,37 +120,43 @@ public class EOSCContextTaggingTest {
				.count());

		Assertions
-			.assertEquals(1,
+			.assertEquals(
+				1,
				tmp
-					.filter(d -> d.getId().equals("50|475c1990cbb2::0fecfb874d9395aa69d2f4d7cd1acbea")
+					.filter(
+						d -> d.getId().equals("50|475c1990cbb2::0fecfb874d9395aa69d2f4d7cd1acbea")
							&&
-						d.getContext().stream().anyMatch(c -> c.getId().equals("eosc"))).count()
-			);
+							d.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
+					.count());
		Assertions
-			.assertEquals(1,
+			.assertEquals(
+				1,
				tmp
-					.filter(d -> d.getId().equals("50|475c1990cbb2::3185cd5d8a2b0a06bb9b23ef11748eb1")
+					.filter(
+						d -> d.getId().equals("50|475c1990cbb2::3185cd5d8a2b0a06bb9b23ef11748eb1")
							&&
-						d.getContext().stream().anyMatch(c -> c.getId().equals("eosc"))).count()
-			);
+							d.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
+					.count());

		Assertions
-			.assertEquals(1,
+			.assertEquals(
+				1,
				tmp
-					.filter(d -> d.getId().equals("50|475c1990cbb2::3894c94123e96df8a21249957cf160cb")
+					.filter(
+						d -> d.getId().equals("50|475c1990cbb2::3894c94123e96df8a21249957cf160cb")
							&&
-						d.getContext().stream().anyMatch(c -> c.getId().equals("eosc"))).count()
-			);
+							d.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
+					.count());

		Assertions
-			.assertEquals(1,
+			.assertEquals(
+				1,
				tmp
-					.filter(d -> d.getId().equals("50|475c1990cbb2::3894c94123e96df8a21249957cf160cb")
+					.filter(
+						d -> d.getId().equals("50|475c1990cbb2::3894c94123e96df8a21249957cf160cb")
							&&
-						d.getContext().stream().anyMatch(c -> c.getId().equals("eosc"))).count()
-			);
+							d.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
+					.count());
	}


}