forked from D-Net/dnet-hadoop

[graph cleaning] testing the collectedfrom and hostedby patch procedure

parent 58c05731f9
commit 8e3edba318
CleanCfHbSparkJob.java

@@ -25,11 +25,13 @@ import org.slf4j.LoggerFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;

 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.common.HdfsSupport;
 import eu.dnetlib.dhp.common.action.model.MasterDuplicate;
 import eu.dnetlib.dhp.oa.graph.clean.country.CleanCountrySparkJob;
 import eu.dnetlib.dhp.schema.oaf.Instance;
 import eu.dnetlib.dhp.schema.oaf.KeyValue;
 import eu.dnetlib.dhp.schema.oaf.Result;
+import eu.dnetlib.dhp.utils.DHPUtils;
 import scala.Tuple2;

 public class CleanCfHbSparkJob {
@@ -76,6 +78,8 @@ public class CleanCfHbSparkJob {
             conf,
             isSparkSessionManaged,
             spark -> {
+                HdfsSupport.remove(outputPath, spark.sparkContext().hadoopConfiguration());
+                HdfsSupport.remove(resolvedPath, spark.sparkContext().hadoopConfiguration());
                 cleanCfHb(
                     spark, inputPath, entityClazz, resolvedPath, dsMasterDuplicatePath, outputPath);
             });
@@ -95,30 +99,12 @@ public class CleanCfHbSparkJob {
             .read()
             .textFile(inputPath)
             .map(as(entityClazz), Encoders.bean(entityClazz))
-            .flatMap(
-                (FlatMapFunction<T, IdCfHbMapping>) r -> {
-                    final List<IdCfHbMapping> list = Stream
-                        .concat(
-                            r.getCollectedfrom().stream().map(KeyValue::getKey),
-                            Stream
-                                .concat(
-                                    r.getInstance().stream().map(Instance::getHostedby).map(KeyValue::getKey),
-                                    r.getInstance().stream().map(Instance::getCollectedfrom).map(KeyValue::getKey)))
-                        .distinct()
-                        .map(s -> asIdCfHbMapping(r.getId(), s))
-                        .collect(Collectors.toList());
-                    return list.iterator();
-                },
-                Encoders.bean(IdCfHbMapping.class));
+            .flatMap(flattenCfHbFn(), Encoders.bean(IdCfHbMapping.class));

         // set the EMPTY master ID/NAME and save it
         resolved
             .joinWith(md, resolved.col("cfhb").equalTo(md.col("duplicateId")))
-            .map((MapFunction<Tuple2<IdCfHbMapping, MasterDuplicate>, IdCfHbMapping>) t -> {
-                t._1().setMasterId(t._2().getMasterId());
-                t._1().setMasterName(t._2().getMasterName());
-                return t._1();
-            }, Encoders.bean(IdCfHbMapping.class))
+            .map(asIdCfHbMapping(), Encoders.bean(IdCfHbMapping.class))
             .write()
             .mode(SaveMode.Overwrite)
             .json(resolvedPath);
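In outline, cleanCfHb works in two phases: it first flat-maps every record into (resultId, cfhb key) rows, joins them against the master-duplicate table on the duplicate id, and saves the resolved master id/name pairs under resolvedPath; it then joins the resolved rows back onto the entity table by result id and rewrites the affected collectedfrom/hostedby entries before writing the patched records to outputPath, as the following hunk shows.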
@@ -140,18 +126,37 @@ public class CleanCfHbSparkJob {
             .joinWith(resolvedDS, res.col("id").equalTo(resolvedDS.col("resultId")), "left")
             .groupByKey((MapFunction<Tuple2<T, IdCfHbMapping>, String>) t -> t._1().getId(), Encoders.STRING())
             .mapGroups(getMapGroupsFunction(), Encoders.bean(entityClazz))
-            //.agg(new IdCfHbMappingAggregator(entityClazz).toColumn())
             .write()
             .mode(SaveMode.Overwrite)
             .option("compression", "gzip")
             .json(outputPath);
     }

-    @NotNull
+    private static MapFunction<Tuple2<IdCfHbMapping, MasterDuplicate>, IdCfHbMapping> asIdCfHbMapping() {
+        return t -> {
+            t._1().setMasterId(t._2().getMasterId());
+            t._1().setMasterName(t._2().getMasterName());
+            return t._1();
+        };
+    }
+
+    private static <T extends Result> FlatMapFunction<T, IdCfHbMapping> flattenCfHbFn() {
+        return r -> Stream
+            .concat(
+                r.getCollectedfrom().stream().map(KeyValue::getKey),
+                Stream
+                    .concat(
+                        r.getInstance().stream().map(Instance::getHostedby).map(KeyValue::getKey),
+                        r.getInstance().stream().map(Instance::getCollectedfrom).map(KeyValue::getKey)))
+            .distinct()
+            .map(s -> asIdCfHbMapping(r.getId(), s))
+            .iterator();
+    }
+
     private static <T extends Result> MapGroupsFunction<String, Tuple2<T, IdCfHbMapping>, T> getMapGroupsFunction() {
         return new MapGroupsFunction<String, Tuple2<T, IdCfHbMapping>, T>() {
             @Override
-            public T call(String key, Iterator<Tuple2<T, IdCfHbMapping>> values) throws Exception {
+            public T call(String key, Iterator<Tuple2<T, IdCfHbMapping>> values) {
                 final Tuple2<T, IdCfHbMapping> first = values.next();
                 final T res = first._1();

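The two helpers extracted above carry the core of the patch procedure: flattenCfHbFn() emits one (resultId, datasourceKey) pair per distinct collectedfrom/hostedby key of a result, and those pairs are later resolved against the master-duplicate table. A minimal plain-Java sketch of the flattening step, with hypothetical Rec and Pair types standing in for the OAF Result/KeyValue model rather than the actual Spark job:

    import java.util.List;
    import java.util.stream.Stream;

    // Minimal sketch of the flattening performed by flattenCfHbFn(): each record
    // emits one (resultId, datasourceKey) pair per distinct collectedfrom/hostedby
    // key. Rec and Pair are hypothetical stand-ins for the OAF Result/KeyValue model.
    public class FlattenCfHbSketch {

        record Pair(String resultId, String datasourceKey) {}

        record Rec(String id, List<String> collectedfrom, List<String> hostedby, List<String> instanceCollectedfrom) {}

        static Stream<Pair> flatten(Rec r) {
            return Stream
                .concat(
                    r.collectedfrom().stream(),
                    Stream.concat(r.hostedby().stream(), r.instanceCollectedfrom().stream()))
                .distinct()
                .map(key -> new Pair(r.id(), key));
        }

        public static void main(String[] args) {
            final Rec r = new Rec(
                "50|doi_________::09821844208a5cd6300b2bfb13bca1b9",
                List.of("10|re3data_____::4c4416659cb74c2e0e891a883a047cbc"),
                List.of("10|re3data_____::4c4416659cb74c2e0e891a883a047cbc"),
                List.of("10|re3data_____::4c4416659cb74c2e0e891a883a047cbc"));
            // distinct() collapses the three occurrences into a single pair
            flatten(r).forEach(p -> System.out.println(p.resultId() + " -> " + p.datasourceKey()));
        }
    }

The distinct() step matters: a datasource that appears as both collectedfrom and hostedby of the same result yields a single mapping row, so the later join patches it once.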
CleanCfHbSparkJobTest.java

@@ -1,8 +1,16 @@

 package eu.dnetlib.dhp.oa.graph.clean.cfhb;

-import com.fasterxml.jackson.databind.ObjectMapper;
-import eu.dnetlib.dhp.schema.oaf.Dataset;
-import eu.dnetlib.dhp.schema.oaf.Publication;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;

 import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaSparkContext;
@@ -13,12 +21,10 @@ import org.junit.jupiter.api.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import java.io.File;
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.schema.oaf.Dataset;
+import eu.dnetlib.dhp.schema.oaf.Publication;

 public class CleanCfHbSparkJobTest {

@@ -56,11 +62,13 @@ public class CleanCfHbSparkJobTest {
         FileUtils
             .copyFileToDirectory(
                 Paths
-                    .get(CleanCfHbSparkJobTest.class.getResource("/eu/dnetlib/dhp/oa/graph/clean/cfhb/masterduplicate.json").toURI())
+                    .get(
+                        CleanCfHbSparkJobTest.class
+                            .getResource("/eu/dnetlib/dhp/oa/graph/clean/cfhb/masterduplicate.json")
+                            .toURI())
                     .toFile(),
                 testBaseTmpPath.resolve("workingDir").resolve("masterduplicate").toFile());


         graphInputPath = testBaseTmpPath.resolve("input").resolve("entities").toString();
         resolvedPath = testBaseTmpPath.resolve("workingDir").resolve("cfHbResolved").toString();
         graphOutputPath = testBaseTmpPath.resolve("workingDir").resolve("cfHbPatched").toString();
@@ -89,27 +97,114 @@ public class CleanCfHbSparkJobTest {
     @Test
     void testCleanCfHbSparkJob() throws Exception {
         final String outputPath = graphOutputPath + "/dataset";
+        final String inputPath = graphInputPath + "/dataset";
+
+        org.apache.spark.sql.Dataset<Dataset> records = read(spark, inputPath, Dataset.class);
+        Dataset d = records
+            .filter("id = '50|doi_________::09821844208a5cd6300b2bfb13bca1b9'")
+            .first();
+        assertEquals("10|re3data_____::4c4416659cb74c2e0e891a883a047cbc", d.getCollectedfrom().get(0).getKey());
+        assertEquals("Bacterial Protein Interaction Database - DUP", d.getCollectedfrom().get(0).getValue());
+        assertEquals(
+            "10|re3data_____::4c4416659cb74c2e0e891a883a047cbc", d.getInstance().get(0).getCollectedfrom().getKey());
+        assertEquals(
+            "Bacterial Protein Interaction Database - DUP", d.getInstance().get(0).getCollectedfrom().getValue());
+
+        d = records
+            .filter("id = '50|DansKnawCris::0dd644304b7116e8e58da3a5e3adc37a'")
+            .first();
+        assertEquals("10|opendoar____::788b4ac1e172d8e520c2b9461c0a3d35", d.getCollectedfrom().get(0).getKey());
+        assertEquals("FILUR DATA - DUP", d.getCollectedfrom().get(0).getValue());
+        assertEquals(
+            "10|opendoar____::788b4ac1e172d8e520c2b9461c0a3d35", d.getInstance().get(0).getCollectedfrom().getKey());
+        assertEquals("FILUR DATA - DUP", d.getInstance().get(0).getCollectedfrom().getValue());
+        assertEquals(
+            "10|re3data_____::6ffd7bc058f762912dc494cd9c175341", d.getInstance().get(0).getHostedby().getKey());
+        assertEquals("depositar - DUP", d.getInstance().get(0).getHostedby().getValue());
+
+        d = records
+            .filter("id = '50|DansKnawCris::203a27996ddc0fd1948258e5b7dec61c'")
+            .first();
+        assertEquals("10|openaire____::c6df70599aa984f16ee52b4b86d2e89f", d.getCollectedfrom().get(0).getKey());
+        assertEquals("DANS (Data Archiving and Networked Services)", d.getCollectedfrom().get(0).getValue());
+        assertEquals(
+            "10|openaire____::c6df70599aa984f16ee52b4b86d2e89f", d.getInstance().get(0).getCollectedfrom().getKey());
+        assertEquals(
+            "DANS (Data Archiving and Networked Services)", d.getInstance().get(0).getCollectedfrom().getValue());
+        assertEquals(
+            "10|openaire____::c6df70599aa984f16ee52b4b86d2e89f", d.getInstance().get(0).getHostedby().getKey());
+        assertEquals("DANS (Data Archiving and Networked Services)", d.getInstance().get(0).getHostedby().getValue());
+
         CleanCfHbSparkJob
             .main(
                 new String[] {
                     "--isSparkSessionManaged", Boolean.FALSE.toString(),
-                    "--inputPath", graphInputPath + "/dataset",
+                    "--inputPath", inputPath,
                     "--outputPath", outputPath,
                     "--resolvedPath", resolvedPath + "/dataset",
                     "--graphTableClassName", Dataset.class.getCanonicalName(),
                     "--datasourceMasterDuplicate", dsMasterDuplicatePath
                 });

-        //final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+        assertTrue(Files.exists(Paths.get(graphOutputPath, "dataset")));

-        Assertions.assertTrue(Files.exists(Paths.get(graphOutputPath, "dataset")));
+        records = read(spark, outputPath, Dataset.class);

-        final org.apache.spark.sql.Dataset<Dataset> d = spark
+        assertEquals(3, records.count());

+        d = records
+            .filter("id = '50|doi_________::09821844208a5cd6300b2bfb13bca1b9'")
+            .first();
+        assertEquals("10|fairsharing_::a29d1598024f9e87beab4b98411d48ce", d.getCollectedfrom().get(0).getKey());
+        assertEquals("Bacterial Protein Interaction Database", d.getCollectedfrom().get(0).getValue());
+        assertEquals(
+            "10|fairsharing_::a29d1598024f9e87beab4b98411d48ce", d.getInstance().get(0).getCollectedfrom().getKey());
+        assertEquals("Bacterial Protein Interaction Database", d.getInstance().get(0).getCollectedfrom().getValue());
+
+        d = records
+            .filter("id = '50|DansKnawCris::0dd644304b7116e8e58da3a5e3adc37a'")
+            .first();
+        assertEquals("10|re3data_____::fc1db64b3964826913b1e9eafe830490", d.getCollectedfrom().get(0).getKey());
+        assertEquals("FULIR Data", d.getCollectedfrom().get(0).getValue());
+        assertEquals(
+            "10|re3data_____::fc1db64b3964826913b1e9eafe830490", d.getInstance().get(0).getCollectedfrom().getKey());
+        assertEquals("FULIR Data", d.getInstance().get(0).getCollectedfrom().getValue());
+        assertEquals(
+            "10|fairsharing_::3f647cadf56541fb9513cb63ec370187", d.getInstance().get(0).getHostedby().getKey());
+        assertEquals("depositar", d.getInstance().get(0).getHostedby().getValue());
+
+        d = records
+            .filter("id = '50|DansKnawCris::203a27996ddc0fd1948258e5b7dec61c'")
+            .first();
+        assertEquals("10|openaire____::c6df70599aa984f16ee52b4b86d2e89f", d.getCollectedfrom().get(0).getKey());
+        assertEquals("DANS (Data Archiving and Networked Services)", d.getCollectedfrom().get(0).getValue());
+        assertEquals(
+            "10|openaire____::c6df70599aa984f16ee52b4b86d2e89f", d.getInstance().get(0).getCollectedfrom().getKey());
+        assertEquals(
+            "DANS (Data Archiving and Networked Services)", d.getInstance().get(0).getCollectedfrom().getValue());
+        assertEquals(
+            "10|openaire____::c6df70599aa984f16ee52b4b86d2e89f", d.getInstance().get(0).getHostedby().getKey());
+        assertEquals("DANS (Data Archiving and Networked Services)", d.getInstance().get(0).getHostedby().getValue());
+
+        d = records
+            .filter("id = '50|DansKnawCris::203a27996ddc0fd1948258e5b7dec61c'")
+            .first();
+        assertEquals("10|openaire____::c6df70599aa984f16ee52b4b86d2e89f", d.getCollectedfrom().get(0).getKey());
+        assertEquals("DANS (Data Archiving and Networked Services)", d.getCollectedfrom().get(0).getValue());
+        assertEquals(
+            "10|openaire____::c6df70599aa984f16ee52b4b86d2e89f", d.getInstance().get(0).getCollectedfrom().getKey());
+        assertEquals(
+            "DANS (Data Archiving and Networked Services)", d.getInstance().get(0).getCollectedfrom().getValue());
+        assertEquals(
+            "10|openaire____::c6df70599aa984f16ee52b4b86d2e89f", d.getInstance().get(0).getHostedby().getKey());
+        assertEquals("DANS (Data Archiving and Networked Services)", d.getInstance().get(0).getHostedby().getValue());
+    }

+    private <R> org.apache.spark.sql.Dataset<R> read(SparkSession spark, String path, Class<R> clazz) {
+        return spark
             .read()
-            .textFile(outputPath)
-            .map(as(Dataset.class), Encoders.bean(Dataset.class));
-        Assertions.assertEquals(3, d.count());
+            .textFile(path)
+            .map(as(clazz), Encoders.bean(clazz));

     }

     private static <R> MapFunction<String, R> as(Class<R> clazz) {
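The assertions above exercise both sides of the patch: before the job runs, the fixture records carry the duplicate datasource entries (the "- DUP" names); afterwards the same records carry the master id/name resolved through the masterduplicate.json table. The substitution amounts to a lookup from duplicate key to (masterId, masterName); a minimal in-memory sketch, where the table literal is a hypothetical stand-in for the masterduplicate.json content seeded with one mapping taken from the test expectations:

    import java.util.Map;

    // Sketch of the cf/hb patch step: a duplicate datasource key is replaced by
    // its master equivalent when the master-duplicate table contains it; unknown
    // keys pass through unchanged. The table below is a hypothetical in-memory
    // stand-in, seeded with one mapping from the test expectations.
    public class CfHbPatchSketch {

        record MasterEntry(String masterId, String masterName) {}

        static final Map<String, MasterEntry> MASTER_DUPLICATE = Map.of(
            "10|re3data_____::4c4416659cb74c2e0e891a883a047cbc",
            new MasterEntry(
                "10|fairsharing_::a29d1598024f9e87beab4b98411d48ce",
                "Bacterial Protein Interaction Database"));

        static String patch(String key) {
            final MasterEntry m = MASTER_DUPLICATE.get(key);
            return m == null ? key : m.masterId();
        }

        public static void main(String[] args) {
            // duplicate key resolves to its master id
            System.out.println(patch("10|re3data_____::4c4416659cb74c2e0e891a883a047cbc"));
            // a key with no duplicate entry is left as-is, matching the unchanged DANS records
            System.out.println(patch("10|openaire____::c6df70599aa984f16ee52b4b86d2e89f"));
        }
    }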