
creating temporary directories in dedup test

miconis 2020-04-16 12:13:26 +02:00
parent 011b342bc9
commit cd4d9a148f
4 changed files with 23 additions and 16 deletions


@@ -24,7 +24,7 @@ abstract class AbstractSparkAction implements Serializable {
     public ArgumentApplicationParser parser; //parameters for the spark action
     public SparkSession spark; //the spark session
 
-    public AbstractSparkAction(ArgumentApplicationParser parser, SparkSession spark) throws Exception {
+    public AbstractSparkAction(ArgumentApplicationParser parser, SparkSession spark) {
         this.parser = parser;
         this.spark = spark;
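
Aside on this first hunk: removing throws Exception from the base constructor does not force subclasses to follow, because constructors are not overridden; a subclass constructor may still declare checked exceptions of its own, as the renamed SparkCreateMergeRels does below. A minimal sketch with hypothetical names:

// Hypothetical Base/Derived pair mirroring the signature change above.
class Base {
    Base(String arg) { /* declares no checked exceptions */ }
}

class Derived extends Base {
    // A constructor, unlike an overriding method, may declare checked
    // exceptions that its superclass constructor does not.
    Derived(String arg) throws Exception {
        super(arg);
    }
}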


@@ -16,7 +16,6 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.api.java.function.FlatMapFunction;
 import org.apache.spark.api.java.function.PairFunction;
 import org.apache.spark.graphx.Edge;
 import org.apache.spark.rdd.RDD;
@@ -31,11 +30,11 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
-public class SparkCreateConnectedComponent extends AbstractSparkAction {
+public class SparkCreateMergeRels extends AbstractSparkAction {
 
-    private static final Log log = LogFactory.getLog(SparkCreateConnectedComponent.class);
+    private static final Log log = LogFactory.getLog(SparkCreateMergeRels.class);
 
-    public SparkCreateConnectedComponent(ArgumentApplicationParser parser, SparkSession spark) throws Exception {
+    public SparkCreateMergeRels(ArgumentApplicationParser parser, SparkSession spark) throws Exception {
         super(parser, spark);
     }
@@ -45,7 +44,7 @@ public class SparkCreateConnectedComponent extends AbstractSparkAction {
                 SparkCreateSimRels.class.getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/createCC_parameters.json")));
         parser.parseArgument(args);
 
-        new SparkCreateConnectedComponent(parser, getSparkSession(parser)).run(ISLookupClientFactory.getLookUpService(parser.get("isLookUpUrl")));
+        new SparkCreateMergeRels(parser, getSparkSession(parser)).run(ISLookupClientFactory.getLookUpService(parser.get("isLookUpUrl")));
     }
 
     @Override
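
Read together, the hunks above leave the driver's entry point following the usual pattern: load the parameter definition from a bundled JSON resource, parse the CLI arguments, then run the action against the IS lookup service. The following reconstruction stitches the fragments into one method for readability; the project classes (ArgumentApplicationParser, ISLookupClientFactory, the inherited getSparkSession) are assumed to behave as used in the diff:

// Reconstructed main() of SparkCreateMergeRels; not standalone, since it
// relies on the project's own helper classes shown in the diff.
public static void main(String[] args) throws Exception {
    ArgumentApplicationParser parser = new ArgumentApplicationParser(
            IOUtils.toString(
                    SparkCreateSimRels.class.getResourceAsStream(
                            "/eu/dnetlib/dhp/oa/dedup/createCC_parameters.json")));
    parser.parseArgument(args);

    // build the action with a SparkSession derived from the parsed arguments,
    // then run it against the information service lookup endpoint
    new SparkCreateMergeRels(parser, getSparkSession(parser))
            .run(ISLookupClientFactory.getLookUpService(parser.get("isLookUpUrl")));
}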


@@ -86,7 +86,7 @@
             <master>yarn</master>
             <mode>cluster</mode>
             <name>Create Merge Relations</name>
-            <class>eu.dnetlib.dhp.oa.dedup.SparkCreateConnectedComponent</class>
+            <class>eu.dnetlib.dhp.oa.dedup.SparkCreateMergeRels</class>
             <jar>dhp-dedup-openaire-${projectVersion}.jar</jar>
             <spark-opts>
                 --executor-memory ${sparkExecutorMemory}


@@ -22,6 +22,7 @@ import java.io.Serializable;
 import java.net.URISyntaxException;
 import java.nio.file.Paths;
 
+import static java.nio.file.Files.createTempDirectory;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.mockito.Mockito.lenient;
@@ -36,16 +37,20 @@ public class SparkDedupTest implements Serializable {
     private static JavaSparkContext jsc;
 
     private static String testGraphBasePath;
-    private final static String testOutputBasePath = "/tmp/test_dedup_workflow";
+    private static String testOutputBasePath = "/tmp/test_dedup_workflow";
     private final static String testActionSetId = "test-orchestrator";
-    private final static String testDedupGraphBasePath = "/tmp/test_dedup_workflow/dedup_graph";
+    private static String testDedupGraphBasePath = "/tmp/test_dedup_workflow/dedup_graph";
 
     @BeforeAll
     private static void cleanUp() throws IOException, URISyntaxException {
 
         testGraphBasePath = Paths.get(SparkDedupTest.class.getResource("/eu/dnetlib/dhp/dedup/entities").toURI()).toFile().getAbsolutePath();
 
+        testOutputBasePath = createTempDirectory(SparkDedupTest.class.getSimpleName() + "-").toAbsolutePath().toString();
+        testDedupGraphBasePath = createTempDirectory(SparkDedupTest.class.getSimpleName() + "-").toAbsolutePath().toString();
+
         FileUtils.deleteDirectory(new File(testOutputBasePath));
+        FileUtils.deleteDirectory(new File(testDedupGraphBasePath));
 
         spark = SparkSession
                 .builder()
@@ -54,7 +59,7 @@ public class SparkDedupTest implements Serializable {
                 .config(new SparkConf())
                 .getOrCreate();
 
-        jsc = new JavaSparkContext(spark.sparkContext());
+        jsc = JavaSparkContext.fromSparkContext(spark.sparkContext());
     }
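
The setup also switches from new JavaSparkContext(spark.sparkContext()) to the JavaSparkContext.fromSparkContext factory, which wraps the session's already-running SparkContext instead of constructing a wrapper by hand. A self-contained sketch (the app name is hypothetical):

import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;

public class JscFactoryExample {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("jsc-factory-example") // hypothetical app name
                .master("local[*]")
                .getOrCreate();

        // wraps the existing SparkContext; no second context is created
        JavaSparkContext jsc = JavaSparkContext.fromSparkContext(spark.sparkContext());

        System.out.println("default parallelism: " + jsc.defaultParallelism());
        spark.stop();
    }
}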
@@ -69,7 +74,6 @@ public class SparkDedupTest implements Serializable {
         lenient().when(isLookUpService.getResourceProfileByQuery(Mockito.contains("publication")))
                 .thenReturn(IOUtils.toString(SparkDedupTest.class.getResourceAsStream("/eu/dnetlib/dhp/dedup/conf/pub.curr.conf.json")));
     }
 
     @Test
@@ -97,11 +101,11 @@ public class SparkDedupTest implements Serializable {
     @Test
     @Order(2)
-    public void createCCTest() throws Exception {
+    public void createMergeRelsTest() throws Exception {
 
         ArgumentApplicationParser parser = new ArgumentApplicationParser(
                 IOUtils.toString(
-                        SparkCreateConnectedComponent.class.getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/createCC_parameters.json")));
+                        SparkCreateMergeRels.class.getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/createCC_parameters.json")));
         parser.parseArgument(new String[]{
                 "-mt", "local[*]",
                 "-i", testGraphBasePath,
@@ -109,7 +113,7 @@ public class SparkDedupTest implements Serializable {
                 "-la", "lookupurl",
                 "-w", testOutputBasePath});
 
-        new SparkCreateConnectedComponent(parser, spark).run(isLookUpService);
+        new SparkCreateMergeRels(parser, spark).run(isLookUpService);
 
         long orgs_mergerel = spark.read().load(testOutputBasePath + "/" + testActionSetId + "/organization_mergerel").count();
         long pubs_mergerel = spark.read().load(testOutputBasePath + "/" + testActionSetId + "/publication_mergerel").count();
@@ -185,8 +189,6 @@ public class SparkDedupTest implements Serializable {
         assertEquals(mergedOrgs, deletedOrgs);
         assertEquals(mergedPubs, deletedPubs);
-
-        //TODO check the size of other entities not deduplicated
     }
 
     @Test
@@ -211,6 +213,12 @@ public class SparkDedupTest implements Serializable {
     }
 
+    @AfterAll
+    public static void finalCleanUp() throws IOException {
+        FileUtils.deleteDirectory(new File(testOutputBasePath));
+        FileUtils.deleteDirectory(new File(testDedupGraphBasePath));
+    }
+
     public boolean isDeletedByInference(String s) {
         return s.contains("\"deletedbyinference\":true");
     }
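
The substance of the commit is in this last file: the fixed /tmp/test_dedup_workflow paths become per-run temporary directories, created in the @BeforeAll hook and deleted again in the new @AfterAll hook, so repeated or concurrent runs cannot collide or leave stale output behind. A self-contained sketch of the same lifecycle, assuming only JUnit 5 and commons-io (all names are hypothetical):

import static java.nio.file.Files.createTempDirectory;

import java.io.File;
import java.io.IOException;
import org.apache.commons.io.FileUtils;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

public class TempDirLifecycleTest {

    private static String outputBasePath;

    @BeforeAll
    public static void setUp() throws IOException {
        // a unique, collision-free directory per test run
        outputBasePath = createTempDirectory("TempDirLifecycleTest-").toAbsolutePath().toString();
    }

    @Test
    public void writesIntoTempDir() {
        // test code would write its output under outputBasePath
    }

    @AfterAll
    public static void tearDown() throws IOException {
        // leave nothing behind, even across repeated runs
        FileUtils.deleteDirectory(new File(outputBasePath));
    }
}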