forked from D-Net/dnet-hadoop
SparkJobTest was failing because the test working directory was not cleaned up after each test.
parent 5e15f20e6e
commit 54c1eacef1
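
The fix adds a JUnit 5 @AfterEach hook so the shared working directory is wiped after every test, not just once per class via @AfterAll. A minimal, self-contained sketch of that cleanup pattern (class, directory, and test names here are illustrative, not taken from SparkJobTest):

    // Sketch only: assumes JUnit 5 and commons-io on the classpath.
    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;

    import org.apache.commons.io.FileUtils;
    import org.junit.jupiter.api.AfterEach;
    import org.junit.jupiter.api.BeforeEach;
    import org.junit.jupiter.api.Test;

    public class WorkingDirCleanupExample {

        private Path workingDir;

        @BeforeEach
        public void setUp() throws IOException {
            // A fresh scratch directory before every test.
            workingDir = Files.createTempDirectory("working_dir_example");
        }

        @AfterEach
        public void tearDown() throws IOException {
            // Runs after every single test, so one test's output can never
            // leak into the next one (the bug this commit fixes).
            FileUtils.deleteDirectory(workingDir.toFile());
        }

        @Test
        public void startsFromAnEmptyDirectory() throws IOException {
            Files.writeString(workingDir.resolve("out.txt"), "data");
        }
    }

With only an @AfterAll hook, every test in the class writes into the same leftover directory; moving the delete into a per-test hook is what isolates the tests.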
@@ -11,8 +11,10 @@ import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.sql.SaveMode;
 import org.apache.spark.sql.SparkSession;
 import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
@@ -58,6 +60,11 @@ public class SparkJobTest {
 			.getOrCreate();
 	}
 
+	@AfterEach
+	public void afterEach() throws IOException {
+		FileUtils.deleteDirectory(workingDir.toFile());
+	}
+
 	@AfterAll
 	public static void afterAll() throws IOException {
 		FileUtils.deleteDirectory(workingDir.toFile());
@@ -91,16 +98,19 @@ public class SparkJobTest {
 
 		readPath(spark, leavesPath, Leaves.class)
 			.write()
+			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
 			.json(workingDir.toString() + "/leavesInput");
 
 		readPath(spark, resultOrgPath, KeyValueSet.class)
 			.write()
+			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
 			.json(workingDir.toString() + "/orgsInput");
 
 		readPath(spark, projectOrgPath, KeyValueSet.class)
 			.write()
+			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
 			.json(workingDir.toString() + "/projectInput");
 
@@ -369,16 +379,19 @@ public class SparkJobTest {
 
 		readPath(spark, leavesPath, Leaves.class)
 			.write()
+			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
 			.json(workingDir.toString() + "/leavesInput");
 
 		readPath(spark, resultOrgPath, KeyValueSet.class)
 			.write()
+			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
 			.json(workingDir.toString() + "/orgsInput");
 
 		readPath(spark, projectOrgPath, KeyValueSet.class)
 			.write()
+			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
 			.json(workingDir.toString() + "/projectInput");
 
@@ -649,16 +662,19 @@ public class SparkJobTest {
 
 		readPath(spark, leavesPath, Leaves.class)
 			.write()
+			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
 			.json(workingDir.toString() + "/leavesInput");
 
 		readPath(spark, resultOrgPath, KeyValueSet.class)
 			.write()
+			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
 			.json(workingDir.toString() + "/orgsInput");
 
 		readPath(spark, projectOrgPath, KeyValueSet.class)
 			.write()
+			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
 			.json(workingDir.toString() + "/projectInput");
 
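
The .mode(SaveMode.Overwrite) additions attack the same stale-state problem from the write side: Spark's DataFrameWriter defaults to SaveMode.ErrorIfExists, so writing into a path left over from a previous run fails outright. A minimal sketch of the write pattern (session setup, dataset, and output path are illustrative, not from the test):

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SaveMode;
    import org.apache.spark.sql.SparkSession;

    public class OverwriteWriteExample {
        public static void main(String[] args) {
            SparkSession spark = SparkSession
                .builder()
                .appName("OverwriteWriteExample")
                .master("local[*]")
                .getOrCreate();

            Dataset<Row> ds = spark.range(10).toDF("id");

            ds
                .write()
                // Without this, the default SaveMode.ErrorIfExists throws if
                // the output directory survived an earlier run.
                .mode(SaveMode.Overwrite)
                .option("compression", "gzip")
                .json("/tmp/overwrite-example"); // illustrative output path

            spark.stop();
        }
    }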