package eu.dnetlib.dhp.oa.graph;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.oa.graph.hive.GraphHiveImporterJob;
import eu.dnetlib.dhp.schema.common.ModelSupport;
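/**
 * Tests {@link GraphHiveImporterJob} by importing the bundled sample graph into a Hive
 * database created on a local, embedded Derby metastore, then checking the resulting tables.
 */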
public class GraphHiveImporterJobTest {

    private static final Logger log = LoggerFactory.getLogger(GraphHiveImporterJobTest.class);

    public static final String JDBC_DERBY_TEMPLATE = "jdbc:derby:;databaseName=%s/junit_metastore_db;create=true";

    private static SparkSession spark;

    private static Path workingDir;

    private static String dbName;
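    // Creates a local Spark session with Hive support: the metastore is an embedded Derby
    // database and the warehouse directory lives under a per-run temporary folder.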
    @BeforeAll
    public static void beforeAll() throws IOException {
        workingDir = Files.createTempDirectory(GraphHiveImporterJobTest.class.getSimpleName());
        log.info("using work dir {}", workingDir);

        dbName = RandomStringUtils.randomAlphabetic(5);
        log.info("using DB name {}", "test_" + dbName);

        SparkConf conf = new SparkConf();
        conf.setAppName(GraphHiveImporterJobTest.class.getSimpleName());

        conf.setMaster("local[*]");
        conf.set("spark.driver.host", "localhost");
        conf.set("hive.metastore.local", "true");
        conf.set("spark.ui.enabled", "false");
        conf.set("spark.sql.warehouse.dir", workingDir.toString());
        conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
        conf
            .set(
                "javax.jdo.option.ConnectionURL",
                String.format(JDBC_DERBY_TEMPLATE, workingDir.resolve("warehouse")));

        spark = SparkSession
            .builder()
            .appName(GraphHiveImporterJobTest.class.getSimpleName())
            .config(conf)
            .enableHiveSupport()
            .getOrCreate();
    }

    @AfterAll
    public static void afterAll() throws IOException {
        FileUtils.deleteDirectory(workingDir.toFile());
        spark.stop();
    }
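    // Runs GraphHiveImporterJob against the sample graph resource and then verifies the
    // tables created in the randomly named test database.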

    @Test
    public void testImportGraphAsHiveDB() throws Exception {

        GraphHiveImporterJob
            .main(
                new String[] {
                    "-isSparkSessionManaged",
                    Boolean.FALSE.toString(),
                    "-inputPath",
                    getClass().getResource("/eu/dnetlib/dhp/oa/graph/sample").getPath(),
                    "-hiveMetastoreUris",
                    "",
                    "-hiveDbName",
                    dbName
                });
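
        // One table is expected per OAF type, named after the type itself; the assertions
        // expect 10 rows per entity table and 100 rows in the relation table for the sample.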
        ModelSupport.oafTypes
            .forEach(
                (name, clazz) -> {
                    long count = spark.read().table(dbName + "." + name).count();
                    int expected = name.equals("relation") ? 100 : 10;

                    Assertions
                        .assertEquals(
                            expected, count, String.format("%s should be %s", name, expected));
                });
    }
}