WIP: metadata collection in INCREMENTAL mode and related test
parent
bead34d11a
commit
8eaa1fd4b4
@ -0,0 +1,169 @@
|
||||
|
||||
package eu.dnetlib.dhp.collection;
|
||||
|
||||
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoder;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.data.mdstore.manager.common.model.MDStoreVersion;
import eu.dnetlib.dhp.model.mdstore.MetadataRecord;
|
||||
|
||||
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
|
||||
public class GenerateNativeStoreSparkJobTest {
|
||||
|
||||
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
|
||||
|
||||
private static SparkSession spark;
|
||||
|
||||
private static Path workingDir;
|
||||
|
||||
private static Encoder<MetadataRecord> encoder;
|
||||
|
||||
private static final String encoding = "XML";
|
||||
private static final String dateOfCollection = System.currentTimeMillis() + "";
|
||||
private static final String xpath = "//*[local-name()='header']/*[local-name()='identifier']";
|
||||
private static String provenance;
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(GenerateNativeStoreSparkJobTest.class);
|
||||
|
||||
@BeforeAll
|
||||
public static void beforeAll() throws IOException {
|
||||
provenance = IOUtils.toString(GenerateNativeStoreSparkJobTest.class.getResourceAsStream("provenance.json"));
|
||||
workingDir = Files.createTempDirectory(GenerateNativeStoreSparkJobTest.class.getSimpleName());
|
||||
log.info("using work dir {}", workingDir);
|
||||
|
||||
SparkConf conf = new SparkConf();
|
||||
|
||||
conf.setAppName(GenerateNativeStoreSparkJobTest.class.getSimpleName());
|
||||
|
||||
conf.setMaster("local[*]");
|
||||
conf.set("spark.driver.host", "localhost");
|
||||
conf.set("hive.metastore.local", "true");
|
||||
conf.set("spark.ui.enabled", "false");
|
||||
conf.set("spark.sql.warehouse.dir", workingDir.toString());
|
||||
conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
|
||||
|
||||
encoder = Encoders.bean(MetadataRecord.class);
|
||||
spark = SparkSession
|
||||
.builder()
|
||||
.appName(GenerateNativeStoreSparkJobTest.class.getSimpleName())
|
||||
.config(conf)
|
||||
.getOrCreate();
|
||||
}
|
||||
|
||||
@AfterAll
|
||||
public static void afterAll() throws IOException {
|
||||
FileUtils.deleteDirectory(workingDir.toFile());
|
||||
spark.stop();
|
||||
}
|
||||
|
||||
@Test
|
||||
@Order(1)
|
||||
public void testGenerateNativeStoreSparkJobRefresh() throws Exception {
|
||||
|
||||
MDStoreVersion mdStoreV1 = prepareVersion("mdStoreVersion_1.json");
|
||||
FileUtils.forceMkdir(new File(mdStoreV1.getHdfsPath()));
|
||||
|
||||
IOUtils
|
||||
.copy(
|
||||
getClass().getResourceAsStream("sequence_file"),
|
||||
new FileOutputStream(mdStoreV1.getHdfsPath() + "/sequence_file"));
|
||||
|
||||
GenerateNativeStoreSparkJob
|
||||
.main(
|
||||
new String[] {
|
||||
"-isSparkSessionManaged", Boolean.FALSE.toString(),
|
||||
"-encoding", encoding,
|
||||
"-dateOfCollection", dateOfCollection,
|
||||
"-provenance", provenance,
|
||||
"-xpath", xpath,
|
||||
"-mdStoreVersion", OBJECT_MAPPER.writeValueAsString(mdStoreV1),
|
||||
"-readMdStoreVersion", "",
|
||||
"-workflowId", "abc"
|
||||
});
|
||||
|
||||
verify(mdStoreV1);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Order(2)
|
||||
public void testGenerateNativeStoreSparkJobIncremental() throws Exception {
|
||||
|
||||
MDStoreVersion mdStoreV2 = prepareVersion("mdStoreVersion_2.json");
|
||||
FileUtils.forceMkdir(new File(mdStoreV2.getHdfsPath()));
|
||||
|
||||
IOUtils
|
||||
.copy(
|
||||
getClass().getResourceAsStream("sequence_file"),
|
||||
new FileOutputStream(mdStoreV2.getHdfsPath() + "/sequence_file"));
|
||||
|
||||
MDStoreVersion mdStoreV1 = prepareVersion("mdStoreVersion_1.json");
|
||||
|
||||
GenerateNativeStoreSparkJob
|
||||
.main(
|
||||
new String[] {
|
||||
"-isSparkSessionManaged", Boolean.FALSE.toString(),
|
||||
"-encoding", encoding,
|
||||
"-dateOfCollection", dateOfCollection,
|
||||
"-provenance", provenance,
|
||||
"-xpath", xpath,
|
||||
"-mdStoreVersion", OBJECT_MAPPER.writeValueAsString(mdStoreV2),
|
||||
"-readMdStoreVersion", OBJECT_MAPPER.writeValueAsString(mdStoreV1),
|
||||
"-workflowId", "abc"
|
||||
});
|
||||
|
||||
verify(mdStoreV2);
|
||||
}
|
||||
|
||||
protected void verify(MDStoreVersion mdStoreVersion) throws IOException {
|
||||
Assertions.assertTrue(new File(mdStoreVersion.getHdfsPath()).exists());
|
||||
|
||||
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
|
||||
long seqFileSize = sc
|
||||
.sequenceFile(mdStoreVersion.getHdfsPath() + "/sequence_file", IntWritable.class, Text.class)
|
||||
.count();
|
||||
|
||||
final Dataset<MetadataRecord> mdstore = spark.read().load(mdStoreVersion.getHdfsPath() + "/store").as(encoder);
|
||||
long mdStoreSize = mdstore.count();
|
||||
|
||||
long declaredSize = Long.parseLong(IOUtils.toString(new FileReader(mdStoreVersion.getHdfsPath() + "/size")));
|
||||
|
||||
Assertions.assertEquals(seqFileSize, declaredSize, "the size must be equal");
|
||||
Assertions.assertEquals(seqFileSize, mdStoreSize, "the size must be equal");
|
||||
|
||||
long uniqueIds = mdstore
|
||||
.map((MapFunction<MetadataRecord, String>) MetadataRecord::getId, Encoders.STRING())
|
||||
.distinct()
|
||||
.count();
|
||||
|
||||
Assertions.assertEquals(seqFileSize, uniqueIds, "the size must be equal");
|
||||
}
|
||||
|
||||
private MDStoreVersion prepareVersion(String filename) throws IOException {
|
||||
MDStoreVersion mdstore = OBJECT_MAPPER
|
||||
.readValue(IOUtils.toString(getClass().getResource(filename)), MDStoreVersion.class);
|
||||
mdstore.setHdfsPath(String.format(mdstore.getHdfsPath(), workingDir.toString()));
|
||||
return mdstore;
|
||||
}
|
||||
|
||||
}
|
@ -1,9 +0,0 @@
|
||||
{
|
||||
"id": "md-7557225f-77cc-407d-bdf4-d2fe03131464-1611935085410",
|
||||
"mdstore": "md-7557225f-77cc-407d-bdf4-d2fe03131464",
|
||||
"writing": true,
|
||||
"readCount": 0,
|
||||
"lastUpdate": null,
|
||||
"size": 0,
|
||||
"hdfsPath": "/data/dnet.dev/mdstore/md-7557225f-77cc-407d-bdf4-d2fe03131464/md-7557225f-77cc-407d-bdf4-d2fe03131464-1611935085410"
|
||||
}
|
@ -0,0 +1,9 @@
|
||||
{
|
||||
"id":"md-84e86d00-5771-4ed9-b17f-177ef4b46e42-1612187678801",
|
||||
"mdstore":"md-84e86d00-5771-4ed9-b17f-177ef4b46e42",
|
||||
"writing":true,
|
||||
"readCount":0,
|
||||
"lastUpdate":null,
|
||||
"size":0,
|
||||
"hdfsPath":"%s/mdstore/md-84e86d00-5771-4ed9-b17f-177ef4b46e42/v1"
|
||||
}
|
@ -0,0 +1,9 @@
|
||||
{
|
||||
"id":"md-84e86d00-5771-4ed9-b17f-177ef4b46e42-1612187459108",
|
||||
"mdstore":"md-84e86d00-5771-4ed9-b17f-177ef4b46e42",
|
||||
"writing":false,
|
||||
"readCount":1,
|
||||
"lastUpdate":1612187563099,
|
||||
"size":71,
|
||||
"hdfsPath":"%s/mdstore/md-84e86d00-5771-4ed9-b17f-177ef4b46e42/v2"
|
||||
}
|
@ -0,0 +1,5 @@
|
||||
{
|
||||
"datasourceId":"74912366-d6df-49c1-a1fd-8a52fa98ce5f_UmVwb3NpdG9yeVNlcnZpY2VSZXNvdXJjZXMvUmVwb3NpdG9yeVNlcnZpY2VSZXNvdXJjZVR5cGU\u003d",
|
||||
"datasourceName":"PSNC Institutional Repository",
|
||||
"nsPrefix":"psnc______pl"
|
||||
}
|
Binary file not shown.
Loading…
Reference in New Issue