package eu.dnetlib.dhp.migration;

import static eu.dnetlib.dhp.common.Constants.*;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
import static eu.dnetlib.dhp.utils.DHPUtils.*;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.time.LocalDate;
import java.time.ZoneId;
import java.util.Objects;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoder;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.util.LongAccumulator;
import org.dom4j.Document;
import org.dom4j.io.SAXReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.github.sisyphsu.dateparser.DateParserUtils;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.schema.mdstore.MDStoreVersion;
import eu.dnetlib.dhp.schema.mdstore.MetadataRecord;
import eu.dnetlib.dhp.schema.mdstore.Provenance;
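/**
 * Spark job migrating a native mdstore: it reads the records collected as a Hadoop
 * sequence file, parses each one into a {@link MetadataRecord}, and saves the resulting
 * dataset under the mdstore version path together with its record count.
 */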
public class MigrateNativeStoreSparkJob {

    private static final Logger log = LoggerFactory.getLogger(MigrateNativeStoreSparkJob.class);

    public static void main(String[] args) throws Exception {

        // Read the job parameters from the JSON descriptor bundled with the jar
        final ArgumentApplicationParser parser = new ArgumentApplicationParser(
            IOUtils
                .toString(
                    MigrateNativeStoreSparkJob.class
                        .getResourceAsStream(
                            "/eu/dnetlib/dhp/migration/migrate_native_input_parameters.json")));
        parser.parseArgument(args);

        final String encoding = parser.get("encoding");
        log.info("encoding is {}", encoding);

        final String mdStoreVersion = parser.get("mdStoreVersion");
        log.info("mdStoreVersion is {}", mdStoreVersion);

        final String datasourceId = parser.get("datasourceId");
        log.info("datasourceId is {}", datasourceId);

        final String datasourceName = parser.get("datasourceName");
        log.info("datasourceName is {}", datasourceName);

        final String nsPrefix = parser.get("nsPrefix");
        log.info("nsPrefix is {}", nsPrefix);

        final Provenance provenance = new Provenance(datasourceId, datasourceName, nsPrefix);

        final MDStoreVersion currentVersion = MAPPER.readValue(mdStoreVersion, MDStoreVersion.class);

        final Boolean isSparkSessionManaged = Optional
            .ofNullable(parser.get("isSparkSessionManaged"))
            .map(Boolean::valueOf)
            .orElse(Boolean.TRUE);
        log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
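        // Delegate session handling to the shared helper; when isSparkSessionManaged
        // is true the session is created and closed around the lambda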
        final SparkConf conf = new SparkConf();
        runWithSparkSession(
            conf,
            isSparkSessionManaged,
            spark -> migrateNativeMDStore(
                spark, provenance, encoding, currentVersion));
    }
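    /**
     * Reads the sequence file produced by the collection phase, parses each record,
     * stores the valid ones as a {@link MetadataRecord} dataset and writes the final
     * record count next to the data.
     */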
    private static void migrateNativeMDStore(SparkSession spark,
        Provenance provenance,
        String encoding,
        MDStoreVersion currentVersion) throws IOException {
        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

        // Accumulators reporting how many records were seen and how many failed to parse
        final LongAccumulator totalItems = sc.sc().longAccumulator(CONTENT_TOTALITEMS);
        final LongAccumulator invalidRecords = sc.sc().longAccumulator(CONTENT_INVALIDRECORDS);
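        // Read the collected payloads from the sequence file; records that fail to
        // parse come back as null and are filtered out, distinct() drops exact duplicates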
        final String seqFilePath = currentVersion.getHdfsPath() + SEQUENCE_FILE_NAME;

        final JavaRDD<MetadataRecord> nativeStore = sc
            .sequenceFile(seqFilePath, IntWritable.class, Text.class)
            .map(
                item -> parseRecord(
                    item._2().toString(),
                    encoding,
                    provenance,
                    totalItems,
                    invalidRecords))
            .filter(Objects::nonNull)
            .distinct();

        final Encoder<MetadataRecord> encoder = Encoders.bean(MetadataRecord.class);
        final Dataset<MetadataRecord> mdstore = spark.createDataset(nativeStore.rdd(), encoder);

        final String targetPath = currentVersion.getHdfsPath() + MDSTORE_DATA_PATH;

        saveDataset(mdstore, targetPath);
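        // Re-read the stored dataset so the reported size reflects what was actually written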
        final Long total = spark.read().load(targetPath).count();
        log.info("migrated {} records for datasource '{}'", total, provenance.getDatasourceName());

        writeHdfsFile(
            spark.sparkContext().hadoopConfiguration(), total.toString(),
            currentVersion.getHdfsPath() + MDSTORE_SIZE_PATH);
    }
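    /**
     * Parses a single native record into a {@link MetadataRecord}.
     *
     * @param input          the XML payload of the record
     * @param encoding       the encoding name to set on the resulting record
     * @param provenance     the provenance of the originating datasource
     * @param totalItems     accumulator counting all processed records, may be null
     * @param invalidRecords accumulator counting unparsable records, may be null
     * @return the parsed record, or null when the input could not be parsed
     */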
    public static MetadataRecord parseRecord(
        final String input,
        final String encoding,
        final Provenance provenance,
        final LongAccumulator totalItems,
        final LongAccumulator invalidRecords) {

        if (totalItems != null)
            totalItems.add(1);
        try {
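            // Disallowing DOCTYPE declarations hardens the XML parser against XXE attacks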
            SAXReader reader = new SAXReader();
            reader.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
            Document document = reader.read(new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8)));
            document.normalize();
            String id = document.valueOf("//dri:objIdentifier/text()");
            String dateOfCollection = document.valueOf("//dri:dateOfCollection/text()");
            final LocalDate date = DateParserUtils
                .parseDate(dateOfCollection.trim())
                .toInstant()
                .atZone(ZoneId.systemDefault())
                .toLocalDate();
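            // Detach the envelope fields already captured above (identifiers, collection
            // date, datasource prefix) and the about section, keeping only the payload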
            document
                .selectSingleNode(
                    "/*[local-name() = 'record']/*[local-name() = 'header']/*[local-name() = 'objIdentifier']")
                .detach();
            document
                .selectSingleNode(
                    "/*[local-name() = 'record']/*[local-name() = 'header']/*[local-name() = 'recordIdentifier']")
                .detach();
            document
                .selectSingleNode(
                    "/*[local-name() = 'record']/*[local-name() = 'header']/*[local-name() = 'dateOfCollection']")
                .detach();
            document
                .selectSingleNode(
                    "/*[local-name() = 'record']/*[local-name() = 'header']/*[local-name() = 'datasourceprefix']")
                .detach();
            document.selectSingleNode("/*[local-name() = 'record']/*[local-name() = 'about']").detach();

            return new MetadataRecord(id, encoding, provenance, document.asXML(), date.toEpochDay());
        } catch (Throwable e) {
            // Guard against a null accumulator, mirroring the totalItems check above
            if (invalidRecords != null)
                invalidRecords.add(1);
            return null;
        }
    }

}