package eu.dnetlib.dhp.transformation;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.util.Map;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoder;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.util.LongAccumulator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.aggregation.common.AggregationCounter;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.model.mdstore.MetadataRecord;
import eu.dnetlib.dhp.utils.ISLookupClientFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;

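/**
 * Spark job that applies a transformation plugin to an MDStore of {@link MetadataRecord}s:
 * it reads the records from the input path, transforms each of them with the plugin
 * resolved through the IS lookup service, and saves the result to the output path.
 * <p>
 * Parameters (declared in transformation_input_parameters.json): input, output, workflowId,
 * isLookupUrl, and the optional isSparkSessionManaged flag.
 */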
public class TransformSparkJobNode {

	private static final Logger log = LoggerFactory.getLogger(TransformSparkJobNode.class);
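
	/**
	 * Parses the job arguments against transformation_input_parameters.json, creates the
	 * IS lookup client, and runs {@link #transformRecords} within a Spark session.
	 */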
	public static void main(String[] args) throws Exception {

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
			IOUtils
				.toString(
					TransformSparkJobNode.class
						.getResourceAsStream(
							"/eu/dnetlib/dhp/transformation/transformation_input_parameters.json")));

		parser.parseArgument(args);

		final Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("input");
		final String outputPath = parser.get("output");
		// TODO this variable will be used after implementing Messaging with the DNet Aggregator
		final String workflowId = parser.get("workflowId");

		final String isLookupUrl = parser.get("isLookupUrl");
		log.info("isLookupUrl: {}", isLookupUrl);

		final ISLookUpService isLookupService = ISLookupClientFactory.getLookUpService(isLookupUrl);
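
		// run the transformation within a Spark session; when isSparkSessionManaged is true,
		// the SparkSessionSupport helper is also expected to stop the session at the end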
		final SparkConf conf = new SparkConf();
		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> transformRecords(parser.getObjectMap(), isLookupService, spark, inputPath, outputPath));
	}
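
	/**
	 * Reads the input MDStore as a typed Dataset of {@link MetadataRecord}, transforms every
	 * record with the plugin returned by the TransformationFactory, saves the transformed
	 * records to the output path, and logs the aggregation counters.
	 */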
	public static void transformRecords(final Map<String, String> args, final ISLookUpService isLookUpService,
		final SparkSession spark, final String inputPath, final String outputPath)
		throws DnetTransformationException {
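
		// Spark accumulators, wrapped in an AggregationCounter and updated by the
		// transformation plugin, count the records read, transformed, and failed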
		final LongAccumulator totalItems = spark.sparkContext().longAccumulator("TotalItems");
		final LongAccumulator errorItems = spark.sparkContext().longAccumulator("errorItems");
		final LongAccumulator transformedItems = spark.sparkContext().longAccumulator("transformedItems");
		final AggregationCounter ct = new AggregationCounter(totalItems, errorItems, transformedItems);
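
		// read the MDStore as a typed Dataset and transform each record with the
		// MapFunction provided by the TransformationFactory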
		final Encoder<MetadataRecord> encoder = Encoders.bean(MetadataRecord.class);
		final Dataset<MetadataRecord> mdstoreInput = spark.read().format("parquet").load(inputPath).as(encoder);
		final MapFunction<MetadataRecord, MetadataRecord> transformationFunction = TransformationFactory
			.getTransformationPlugin(args, ct, isLookUpService);
		mdstoreInput.map(transformationFunction, encoder).write().save(outputPath);

		log.info("Transformed items: {}", ct.getProcessedItems().count());
		log.info("Total items: {}", ct.getTotalItems().count());
		log.info("Transformation error items: {}", ct.getErrorItems().count());
	}
}