package eu.dnetlib.dhp.collection.plugin.base;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.DeflateCodec;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.dom4j.Attribute;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import org.dom4j.Node;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.aggregation.AggregatorReport;
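/**
 * Spark job that analyzes the records of a BASE dump.
 *
 * The job reads its parameters (inputPath, dataPath, outputPath, reimport, isSparkSessionManaged)
 * from action_set_parameters.json via ArgumentApplicationParser. When reimport is true, the raw
 * records are first copied from the dump (inputPath) into a compressed SequenceFile (dataPath);
 * the analysis then extracts from each XML record its paths, collections and types, and saves
 * them as a parquet dataset of BaseRecordInfo beans (outputPath).
 */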
public class BaseAnalyzerJob {

    private static final Logger log = LoggerFactory.getLogger(BaseAnalyzerJob.class);
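
    /**
     * Parses the command line arguments and runs the analysis inside a (managed) Spark session.
     */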
    public static void main(final String[] args) throws Exception {
        final String jsonConfiguration = IOUtils
            .toString(BaseAnalyzerJob.class
                .getResourceAsStream("/eu/dnetlib/dhp/collection/plugin/base/action_set_parameters.json"));

        final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
        parser.parseArgument(args);

        final Boolean isSparkSessionManaged = Optional
            .ofNullable(parser.get("isSparkSessionManaged"))
            .map(Boolean::valueOf)
            .orElse(Boolean.TRUE);
        log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

        final String inputPath = parser.get("inputPath");
        log.info("inputPath: {}", inputPath);

        final String dataPath = parser.get("dataPath");
        log.info("dataPath: {}", dataPath);

        final String outputPath = parser.get("outputPath");
        log.info("outputPath: {}", outputPath);

        final boolean reimport = Boolean.parseBoolean(parser.get("reimport"));
        log.info("reimport: {}", reimport);

        final SparkConf conf = new SparkConf();

        runWithSparkSession(conf, isSparkSessionManaged, spark -> processBaseRecords(spark, inputPath, dataPath, outputPath, reimport));
    }
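
    /**
     * When reimport is true, deletes dataPath and re-imports the raw records there;
     * in any case, deletes outputPath and re-generates the analysis from dataPath.
     */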
    private static void processBaseRecords(final SparkSession spark,
        final String inputPath,
        final String dataPath,
        final String outputPath,
        final boolean reimport) throws IOException {

        try (final FileSystem fs = FileSystem.get(new Configuration());
            final AggregatorReport report = new AggregatorReport()) {

            if (reimport) {
                fs.delete(new Path(dataPath), true);
                loadRecords(fs, inputPath, dataPath, report);
            }

            fs.delete(new Path(outputPath), true);
            extractInfo(spark, dataPath, outputPath);
        } catch (final Throwable e) {
            throw new RuntimeException(e);
        }
    }
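
    /**
     * Copies all records returned by BaseCollectorIterator (which is assumed to stream the
     * records out of the dump at inputPath) into a block-compressed SequenceFile at outputPath,
     * keyed by a sequential record number.
     */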
    private static void loadRecords(final FileSystem fs,
        final String inputPath,
        final String outputPath,
        final AggregatorReport report)
        throws Exception {

        final AtomicLong recordsCounter = new AtomicLong(0);

        // reusable writables: <progressive record number, XML record>
        final LongWritable key = new LongWritable();
        final Text value = new Text();

        try (final SequenceFile.Writer writer = SequenceFile
            .createWriter(fs.getConf(), SequenceFile.Writer.file(new Path(outputPath)), SequenceFile.Writer
                .keyClass(LongWritable.class), SequenceFile.Writer
                .valueClass(Text.class), SequenceFile.Writer.compression(SequenceFile.CompressionType.BLOCK, new DeflateCodec()))) {

            final BaseCollectorIterator iterator = new BaseCollectorIterator(fs, new Path(inputPath), report);

            while (iterator.hasNext()) {
                final String record = iterator.next();

                final long i = recordsCounter.incrementAndGet();
                if ((i % 10000) == 0) {
                    log.info("# Loaded records: {}", i);
                }

                key.set(i);
                value.set(record);
                try {
                    writer.append(key, value);
                } catch (final Throwable e1) {
                    throw new RuntimeException(e1);
                }
            }

            log.info("# COMPLETED - Loaded records: {}", recordsCounter.get());
        }
    }
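
    /**
     * Loads the SequenceFile of raw records into an RDD, maps each XML record to a
     * BaseRecordInfo bean and writes the dataset as parquet (BaseRecordInfo is expected
     * to be a regular Java bean, as required by Encoders.bean).
     */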
    private static void extractInfo(final SparkSession spark,
        final String inputPath,
        final String targetPath) throws Exception {

        final JavaRDD<BaseRecordInfo> rdd = JavaSparkContext.fromSparkContext(spark.sparkContext())
            .sequenceFile(inputPath, LongWritable.class, Text.class)
            .map(s -> s._2)
            .map(BaseAnalyzerJob::extractInfo);

        spark.createDataset(rdd.rdd(), Encoders.bean(BaseRecordInfo.class))
            .write()
            .mode(SaveMode.Overwrite)
            .format("parquet")
            .save(targetPath);
    }
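
    /**
     * Parses a single XML record with dom4j and collects the paths of all elements and
     * attributes, the collection names with their attributes, and the type/typenorm values.
     */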
    private static BaseRecordInfo extractInfo(final Text s) {
        try {
            final Document record = DocumentHelper.parseText(s.toString());

            final BaseRecordInfo info = new BaseRecordInfo();

            // visit every element and attribute of the record
            for (final Object o : record.selectNodes("//*|//@*")) {
                info.getPaths().add(((Node) o).getPath());

                final String nodeName = ((Node) o).getName();

                if (o instanceof Element) {
                    final Element n = (Element) o;

                    if ("collection".equals(nodeName)) {
                        final String collName = n.getText().trim();
                        if (StringUtils.isNotBlank(collName)) {
                            final Map<String, String> attrs = new HashMap<>();
                            for (final Object ao : n.attributes()) {
                                attrs.put(((Attribute) ao).getName(), ((Attribute) ao).getValue());
                            }
                            info.getCollections().put(collName, attrs);
                        }
                    } else if ("type".equals(nodeName)) {
                        info.getTypes().add("TYPE: " + n.getText().trim());
                    } else if ("typenorm".equals(nodeName)) {
                        info.getTypes().add("TYPE_NORM: " + n.getText().trim());
                    }
                }
            }

            return info;
        } catch (final DocumentException e) {
            throw new RuntimeException(e);
        }
    }

}