package eu.dnetlib.dhp.collection.plugin.base;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
import static org.apache.spark.sql.functions.col;
import static org.apache.spark.sql.functions.count;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.DeflateCodec;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import org.dom4j.Node;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.DbClient;
import eu.dnetlib.dhp.common.aggregation.AggregatorReport;

import scala.Tuple2;
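
/**
 * Analyzes a dump of the BASE aggregator in incremental steps: step 0 loads the
 * raw records into a compressed SequenceFile, step 1 derives a parquet report
 * of the record metadata, step 2 joins that report with the OpenDOAR
 * aggregation status read from PostgreSQL, and step 3 extracts the distinct
 * (typenorm, type) vocabulary pairs. The "fromStep" argument selects the first
 * step to execute.
 */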
public class BaseAnalyzerJob {

	private static final String BASE_DUMP = "BASE_DUMP";

	private static final Logger log = LoggerFactory.getLogger(BaseAnalyzerJob.class);

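	/**
	 * A sketch of a typical invocation; the argument names match the parser
	 * lookups below, while all values are purely illustrative:
	 *
	 * <pre>
	 * BaseAnalyzerJob --fromStep 0
	 *   --inputPath /tmp/base/dump.tar
	 *   --dataPath /tmp/base/records.seq
	 *   --outputPath /tmp/base/report
	 *   --opendoarPath /tmp/base/opendoar_report
	 *   --typesReportPath /tmp/base/types_report
	 *   --postgresUrl jdbc:postgresql://localhost:5432/dnet --postgresUser dnet --postgresPassword ***
	 * </pre>
	 */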
public static void main(final String[] args) throws Exception {

		final String jsonConfiguration = IOUtils
			.toString(
				BaseAnalyzerJob.class
					.getResourceAsStream("/eu/dnetlib/dhp/collection/plugin/base/action_set_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		final Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("inputPath");
		log.info("inputPath: {}", inputPath);

		final String dataPath = parser.get("dataPath");
		log.info("dataPath: {}", dataPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		final String opendoarPath = parser.get("opendoarPath");
		log.info("opendoarPath: {}", opendoarPath);

		final String typesReportPath = parser.get("typesReportPath");
		log.info("typesReportPath: {}", typesReportPath);

		final int fromStep = Integer.parseInt(parser.get("fromStep"));
		log.info("fromStep: {}", fromStep);

		final String dbUrl = parser.get("postgresUrl");
		log.info("postgresUrl: {}", dbUrl);

		final String dbUser = parser.get("postgresUser");
		log.info("postgresUser: {}", dbUser);

		final String dbPassword = parser.get("postgresPassword");
		log.info("postgresPassword: ***"); // avoid logging the plaintext credential

		final SparkConf conf = new SparkConf();

		runWithSparkSession(conf, isSparkSessionManaged, spark -> {
			if (fromStep <= 0) {
				log
					.info(
						"\n**************************************\n* EXECUTING STEP 0: LoadRecords\n**************************************");
				loadRecords(inputPath, dataPath);
				log
					.info(
						"\n**************************************\n* EXECUTING STEP 0: DONE\n**************************************");
			}

			if (fromStep <= 1) {
				log
					.info(
						"\n**************************************\n* EXECUTING STEP 1: Base Report\n**************************************");
				generateReport(spark, dataPath, outputPath);
				log
					.info(
						"\n**************************************\n* EXECUTING STEP 1: DONE\n**************************************");
			}

			if (fromStep <= 2) {
				log
					.info(
						"\n**************************************\n* EXECUTING STEP 2: OpenDOAR Report\n**************************************");
				generateOpenDoarReport(spark, outputPath, opendoarPath, loadOpenDoarStats(dbUrl, dbUser, dbPassword));
				log
					.info(
						"\n**************************************\n* EXECUTING STEP 2: DONE\n**************************************");
			}

			if (fromStep <= 3) {
				log
					.info(
						"\n**************************************\n* EXECUTING STEP 3: Type Vocabulary Report\n**************************************");
				generateVocTypeReport(spark, outputPath, typesReportPath);
				log
					.info(
						"\n**************************************\n* EXECUTING STEP 3: DONE\n**************************************");
			}
		});
	}
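
	/**
	 * Step 3: reads the report produced by generateReport() and emits every
	 * distinct (TYPE_NORM, TYPE) pair occurring in the same record, i.e. the
	 * original type values observed for each normalized type.
	 */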
	private static void generateVocTypeReport(final SparkSession spark,
		final String reportPath,
		final String typesReportPath) {
		spark
			.read()
			.parquet(reportPath)
			.as(Encoders.bean(BaseRecordInfo.class))
			.flatMap(rec -> {
				final List<Tuple2<String, String>> list = new ArrayList<>();
				for (final String t1 : rec.getTypes()) {
					if (t1.startsWith("TYPE_NORM:")) {
						for (final String t2 : rec.getTypes()) {
							if (t2.startsWith("TYPE:")) {
								list
									.add(
										new Tuple2<>(StringUtils.substringAfter(t1, "TYPE_NORM:").trim(),
											StringUtils.substringAfter(t2, "TYPE:").trim()));
							}
						}
					}
				}
				return list.iterator();
			}, Encoders.tuple(Encoders.STRING(), Encoders.STRING()))
			.distinct()
			.write()
			.mode(SaveMode.Overwrite)
			.format("parquet")
			.save(typesReportPath);
	}
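
	/**
	 * Step 2: performs a full outer join between the repositories registered in
	 * the OpenAIRE database (fromDB) and the per-OpenDOAR record counts computed
	 * from the BASE report (fromBASE, ids prefixed with "opendoar____::");
	 * overlapping entries are reconciled by merge().
	 */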
	private static void generateOpenDoarReport(final SparkSession spark,
		final String reportPath,
		final String opendoarPath,
		final List<OpenDoarRepoStatus> repos) {

		final Dataset<OpenDoarRepoStatus> fromDB = spark.createDataset(repos, Encoders.bean(OpenDoarRepoStatus.class));

		final Dataset<OpenDoarRepoStatus> fromBASE = spark
			.read()
			.parquet(reportPath)
			.selectExpr("explode(collections) as collection")
			.where("isnotnull(collection.opendoarId) and character_length(collection.opendoarId)>0")
			.selectExpr("concat('opendoar____::',collection.opendoarId) as id")
			.groupBy(col("id"))
			.agg(count(col("id")))
			.map(row -> {
				final OpenDoarRepoStatus repo = new OpenDoarRepoStatus();
				repo.setId(row.getString(0));
				repo.getAggregations().put(BASE_DUMP, row.getLong(1));
				repo.setBaseCount(row.getLong(1));
				repo.setOpenaireCount(0);
				repo.setHighCompliance(false);
				return repo;
			}, Encoders.bean(OpenDoarRepoStatus.class));

		fromDB
			.joinWith(fromBASE, fromDB.col("id").equalTo(fromBASE.col("id")), "full_outer")
			.map(t -> merge(t._1, t._2), Encoders.bean(OpenDoarRepoStatus.class))
			.write()
			.mode(SaveMode.Overwrite)
			.format("parquet")
			.save(opendoarPath);
	}
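
	/**
	 * Field-wise merge of two partial statuses of the same repository: first
	 * non-null id and jurisdiction, union of the aggregation maps (r2 wins on
	 * duplicate keys), OR of the compliance flags, maximum of the counters.
	 */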
	private static OpenDoarRepoStatus merge(final OpenDoarRepoStatus r1, final OpenDoarRepoStatus r2) {
		if (r1 == null) {
			return r2;
		}
		if (r2 == null) {
			return r1;
		}

		final OpenDoarRepoStatus r = new OpenDoarRepoStatus();
		r.setId(ObjectUtils.firstNonNull(r1.getId(), r2.getId()));
		r.setJurisdiction(ObjectUtils.firstNonNull(r1.getJurisdiction(), r2.getJurisdiction()));
		r.getAggregations().putAll(r1.getAggregations());
		r.getAggregations().putAll(r2.getAggregations());
		r.setHighCompliance(r1.isHighCompliance() || r2.isHighCompliance());
		r.setBaseCount(Math.max(r1.getBaseCount(), r2.getBaseCount()));
		r.setOpenaireCount(Math.max(r1.getOpenaireCount(), r2.getOpenaireCount()));

		return r;
	}
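
	/**
	 * Loads the aggregation status of the OpenDOAR repositories from PostgreSQL.
	 * As implied by the substringBefore/substringAfter calls below, each element
	 * of the "aggregations" array is expected in the form API_ID@@@COUNT, e.g.
	 * "api_1@@@42" (illustrative value).
	 */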
	private static List<OpenDoarRepoStatus> loadOpenDoarStats(final String dbUrl,
		final String dbUser,
		final String dbPassword) throws Exception {
		final List<OpenDoarRepoStatus> repos = new ArrayList<>();

		try (DbClient dbClient = new DbClient(dbUrl, dbUser, dbPassword)) {

			final String sql = IOUtils
				.toString(
					BaseAnalyzerJob.class
						.getResourceAsStream(
							"/eu/dnetlib/dhp/collection/plugin/base/sql/opendoar-aggregation-status.sql"));

			dbClient.processResults(sql, row -> {
				try {
					final OpenDoarRepoStatus repo = new OpenDoarRepoStatus();
					repo.setId(row.getString("id"));
					repo.setJurisdiction(row.getString("jurisdiction"));
					repo.setBaseCount(0);
					repo.setHighCompliance(false);

					long sum = 0;
					for (final String s : (String[]) row.getArray("aggregations").getArray()) {
						final String api = StringUtils.substringBefore(s, "@@@");
						final long count = NumberUtils.toLong(StringUtils.substringAfter(s, "@@@"), 0);
						sum += count;
						repo.getAggregations().put(api, count);

						// This should recognize the HIGH compliances: openaire*X.Y*
						if (s.contains("compliance: openaire")) {
							repo.setHighCompliance(true);
						}
					}
					repo.setOpenaireCount(sum);

					repos.add(repo);
					log.info("# FOUND OPENDOAR (DB): {}", repo.getId());
				} catch (final SQLException e) {
					log.error("Error in SQL", e);
					throw new RuntimeException("Error in SQL", e);
				}
			});
		}
		return repos;
	}
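
	/**
	 * Step 0: streams the records of the BASE dump through BaseCollectorIterator
	 * into a block-compressed SequenceFile, keyed by a progressive counter.
	 */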
	private static void loadRecords(final String inputPath, final String outputPath) throws Exception {
		try (final FileSystem fs = FileSystem.get(new Configuration());
			final AggregatorReport report = new AggregatorReport()) {

			final AtomicLong recordsCounter = new AtomicLong(0);

			final LongWritable key = new LongWritable();
			final Text value = new Text();

			try (final SequenceFile.Writer writer = SequenceFile
				.createWriter(
					fs.getConf(),
					SequenceFile.Writer.file(new Path(outputPath)),
					SequenceFile.Writer.keyClass(LongWritable.class),
					SequenceFile.Writer.valueClass(Text.class),
					SequenceFile.Writer.compression(SequenceFile.CompressionType.BLOCK, new DeflateCodec()))) {

				final BaseCollectorIterator iterator = new BaseCollectorIterator(fs, new Path(inputPath), report);

				while (iterator.hasNext()) {
					final String record = iterator.next();

					final long i = recordsCounter.incrementAndGet();
					if ((i % 10000) == 0) {
						log.info("# Loaded records: {}", i);
					}

					key.set(i);
					value.set(record);
					try {
						writer.append(key, value);
					} catch (final Throwable e1) {
						throw new RuntimeException(e1);
					}
				}

				log.info("# COMPLETED - Loaded records: {}", recordsCounter.get());
			}
		}
	}
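
	/**
	 * Step 1: re-reads the SequenceFile produced by step 0, parses each XML
	 * record with extractInfo() and saves the resulting BaseRecordInfo dataset
	 * as parquet.
	 */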
	private static void generateReport(final SparkSession spark,
		final String inputPath,
		final String targetPath) throws Exception {

		final JavaRDD<BaseRecordInfo> rdd = JavaSparkContext
			.fromSparkContext(spark.sparkContext())
			.sequenceFile(inputPath, LongWritable.class, Text.class)
			.map(s -> s._2.toString())
			.map(BaseAnalyzerJob::extractInfo);

		spark
			.createDataset(rdd.rdd(), Encoders.bean(BaseRecordInfo.class))
			.write()
			.mode(SaveMode.Overwrite)
			.format("parquet")
			.save(targetPath);
	}
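
	/**
	 * Parses a single BASE record collecting the XPaths of all elements and
	 * attributes, the declared types ("TYPE: ..." and "TYPE_NORM: ..."), and the
	 * collections with their OpenDOAR and ROR identifiers.
	 */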
	protected static BaseRecordInfo extractInfo(final String s) {
		try {
			final Document record = DocumentHelper.parseText(s);

			final BaseRecordInfo info = new BaseRecordInfo();

			final Set<String> paths = new LinkedHashSet<>();
			final Set<String> types = new LinkedHashSet<>();
			final List<BaseCollectionInfo> colls = new ArrayList<>();

			for (final Object o : record.selectNodes("//*|//@*")) {
				paths.add(((Node) o).getPath());

				if (o instanceof Element) {
					final Element n = (Element) o;

					final String nodeName = n.getName();

					if ("collection".equals(nodeName)) {
						final String collName = n.getText().trim();

						if (StringUtils.isNotBlank(collName)) {
							final BaseCollectionInfo coll = new BaseCollectionInfo();
							coll.setId(collName);
							coll.setOpendoarId(n.valueOf("@opendoar_id").trim());
							coll.setRorId(n.valueOf("@ror_id").trim());
							colls.add(coll);
						}
					} else if ("type".equals(nodeName)) {
						types.add("TYPE: " + n.getText().trim());
					} else if ("typenorm".equals(nodeName)) {
						types.add("TYPE_NORM: " + n.getText().trim());
					}
				}
			}

			info.setId(record.valueOf("//*[local-name() = 'header']/*[local-name() = 'identifier']").trim());
			info.getTypes().addAll(types);
			info.getPaths().addAll(paths);
			info.setCollections(colls);

			return info;
		} catch (final DocumentException e) {
			throw new RuntimeException(e);
		}
	}
}