dnet-hadoop/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/plugin/base/BaseAnalyzerJob.java

package eu.dnetlib.dhp.collection.plugin.base;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
import static org.apache.spark.sql.functions.col;
import static org.apache.spark.sql.functions.count;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.DeflateCodec;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import org.dom4j.Node;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.DbClient;
import eu.dnetlib.dhp.common.aggregation.AggregatorReport;
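
/**
 * Spark job that analyzes a BASE dump in three steps: (0) it loads the raw records into a compressed
 * SequenceFile, (1) it extracts a report (identifier, XML paths, types and collections) from each record and
 * saves it as parquet, (2) it joins the OpenDOAR repositories referenced by BASE with the aggregation status
 * loaded from the OpenAIRE database and saves the comparison as parquet.
 */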
public class BaseAnalyzerJob {
private static final String BASE_DUMP = "BASE_DUMP";
private static final Logger log = LoggerFactory.getLogger(BaseAnalyzerJob.class);
public static void main(final String[] args) throws Exception {
final String jsonConfiguration = IOUtils
.toString(
BaseAnalyzerJob.class
.getResourceAsStream("/eu/dnetlib/dhp/collection/plugin/base/action_set_parameters.json"));
final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
parser.parseArgument(args);
final Boolean isSparkSessionManaged = Optional
.ofNullable(parser.get("isSparkSessionManaged"))
.map(Boolean::valueOf)
.orElse(Boolean.TRUE);
log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
final String inputPath = parser.get("inputPath");
log.info("inputPath: {}", inputPath);

final String dataPath = parser.get("dataPath");
log.info("dataPath: {}", dataPath);

final String outputPath = parser.get("outputPath");
log.info("outputPath: {}", outputPath);

final String opendoarPath = parser.get("opendoarPath");
log.info("opendoarPath: {}", opendoarPath);

final int fromStep = Integer.parseInt(parser.get("fromStep"));
log.info("fromStep: {}", fromStep);

final String dbUrl = parser.get("postgresUrl");
log.info("postgresUrl: {}", dbUrl);

final String dbUser = parser.get("postgresUser");
log.info("postgresUser: {}", dbUser);
final String dbPassword = parser.get("postgresPassword");
log.info("postgresPassword: xxx");

final SparkConf conf = new SparkConf();
runWithSparkSession(conf, isSparkSessionManaged, spark -> {
if (fromStep <= 0) {
log
.info(
"\n**************************************\n* EXECUTING STEP 0: LoadRecords\n**************************************");
loadRecords(inputPath, dataPath);
log
.info(
"\n**************************************\n* EXECUTING STEP 0: DONE\n**************************************");
}
if (fromStep <= 1) {
log
.info(
"\n**************************************\n* EXECUTING STEP 1: Base Report\n**************************************");
generateReport(spark, dataPath, outputPath);
log
.info(
"\n**************************************\n* EXECUTING STEP 1: DONE\n**************************************");
}
if (fromStep <= 2) {
log
.info(
"\n**************************************\n* EXECUTING STEP 2: OpenDOAR Report\n**************************************");
generateOpenDoarReport(spark, outputPath, opendoarPath, loadOpenDoarStats(dbUrl, dbUser, dbPassword));
log
.info(
"\n**************************************\n* EXECUTING STEP 2: DONE\n**************************************");
}
});
}
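
/**
 * Counts the BASE records available for each OpenDOAR repository in the report produced by step 1, joins these
 * counts (full outer) with the statuses loaded from the database and saves the merged result as parquet.
 */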
private static void generateOpenDoarReport(final SparkSession spark,
final String reportPath,
final String opendoarPath,
final List<OpenDoarRepoStatus> repos) {
final Dataset<OpenDoarRepoStatus> fromDB = spark.createDataset(repos, Encoders.bean(OpenDoarRepoStatus.class));
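
// Count how many BASE records refer to each OpenDOAR repository (stored under the BASE_DUMP aggregation key)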
final Dataset<OpenDoarRepoStatus> fromBASE = spark
.read()
.parquet(reportPath)
.selectExpr("explode(collections) as collection")
.where("isnotnull(collection.opendoarId) and character_length(collection.opendoarId)>0")
.selectExpr("concat('opendoar____::',collection.opendoarId) as id")
.groupBy(col("id"))
.agg(count(col("id")))
.map(row -> {
final OpenDoarRepoStatus repo = new OpenDoarRepoStatus();
repo.setId(row.getString(0));
repo.getAggregations().put(BASE_DUMP, row.getLong(1));
repo.setFromBase(true);
repo.setBaseMAX(true);
repo.setHighCompliance(false);
return repo;
}, Encoders.bean(OpenDoarRepoStatus.class));
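
// Full outer join: keep repositories known only to the database, only to BASE, or to both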
fromDB
.joinWith(fromBASE, fromDB.col("id").equalTo(fromBASE.col("id")), "full_outer")
.map(t -> merge(t._1, t._2), Encoders.bean(OpenDoarRepoStatus.class))
.write()
.mode(SaveMode.Overwrite)
.format("parquet")
.save(opendoarPath);
}
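
/**
 * Merges the database-side and the BASE-side status of the same repository: the aggregation counts of both are
 * combined and baseMAX is set when the BASE_DUMP count is greater than the largest count of any other
 * aggregation.
 */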
private static OpenDoarRepoStatus merge(final OpenDoarRepoStatus r1, final OpenDoarRepoStatus r2) {
if (r1 == null) {
return r2;
}
if (r2 == null) {
return r1;
}
final OpenDoarRepoStatus r = new OpenDoarRepoStatus();
r.setId(ObjectUtils.firstNonNull(r1.getId(), r2.getId()));
r.setJurisdiction(ObjectUtils.firstNonNull(r1.getJurisdiction(), r2.getJurisdiction()));
r.getAggregations().putAll(r1.getAggregations());
r.getAggregations().putAll(r2.getAggregations());
r.setFromBase(r1.isFromBase() || r2.isFromBase());
r.setHighCompliance(r1.isHighCompliance() || r2.isHighCompliance());
if (r.getAggregations().containsKey(BASE_DUMP)) {
final long baseSize = r.getAggregations().get(BASE_DUMP);
final long otherSize = r
.getAggregations()
.entrySet()
.stream()
.filter(e -> !BASE_DUMP.equals(e.getKey()))
.mapToLong(Entry::getValue)
.max()
.orElse(0);
r.setBaseMAX(baseSize > otherSize);
} else {
r.setBaseMAX(false);
}
return r;
}
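
/**
 * Loads from the OpenAIRE database the list of OpenDOAR repositories with their jurisdiction and the record
 * counts of their aggregations, encoded by the SQL query as "api@@@count" strings.
 */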
private static List<OpenDoarRepoStatus> loadOpenDoarStats(final String dbUrl,
final String dbUser,
final String dbPassword) throws Exception {
final List<OpenDoarRepoStatus> repos = new ArrayList<>();
try (DbClient dbClient = new DbClient(dbUrl, dbUser, dbPassword)) {
final String sql = IOUtils
.toString(
BaseAnalyzerJob.class
.getResourceAsStream(
"/eu/dnetlib/dhp/collection/plugin/base/sql/opendoar-aggregation-status.sql"));
dbClient.processResults(sql, row -> {
try {
final OpenDoarRepoStatus repo = new OpenDoarRepoStatus();
repo.setId(row.getString("id"));
repo.setJurisdiction(row.getString("jurisdiction"));
repo.setFromBase(false);
repo.setBaseMAX(false);

for (final String s : (String[]) row.getArray("aggregations").getArray()) {
final String api = StringUtils.substringBefore(s, "@@@");
final long count = NumberUtils.toLong(StringUtils.substringAfter(s, "@@@"), 0);
repo.getAggregations().put(api, count);

// This should recognize the HIGH compliances (openaire*X.Y*);
// the flag is kept if ANY aggregation of the repository matches
repo.setHighCompliance(repo.isHighCompliance() || s.contains("compliance: openaire"));
}
repos.add(repo);
log.info("# FOUND OPENDOAR (DB): " + repo.getId());
} catch (final SQLException e) {
log.error("Error in SQL", e);
throw new RuntimeException("Error in SQL", e);
}
});
}
return repos;
}
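
/**
 * Reads the records of the BASE dump through a BaseCollectorIterator and writes them, keyed by a progressive
 * counter, into a block-compressed SequenceFile.
 */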
private static void loadRecords(final String inputPath, final String outputPath) throws Exception {
try (final FileSystem fs = FileSystem.get(new Configuration());
final AggregatorReport report = new AggregatorReport()) {
final AtomicLong recordsCounter = new AtomicLong(0);
final LongWritable key = new LongWritable();
final Text value = new Text();
try (final SequenceFile.Writer writer = SequenceFile
.createWriter(
fs.getConf(), SequenceFile.Writer.file(new Path(outputPath)), SequenceFile.Writer
.keyClass(LongWritable.class),
SequenceFile.Writer
.valueClass(Text.class),
SequenceFile.Writer.compression(SequenceFile.CompressionType.BLOCK, new DeflateCodec()))) {
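
// Iterate over the records of the BASE dump and append each one to the sequence file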
final BaseCollectorIterator iterator = new BaseCollectorIterator(fs, new Path(inputPath), report);

while (iterator.hasNext()) {
final String record = iterator.next();
final long i = recordsCounter.incrementAndGet();
if ((i % 10000) == 0) {
log.info("# Loaded records: " + i);
}
key.set(i);
value.set(record);
try {
writer.append(key, value);
} catch (final Throwable e1) {
throw new RuntimeException(e1);
}
}

log.info("# COMPLETED - Loaded records: {}", recordsCounter.get());
}
}
}
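
/**
 * Extracts a BaseRecordInfo from every record stored in the SequenceFile and saves the resulting dataset as
 * parquet.
 */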
private static void generateReport(final SparkSession spark,
final String inputPath,
final String targetPath) throws Exception {
final JavaRDD<BaseRecordInfo> rdd = JavaSparkContext
.fromSparkContext(spark.sparkContext())
.sequenceFile(inputPath, LongWritable.class, Text.class)
.map(s -> s._2.toString())
.map(BaseAnalyzerJob::extractInfo);
spark
.createDataset(rdd.rdd(), Encoders.bean(BaseRecordInfo.class))
.write()
.mode(SaveMode.Overwrite)
.format("parquet")
.save(targetPath);
}
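
/**
 * Parses a single BASE record and collects its identifier, the paths of all its nodes and attributes, its
 * types and typenorms, and the collections it belongs to (with the related OpenDOAR and ROR identifiers).
 */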
protected static BaseRecordInfo extractInfo(final String s) {
try {
final Document record = DocumentHelper.parseText(s);
final BaseRecordInfo info = new BaseRecordInfo();
final Set<String> paths = new LinkedHashSet<>();
final Set<String> types = new LinkedHashSet<>();
final List<BaseCollectionInfo> colls = new ArrayList<>();
for (final Object o : record.selectNodes("//*|//@*")) {
paths.add(((Node) o).getPath());
if (o instanceof Element) {
final Element n = (Element) o;
final String nodeName = n.getName();
if ("collection".equals(nodeName)) {
final String collName = n.getText().trim();
if (StringUtils.isNotBlank(collName)) {
final BaseCollectionInfo coll = new BaseCollectionInfo();
coll.setId(collName);
coll.setOpendoarId(n.valueOf("@opendoar_id").trim());
coll.setRorId(n.valueOf("@ror_id").trim());
colls.add(coll);
}
} else if ("type".equals(nodeName)) {
types.add("TYPE: " + n.getText().trim());
} else if ("typenorm".equals(nodeName)) {
types.add("TYPE_NORM: " + n.getText().trim());
}
}
}
info.setId(record.valueOf("//*[local-name() = 'header']/*[local-name() = 'identifier']").trim());
info.getTypes().addAll(types);
info.getPaths().addAll(paths);
info.setCollections(colls);
return info;
} catch (final DocumentException e) {
throw new RuntimeException(e);
}
}
}