dnet-hadoop/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/collection/plugin/base/BaseCollectorPlugin.java

package eu.dnetlib.dhp.collection.plugin.base;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Optional;
import java.util.Set;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import org.dom4j.DocumentException;
import org.dom4j.DocumentHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.collection.ApiDescriptor;
import eu.dnetlib.dhp.collection.plugin.CollectorPlugin;
import eu.dnetlib.dhp.common.DbClient;
import eu.dnetlib.dhp.common.aggregation.AggregatorReport;
import eu.dnetlib.dhp.common.collection.CollectorException;
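
/**
 * CollectorPlugin that streams metadata records from a BASE dump file stored on HDFS,
 * keeping only the records that belong to OpenDOAR datasources accepted in the
 * OpenAIRE database.
 */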
public class BaseCollectorPlugin implements CollectorPlugin {

	private final FileSystem fs;

	private static final Logger log = LoggerFactory.getLogger(BaseCollectorPlugin.class);

	// MAPPING AND FILTERING ARE DEFINED HERE:
	// https://docs.google.com/document/d/1Aj-ZAV11b44MCrAAUCPiS2TUlXb6PnJEu1utCMAcCOU/edit

	public BaseCollectorPlugin(final FileSystem fs) {
		this.fs = fs;
	}
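
	/**
	 * Streams the records of the BASE dump at the api baseUrl, filtered by the
	 * set of accepted OpenDOAR datasource ids.
	 */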
	@Override
	public Stream<String> collect(final ApiDescriptor api, final AggregatorReport report) throws CollectorException {
		// the baseUrl of the api points to the dump file on HDFS
		final Path filePath = Optional
			.ofNullable(api.getBaseUrl())
			.map(Path::new)
			.orElseThrow(() -> new CollectorException("missing baseUrl"));
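
		// database connection parameters, used to find the accepted OpenDOAR datasources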
		final String dbUrl = api.getParams().get("dbUrl");
		final String dbUser = api.getParams().get("dbUser");
		final String dbPassword = api.getParams().get("dbPassword");

		log.info("baseUrl: {}", filePath);
		log.info("dbUrl: {}", dbUrl);
		log.info("dbUser: {}", dbUser);
		log.info("dbPassword: {}", "***");
		try {
			if (!this.fs.exists(filePath)) {
				throw new CollectorException("path does not exist: " + filePath);
			}
		} catch (final IOException e) {
			throw new CollectorException(e);
		}
		final Set<String> acceptedOpendoarIds = findAcceptedOpendoarIds(dbUrl, dbUser, dbPassword);
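
		// iterate over the records of the dump, keeping only those of accepted datasources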
		final Iterator<String> iterator = new BaseCollectorIterator(this.fs, filePath, report);
		final Spliterator<String> spliterator = Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED);
		return StreamSupport
			.stream(spliterator, false)
			.filter(doc -> filterXml(doc, acceptedOpendoarIds, report));
	}
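
	// loads from the OpenAIRE database the ids of the accepted OpenDOAR datasources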
	private Set<String> findAcceptedOpendoarIds(final String dbUrl, final String dbUser, final String dbPassword)
		throws CollectorException {
		final Set<String> accepted = new HashSet<>();

		try (final DbClient dbClient = new DbClient(dbUrl, dbUser, dbPassword)) {
			final String sql = IOUtils
				.toString(
					BaseAnalyzerJob.class
						.getResourceAsStream("/eu/dnetlib/dhp/collection/plugin/base/sql/opendoar-accepted.sql"),
					StandardCharsets.UTF_8);
			dbClient.processResults(sql, row -> {
				try {
					final String dsId = row.getString("id");
					log.info("Accepted Datasource: {}", dsId);
					accepted.add(dsId);
				} catch (final SQLException e) {
					log.error("Error in SQL", e);
					throw new RuntimeException("Error in SQL", e);
				}
			});
		} catch (final IOException e) {
			log.error("Error accessing SQL", e);
			throw new CollectorException("Error accessing SQL", e);
		}
		log.info("Accepted Datasources (TOTAL): {}", accepted.size());

		return accepted;
	}
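
	// a record passes the filter only if the opendoar_id attribute of its collection
	// element matches one of the accepted OpenDOAR datasource ids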
	private boolean filterXml(final String xml, final Set<String> acceptedOpendoarIds, final AggregatorReport report) {
		try {
			final String id = DocumentHelper
				.parseText(xml)
				.valueOf("//*[local-name()='collection']/@opendoar_id")
				.trim();
			return StringUtils.isNotBlank(id) && acceptedOpendoarIds.contains("opendoar____::" + id);
		} catch (final DocumentException e) {
			log.error("Error parsing document", e);
			throw new RuntimeException("Error parsing document", e);
		}
	}
}