
Merge pull request '[aggregator graph] handle paths including wildcards' (#281) from aggregator_graph into beta

Reviewed-on: D-Net/dnet-hadoop#281
Claudio Atzori 2023-03-08 21:17:39 +01:00
commit 46d2df1c90
7 changed files with 32 additions and 19 deletions

GenerateEntitiesApplication.java (View File)

@@ -29,6 +29,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.common.HdfsSupport;
 import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup;
+import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication;
 import eu.dnetlib.dhp.schema.common.ModelSupport;
 import eu.dnetlib.dhp.schema.oaf.*;
 import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
@@ -36,7 +37,7 @@ import eu.dnetlib.dhp.utils.ISLookupClientFactory;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
 import scala.Tuple2;
 
-public class GenerateEntitiesApplication {
+public class GenerateEntitiesApplication extends AbstractMigrationApplication {
 
     private static final Logger log = LoggerFactory.getLogger(GenerateEntitiesApplication.class);
@@ -112,15 +113,12 @@ public class GenerateEntitiesApplication {
         final boolean shouldHashId,
         final Mode mode) {
-        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
-        final List<String> existingSourcePaths = Arrays
-            .stream(sourcePaths.split(","))
-            .filter(p -> HdfsSupport.exists(p, sc.hadoopConfiguration()))
-            .collect(Collectors.toList());
+        final List<String> existingSourcePaths = listEntityPaths(spark, sourcePaths);
 
         log.info("Generate entities from files:");
         existingSourcePaths.forEach(log::info);
 
+        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
         JavaRDD<Oaf> inputRdd = sc.emptyRDD();
 
         for (final String sp : existingSourcePaths) {
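This refactor is the core of the fix: the old inline filter dropped any source path containing a wildcard, because a plain HDFS existence check treats the pattern as a literal file name. A minimal sketch of the distinction, assuming HdfsSupport.exists delegates to FileSystem.exists (the paths are invented for illustration):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class GlobVsExists {
    public static void main(final String[] args) throws Exception {
        final FileSystem fs = FileSystem.get(new Configuration());

        // false: nothing is literally named ".../*/*", even when
        // /data/odf_claims/<mdstore>/<version> directories do exist
        System.out.println(fs.exists(new Path("/data/odf_claims/*/*")));

        // glob expansion is a separate API: globStatus resolves the
        // wildcards and returns the concrete matches (empty if none)
        final FileStatus[] matches = fs.globStatus(new Path("/data/odf_claims/*/*"));
        if (matches != null) {
            for (final FileStatus st : matches) {
                System.out.println(st.getPath());
            }
        }
    }
}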

VerifyRecordsApplication.java (View File)

@@ -23,12 +23,13 @@ import org.slf4j.LoggerFactory;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.common.HdfsSupport;
 import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup;
+import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication;
 import eu.dnetlib.dhp.schema.oaf.Oaf;
 import eu.dnetlib.dhp.utils.ISLookupClientFactory;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
 import scala.Tuple2;
 
-public class VerifyRecordsApplication {
+public class VerifyRecordsApplication extends AbstractMigrationApplication {
 
     private static final Logger log = LoggerFactory.getLogger(VerifyRecordsApplication.class);
@@ -69,15 +70,13 @@ public class VerifyRecordsApplication {
     private static void validateRecords(SparkSession spark, String sourcePaths, String invalidPath,
         VocabularyGroup vocs) {
-        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
-        final List<String> existingSourcePaths = Arrays
-            .stream(sourcePaths.split(","))
-            .filter(p -> HdfsSupport.exists(p, sc.hadoopConfiguration()))
-            .collect(Collectors.toList());
+        final List<String> existingSourcePaths = listEntityPaths(spark, sourcePaths);
 
         log.info("Verify records in files:");
         existingSourcePaths.forEach(log::info);
 
+        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
         for (final String sp : existingSourcePaths) {
             RDD<String> invalidRecords = sc
                 .sequenceFile(sp, Text.class, Text.class)
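The wildcard entries kept by listEntityPaths still read correctly here, because sequenceFile goes through Hadoop's FileInputFormat, which expands globs while computing input splits. A sketch of the consuming side using the Java API (the path and the record layout are assumptions; the code above works with the Scala RDD returned by sc.sequenceFile):

import org.apache.hadoop.io.Text;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class ReadClaimStores {
    public static void main(final String[] args) {
        try (final JavaSparkContext sc = new JavaSparkContext("local[*]", "read-claims")) {
            // FileInputFormat expands the two glob levels to every
            // matching directory under oaf_claims before reading
            final JavaPairRDD<Text, Text> pairs = sc
                .sequenceFile("/data/oaf_claims/*/*", Text.class, Text.class);

            // assumption: the value side carries the record payload
            final JavaRDD<String> records = pairs.map(t -> t._2().toString());
            System.out.println(records.count());
        }
    }
}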

AbstractMigrationApplication.java (View File)

@@ -3,19 +3,26 @@ package eu.dnetlib.dhp.oa.graph.raw.common;
 import java.io.Closeable;
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.sql.SparkSession;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 
+import eu.dnetlib.dhp.common.HdfsSupport;
 import eu.dnetlib.dhp.schema.oaf.Oaf;
 import eu.dnetlib.dhp.utils.DHPUtils;
@@ -94,6 +101,15 @@ public class AbstractMigrationApplication implements Closeable {
         }
     }
 
+    protected static List<String> listEntityPaths(final SparkSession spark, final String paths) {
+        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+        return Arrays
+            .stream(paths.split(","))
+            .filter(StringUtils::isNotBlank)
+            .filter(p -> HdfsSupport.exists(p, sc.hadoopConfiguration()) || p.contains("/*"))
+            .collect(Collectors.toList());
+    }
+
     public ObjectMapper getObjectMapper() {
         return objectMapper;
     }

workflow.xml (View File)

@@ -215,7 +215,7 @@
     <action name="ImportODF_claims">
         <java>
             <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
-            <arg>--hdfsPath</arg><arg>${contentPath}/mdstore</arg>
+            <arg>--hdfsPath</arg><arg>${contentPath}/odf_claims</arg>
             <arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
             <arg>--mongoDb</arg><arg>${mongoDb}</arg>
             <arg>--mdFormat</arg><arg>ODF</arg>
@@ -238,7 +238,7 @@
     <action name="ImportOAF_claims">
         <java>
             <main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
-            <arg>--hdfsPath</arg><arg>${contentPath}/mdstore</arg>
+            <arg>--hdfsPath</arg><arg>${contentPath}/oaf_claims</arg>
             <arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
             <arg>--mongoDb</arg><arg>${mongoDb}</arg>
             <arg>--mdFormat</arg><arg>OAF</arg>
@@ -456,7 +456,7 @@
         --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
         --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
     </spark-opts>
-    <arg>--sourcePaths</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims</arg>
+    <arg>--sourcePaths</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims/*/*,${contentPath}/odf_claims/*/*</arg>
     <arg>--invalidPath</arg><arg>${workingDir}/invalid_records_claim</arg>
     <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
 </spark>
@@ -480,7 +480,7 @@
         --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
         --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
     </spark-opts>
-    <arg>--sourcePaths</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims</arg>
+    <arg>--sourcePaths</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims/*/*,${contentPath}/odf_claims/*/*</arg>
     <arg>--targetPath</arg><arg>${workingDir}/entities_claim</arg>
     <arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
     <arg>--shouldHashId</arg><arg>${shouldHashId}</arg>
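Taken together, the workflow changes give each claims import its own target directory in place of the shared ${contentPath}/mdstore, and point the verification and entity-generation steps at those directories through two glob levels. The /*/* pattern presumably matches the <mdstore-id>/<version> layout the Mongo importer writes under each path; with the relaxed filter in listEntityPaths, those patterns now reach the Spark readers instead of being discarded by the literal existence check.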