forked from D-Net/dnet-hadoop
[aggregator graph] handle paths including wildcards
parent 588aca5ce4
commit 7fd89566c2

GenerateEntitiesApplication.java

@@ -9,6 +9,7 @@ import java.util.Objects;
 import java.util.Optional;
 import java.util.stream.Collectors;

+import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.io.Text;
@@ -36,7 +37,7 @@ import eu.dnetlib.dhp.utils.ISLookupClientFactory;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
 import scala.Tuple2;

-public class GenerateEntitiesApplication {
+public class GenerateEntitiesApplication extends AbstractMigrationApplication {

     private static final Logger log = LoggerFactory.getLogger(GenerateEntitiesApplication.class);

@@ -112,15 +113,12 @@ public class GenerateEntitiesApplication {
         final boolean shouldHashId,
         final Mode mode) {

-        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
-        final List<String> existingSourcePaths = Arrays
-            .stream(sourcePaths.split(","))
-            .filter(p -> HdfsSupport.exists(p, sc.hadoopConfiguration()))
-            .collect(Collectors.toList());
+        final List<String> existingSourcePaths = listEntityPaths(spark, sourcePaths);

         log.info("Generate entities from files:");
         existingSourcePaths.forEach(log::info);

+        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
         JavaRDD<Oaf> inputRdd = sc.emptyRDD();

         for (final String sp : existingSourcePaths) {
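
A note on why entries kept only because they contain "/*" still work downstream: each surviving path is handed to the Spark/Hadoop readers (the VerifyRecordsApplication hunk below shows sc.sequenceFile(sp, Text.class, Text.class)), and those readers expand glob patterns when the RDD is evaluated, so a wildcard does not need to pass a literal existence check first. A minimal read-path sketch, assuming a local Spark session; the class name, app name and path are illustrative and not part of the commit:

import org.apache.hadoop.io.Text;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;

public class WildcardReadSketch {

    public static void main(String[] args) {
        // Local session purely for illustration.
        final SparkSession spark = SparkSession
            .builder()
            .appName("wildcard-read-sketch")
            .master("local[*]")
            .getOrCreate();
        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

        // "/data/claims/*" is a hypothetical glob: it would fail a literal existence check,
        // but sequenceFile() expands the pattern against the file system when the job runs.
        final long records = sc
            .sequenceFile("/data/claims/*", Text.class, Text.class)
            .count();
        System.out.println("records matched by the glob: " + records);

        spark.stop();
    }
}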

VerifyRecordsApplication.java

@@ -9,6 +9,7 @@ import java.util.Objects;
 import java.util.Optional;
 import java.util.stream.Collectors;

+import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.spark.SparkConf;
@@ -28,7 +29,7 @@ import eu.dnetlib.dhp.utils.ISLookupClientFactory;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
 import scala.Tuple2;

-public class VerifyRecordsApplication {
+public class VerifyRecordsApplication extends AbstractMigrationApplication {

     private static final Logger log = LoggerFactory.getLogger(VerifyRecordsApplication.class);

@@ -69,15 +70,13 @@ public class VerifyRecordsApplication {
     private static void validateRecords(SparkSession spark, String sourcePaths, String invalidPath,
         VocabularyGroup vocs) {

-        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
-        final List<String> existingSourcePaths = Arrays
-            .stream(sourcePaths.split(","))
-            .filter(p -> HdfsSupport.exists(p, sc.hadoopConfiguration()))
-            .collect(Collectors.toList());
+        final List<String> existingSourcePaths = listEntityPaths(spark, sourcePaths);

         log.info("Verify records in files:");
         existingSourcePaths.forEach(log::info);

+        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+
         for (final String sp : existingSourcePaths) {
             RDD<String> invalidRecords = sc
                 .sequenceFile(sp, Text.class, Text.class)
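
The inline filter removed above kept an entry only when HdfsSupport.exists(...) returned true, and an existence check of that kind treats a glob such as /data/claims/* as a literal path, which is presumably why wildcard entries were dropped before this commit. Hadoop's FileSystem API shows the distinction directly; the sketch below is illustrative only (class name and path are invented, and it assumes the default configuration resolves to whatever file system the caller already uses):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class GlobVsExistsSketch {

    public static void main(String[] args) throws Exception {
        final FileSystem fs = FileSystem.get(new Configuration());
        final String glob = "/data/claims/*"; // hypothetical path

        // exists() checks the argument as a literal path, so a glob normally yields false.
        System.out.println("exists(literal): " + fs.exists(new Path(glob)));

        // globStatus() is the call that actually expands the pattern.
        final FileStatus[] matches = fs.globStatus(new Path(glob));
        System.out.println("glob matches: " + (matches == null ? 0 : matches.length));
    }
}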

AbstractMigrationApplication.java

@@ -3,9 +3,14 @@ package eu.dnetlib.dhp.oa.graph.raw.common;

 import java.io.Closeable;
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;

+import eu.dnetlib.dhp.common.HdfsSupport;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -18,6 +23,8 @@ import com.fasterxml.jackson.databind.ObjectMapper;

 import eu.dnetlib.dhp.schema.oaf.Oaf;
 import eu.dnetlib.dhp.utils.DHPUtils;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.sql.SparkSession;

 public class AbstractMigrationApplication implements Closeable {

@@ -94,6 +101,15 @@ public class AbstractMigrationApplication implements Closeable {
         }
     }

+    protected static List<String> listEntityPaths(final SparkSession spark, final String paths) {
+        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+        return Arrays
+            .stream(paths.split(","))
+            .filter(StringUtils::isNotBlank)
+            .filter(p -> HdfsSupport.exists(p, sc.hadoopConfiguration()) || p.contains("/*"))
+            .collect(Collectors.toList());
+    }
+
     public ObjectMapper getObjectMapper() {
         return objectMapper;
     }
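
The new helper keeps a comma-separated entry when it is non-blank and either already exists on HDFS or contains "/*", leaving glob expansion entirely to the readers. Below is a dependency-free sketch of that rule with the HDFS lookup swapped for an injectable predicate, so the behaviour can be checked without a cluster; every class name and path in it is hypothetical:

import java.util.Arrays;
import java.util.List;
import java.util.function.Predicate;
import java.util.stream.Collectors;

public class ListEntityPathsSketch {

    // Same shape as listEntityPaths above, with HdfsSupport.exists(...) replaced by a predicate.
    static List<String> listEntityPaths(final String paths, final Predicate<String> existsOnHdfs) {
        return Arrays
            .stream(paths.split(","))
            .filter(p -> !p.trim().isEmpty())                      // stands in for StringUtils::isNotBlank
            .filter(p -> existsOnHdfs.test(p) || p.contains("/*")) // wildcard entries are kept regardless
            .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        // Pretend only one literal directory exists; every path here is made up.
        final Predicate<String> existsOnHdfs = "/data/oaf/datasource"::equals;

        System.out.println(listEntityPaths("/data/oaf/datasource,/data/claims/*,,/missing/dir", existsOnHdfs));
        // prints: [/data/oaf/datasource, /data/claims/*]
    }
}

One consequence of this rule is that wildcard entries are accepted without any up-front validation, so a glob that matches nothing will only surface later, when the corresponding read is attempted.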