forked from D-Net/dnet-hadoop

commit 46d2df1c90

Merge pull request '[aggregator graph] handle paths including wildcards' (#281) from aggregator_graph into beta

Reviewed-on: D-Net/dnet-hadoop#281
@@ -92,7 +92,7 @@ public class PrepareH2020ProgrammeTest {
 		Assertions.assertEquals(0, verificationDataset.filter("classification = ''").count());
 
-		//tmp.foreach(csvProgramme -> System.out.println(OBJECT_MAPPER.writeValueAsString(csvProgramme)));
+		// tmp.foreach(csvProgramme -> System.out.println(OBJECT_MAPPER.writeValueAsString(csvProgramme)));
 
 		Assertions
 			.assertEquals(
@@ -98,7 +98,7 @@ public class ReadProjectsTest {
 		Assertions.assertEquals("H2020-EU.1.3.", project.getLegalBasis());
 		Assertions.assertEquals("MSCA-IF-2019", project.getTopics());
 
-		//tmp.foreach(p -> System.out.println(OBJECT_MAPPER.writeValueAsString(p)));
+		// tmp.foreach(p -> System.out.println(OBJECT_MAPPER.writeValueAsString(p)));
 
 	}
 }
@@ -93,7 +93,7 @@ public class ReadTopicTest {
 		Assertions.assertEquals("Individual Fellowships", topic.getTitle());
 		Assertions.assertEquals("MSCA-IF-2019", topic.getTopic());
 
-		//tmp.foreach(p -> System.out.println(OBJECT_MAPPER.writeValueAsString(p)));
+		// tmp.foreach(p -> System.out.println(OBJECT_MAPPER.writeValueAsString(p)));
 
 	}
 }
@@ -29,6 +29,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.common.HdfsSupport;
 import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup;
+import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication;
 import eu.dnetlib.dhp.schema.common.ModelSupport;
 import eu.dnetlib.dhp.schema.oaf.*;
 import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
@@ -36,7 +37,7 @@ import eu.dnetlib.dhp.utils.ISLookupClientFactory;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
 import scala.Tuple2;
 
-public class GenerateEntitiesApplication {
+public class GenerateEntitiesApplication extends AbstractMigrationApplication {
 
 	private static final Logger log = LoggerFactory.getLogger(GenerateEntitiesApplication.class);
 
@@ -112,15 +113,12 @@ public class GenerateEntitiesApplication {
 		final boolean shouldHashId,
 		final Mode mode) {
 
-		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
-		final List<String> existingSourcePaths = Arrays
-			.stream(sourcePaths.split(","))
-			.filter(p -> HdfsSupport.exists(p, sc.hadoopConfiguration()))
-			.collect(Collectors.toList());
+		final List<String> existingSourcePaths = listEntityPaths(spark, sourcePaths);
 
 		log.info("Generate entities from files:");
 		existingSourcePaths.forEach(log::info);
 
+		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 		JavaRDD<Oaf> inputRdd = sc.emptyRDD();
 
 		for (final String sp : existingSourcePaths) {
@@ -23,12 +23,13 @@ import org.slf4j.LoggerFactory;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.common.HdfsSupport;
 import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup;
+import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication;
 import eu.dnetlib.dhp.schema.oaf.Oaf;
 import eu.dnetlib.dhp.utils.ISLookupClientFactory;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
 import scala.Tuple2;
 
-public class VerifyRecordsApplication {
+public class VerifyRecordsApplication extends AbstractMigrationApplication {
 
 	private static final Logger log = LoggerFactory.getLogger(VerifyRecordsApplication.class);
 
@@ -69,15 +70,13 @@ public class VerifyRecordsApplication {
 	private static void validateRecords(SparkSession spark, String sourcePaths, String invalidPath,
 		VocabularyGroup vocs) {
 
-		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
-		final List<String> existingSourcePaths = Arrays
-			.stream(sourcePaths.split(","))
-			.filter(p -> HdfsSupport.exists(p, sc.hadoopConfiguration()))
-			.collect(Collectors.toList());
+		final List<String> existingSourcePaths = listEntityPaths(spark, sourcePaths);
 
 		log.info("Verify records in files:");
 		existingSourcePaths.forEach(log::info);
 
+		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 
 		for (final String sp : existingSourcePaths) {
 			RDD<String> invalidRecords = sc
 				.sequenceFile(sp, Text.class, Text.class)
@@ -3,19 +3,26 @@ package eu.dnetlib.dhp.oa.graph.raw.common;
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.sql.SparkSession;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 
+import eu.dnetlib.dhp.common.HdfsSupport;
 import eu.dnetlib.dhp.schema.oaf.Oaf;
 import eu.dnetlib.dhp.utils.DHPUtils;
 
@@ -94,6 +101,15 @@ public class AbstractMigrationApplication implements Closeable {
 		}
 	}
 
+	protected static List<String> listEntityPaths(final SparkSession spark, final String paths) {
+		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+		return Arrays
+			.stream(paths.split(","))
+			.filter(StringUtils::isNotBlank)
+			.filter(p -> HdfsSupport.exists(p, sc.hadoopConfiguration()) || p.contains("/*"))
+			.collect(Collectors.toList());
+	}
+
 	public ObjectMapper getObjectMapper() {
 		return objectMapper;
 	}
@@ -215,7 +215,7 @@
 	<action name="ImportODF_claims">
 		<java>
 			<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
-			<arg>--hdfsPath</arg><arg>${contentPath}/mdstore</arg>
+			<arg>--hdfsPath</arg><arg>${contentPath}/odf_claims</arg>
 			<arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
 			<arg>--mongoDb</arg><arg>${mongoDb}</arg>
 			<arg>--mdFormat</arg><arg>ODF</arg>
|
@ -238,7 +238,7 @@
|
||||||
<action name="ImportOAF_claims">
|
<action name="ImportOAF_claims">
|
||||||
<java>
|
<java>
|
||||||
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
|
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
|
||||||
<arg>--hdfsPath</arg><arg>${contentPath}/mdstore</arg>
|
<arg>--hdfsPath</arg><arg>${contentPath}/oaf_claims</arg>
|
||||||
<arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
|
<arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
|
||||||
<arg>--mongoDb</arg><arg>${mongoDb}</arg>
|
<arg>--mongoDb</arg><arg>${mongoDb}</arg>
|
||||||
<arg>--mdFormat</arg><arg>OAF</arg>
|
<arg>--mdFormat</arg><arg>OAF</arg>
|
||||||
|
@ -456,7 +456,7 @@
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||||
</spark-opts>
|
</spark-opts>
|
||||||
<arg>--sourcePaths</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims</arg>
|
<arg>--sourcePaths</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims/*/*,${contentPath}/odf_claims/*/*</arg>
|
||||||
<arg>--invalidPath</arg><arg>${workingDir}/invalid_records_claim</arg>
|
<arg>--invalidPath</arg><arg>${workingDir}/invalid_records_claim</arg>
|
||||||
<arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
|
<arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
|
||||||
</spark>
|
</spark>
|
||||||
|
@ -480,7 +480,7 @@
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||||
</spark-opts>
|
</spark-opts>
|
||||||
<arg>--sourcePaths</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims</arg>
|
<arg>--sourcePaths</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims/*/*,${contentPath}/odf_claims/*/*</arg>
|
||||||
<arg>--targetPath</arg><arg>${workingDir}/entities_claim</arg>
|
<arg>--targetPath</arg><arg>${workingDir}/entities_claim</arg>
|
||||||
<arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
|
<arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
|
||||||
<arg>--shouldHashId</arg><arg>${shouldHashId}</arg>
|
<arg>--shouldHashId</arg><arg>${shouldHashId}</arg>
|
||||||
|
|
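The wildcard sourcePaths rely on the Hadoop input layer expanding the glob when the Spark job reads the sequence files, presumably matching the per-mdstore subdirectories written under oaf_claims and odf_claims. A minimal local sketch of such a read (the /tmp/content layout is hypothetical and must exist for the job to find input):

import org.apache.hadoop.io.Text;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

public class WildcardSequenceFileReadSketch {

	public static void main(final String[] args) {
		final SparkConf conf = new SparkConf()
			.setAppName("WildcardSequenceFileReadSketch")
			.setMaster("local[*]");

		try (JavaSparkContext sc = new JavaSparkContext(conf)) {
			// Hypothetical layout: /tmp/content/oaf_claims/<storeId>/<version>/part-*
			// The /*/* glob lets the input format pick up every store version
			// without the workflow enumerating the concrete directory names.
			final String path = "/tmp/content/oaf_claims/*/*";

			final long records = sc
				.sequenceFile(path, Text.class, Text.class)
				.count();

			System.out.println("records under " + path + ": " + records);
		}
	}
}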