forked from D-Net/dnet-hadoop
refactoring after compile
This commit is contained in:
parent 7184cc0804
commit 32870339f5
@@ -23,59 +23,59 @@ import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;

public class ReadDatasourceMasterDuplicateFromDB {

	private static final Logger log = LoggerFactory.getLogger(ReadDatasourceMasterDuplicateFromDB.class);

	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	private static final String QUERY = "SELECT distinct dd.id as masterId, d.officialname as masterName, dd.duplicate as duplicateId "
		+ "FROM dsm_dedup_services dd join dsm_services d on (dd.id = d.id);";

	public static int execute(String dbUrl, String dbUser, String dbPassword, String hdfsPath, String hdfsNameNode)
		throws IOException {
		int count = 0;
		try (DbClient dbClient = new DbClient(dbUrl, dbUser, dbPassword)) {
			Configuration conf = new Configuration();
			conf.set("fs.defaultFS", hdfsNameNode);
			FileSystem fileSystem = FileSystem.get(conf);
			FSDataOutputStream fos = fileSystem.create(new Path(hdfsPath));

			log.info("running query: {}", QUERY);
			log.info("storing results in: {}", hdfsPath);

			try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(fos, StandardCharsets.UTF_8))) {
				dbClient.processResults(QUERY, rs -> writeMap(datasourceMasterMap(rs), writer));
				count++;
			}
		}
		return count;
	}

	private static MasterDuplicate datasourceMasterMap(ResultSet rs) {
		try {
			final MasterDuplicate md = new MasterDuplicate();

			final String duplicateId = rs.getString("duplicateId");
			final String masterId = rs.getString("masterId");
			final String masterName = rs.getString("masterName");

			md.setDuplicateId(OafMapperUtils.createOpenaireId(10, duplicateId, true));
			md.setMasterId(OafMapperUtils.createOpenaireId(10, masterId, true));
			md.setMasterName(masterName);

			return md;
		} catch (final SQLException e) {
			throw new RuntimeException(e);
		}
	}

	private static void writeMap(final MasterDuplicate dm, final BufferedWriter writer) {
		try {
			writer.write(OBJECT_MAPPER.writeValueAsString(dm));
			writer.newLine();
		} catch (final IOException e) {
			throw new RuntimeException(e);
		}
	}

}
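For reference, a minimal sketch (not part of the commit) of how the export above could be invoked; the connection details and paths are placeholder values, and the class is resolved through the eu.dnetlib.dhp.common.action import that MasterDuplicateAction uses further down:

import eu.dnetlib.dhp.common.action.ReadDatasourceMasterDuplicateFromDB;

public class MasterDuplicateExportSketch {

	public static void main(String[] args) throws Exception {
		// All arguments below are hypothetical placeholders, not values taken from the commit.
		int rows = ReadDatasourceMasterDuplicateFromDB
			.execute(
				"jdbc:postgresql://localhost:5432/dnet_openaireplus", // dbUrl
				"dnetuser", // dbUser
				"dnetpassword", // dbPassword
				"/tmp/masterduplicate", // hdfsPath: one JSON-serialized MasterDuplicate per line
				"hdfs://localhost:8020"); // hdfsNameNode
		System.out.println("written " + rows + " rows");
	}
}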
@@ -9,7 +9,6 @@ import java.io.Serializable;

import java.util.Optional;

import org.apache.commons.io.IOUtils;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.*;
@@ -1,7 +1,10 @@

package eu.dnetlib.dhp.actionmanager.createunresolvedentities;

-import com.fasterxml.jackson.databind.ObjectMapper;
-import eu.dnetlib.dhp.actionmanager.createunresolvedentities.model.FOSDataModel;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

@@ -17,9 +20,9 @@ import org.junit.jupiter.api.Test;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.actionmanager.createunresolvedentities.model.FOSDataModel;

/**
 * @author miriam.baglioni

@@ -27,48 +30,48 @@ import java.nio.file.Path;

 */
public class GetFosTest {

	private static final Logger log = LoggerFactory.getLogger(ProduceTest.class);

	private static Path workingDir;
	private static SparkSession spark;
	private static LocalFileSystem fs;
	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	@BeforeAll
	public static void beforeAll() throws IOException {
		workingDir = Files.createTempDirectory(PrepareTest.class.getSimpleName());

		fs = FileSystem.getLocal(new Configuration());
		log.info("using work dir {}", workingDir);

		SparkConf conf = new SparkConf();
		conf.setAppName(ProduceTest.class.getSimpleName());

		conf.setMaster("local[*]");
		conf.set("spark.driver.host", "localhost");
		conf.set("hive.metastore.local", "true");
		conf.set("spark.ui.enabled", "false");
		conf.set("spark.sql.warehouse.dir", workingDir.toString());
		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

		spark = SparkSession
			.builder()
			.appName(PrepareTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();
	}

	@AfterAll
	public static void afterAll() throws IOException {
		FileUtils.deleteDirectory(workingDir.toFile());
		spark.stop();
	}

	@Test
	void test3() throws Exception {
		final String sourcePath = getClass()
			.getResource("/eu/dnetlib/dhp/actionmanager/createunresolvedentities/fos/fos_sbs.tsv")
			.getPath();

		final String outputPath = workingDir.toString() + "/fos.json";
		GetFOSSparkJob
@@ -10,36 +10,36 @@ import eu.dnetlib.dhp.common.action.ReadDatasourceMasterDuplicateFromDB;

public class MasterDuplicateAction {

	private static final Logger log = LoggerFactory.getLogger(MasterDuplicateAction.class);

	public static void main(final String[] args) throws Exception {
		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
			IOUtils
				.toString(
					MasterDuplicateAction.class
						.getResourceAsStream(
							"/eu/dnetlib/dhp/oa/graph/datasourcemaster_parameters.json")));

		parser.parseArgument(args);

		final String dbUrl = parser.get("postgresUrl");
		log.info("postgresUrl: {}", dbUrl);

		final String dbUser = parser.get("postgresUser");
		log.info("postgresUser: {}", dbUser);

		final String dbPassword = parser.get("postgresPassword");
		log.info("postgresPassword: {}", dbPassword);

		final String hdfsPath = parser.get("hdfsPath");
		log.info("hdfsPath: {}", hdfsPath);

		final String hdfsNameNode = parser.get("hdfsNameNode");
		log.info("hdfsNameNode: {}", hdfsNameNode);

		int rows = ReadDatasourceMasterDuplicateFromDB.execute(dbUrl, dbUser, dbPassword, hdfsPath, hdfsNameNode);

		log.info("written {} rows", rows);
	}

}
@@ -37,191 +37,191 @@ import scala.Tuple2;

public class CleanCfHbSparkJob {

	private static final Logger log = LoggerFactory.getLogger(CleanCfHbSparkJob.class);

	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	public static void main(String[] args) throws Exception {

		String jsonConfiguration = IOUtils
			.toString(
				CleanCountrySparkJob.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/input_clean_cfhb_parameters.json"));
		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		String inputPath = parser.get("inputPath");
		log.info("inputPath: {}", inputPath);

		String resolvedPath = parser.get("resolvedPath");
		log.info("resolvedPath: {}", resolvedPath);

		String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		String dsMasterDuplicatePath = parser.get("masterDuplicatePath");
		log.info("masterDuplicatePath: {}", dsMasterDuplicatePath);

		String graphTableClassName = parser.get("graphTableClassName");
		log.info("graphTableClassName: {}", graphTableClassName);

		Class<? extends Result> entityClazz = (Class<? extends Result>) Class.forName(graphTableClassName);

		SparkConf conf = new SparkConf();
		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				HdfsSupport.remove(outputPath, spark.sparkContext().hadoopConfiguration());
				HdfsSupport.remove(resolvedPath, spark.sparkContext().hadoopConfiguration());
				cleanCfHb(
					spark, inputPath, entityClazz, resolvedPath, dsMasterDuplicatePath, outputPath);
			});
	}

	private static <T extends Result> void cleanCfHb(SparkSession spark, String inputPath, Class<T> entityClazz,
		String resolvedPath, String masterDuplicatePath, String outputPath) {

		// read the master-duplicate tuples
		Dataset<MasterDuplicate> md = spark
			.read()
			.textFile(masterDuplicatePath)
			.map(as(MasterDuplicate.class), Encoders.bean(MasterDuplicate.class));

		// prepare the resolved CF|HB references with the corresponding EMPTY master ID
		Dataset<IdCfHbMapping> resolved = spark
			.read()
			.textFile(inputPath)
			.map(as(entityClazz), Encoders.bean(entityClazz))
			.flatMap(flattenCfHbFn(), Encoders.bean(IdCfHbMapping.class));

		// set the EMPTY master ID/NAME and save it
		resolved
			.joinWith(md, resolved.col("cfhb").equalTo(md.col("duplicateId")))
			.map(asIdCfHbMapping(), Encoders.bean(IdCfHbMapping.class))
			.filter((FilterFunction<IdCfHbMapping>) m -> Objects.nonNull(m.getMasterId()))
			.write()
			.mode(SaveMode.Overwrite)
			.json(resolvedPath);

		// read again the resolved CF|HB mapping
		Dataset<IdCfHbMapping> resolvedDS = spark
			.read()
			.textFile(resolvedPath)
			.map(as(IdCfHbMapping.class), Encoders.bean(IdCfHbMapping.class));

		// read the result table
		Dataset<T> res = spark
			.read()
			.textFile(inputPath)
			.map(as(entityClazz), Encoders.bean(entityClazz));

		// Join the results with the resolved CF|HB mapping, apply the mapping and save it
		res
			.joinWith(resolvedDS, res.col("id").equalTo(resolvedDS.col("resultId")), "left")
			.groupByKey((MapFunction<Tuple2<T, IdCfHbMapping>, String>) t -> t._1().getId(), Encoders.STRING())
			.mapGroups(getMapGroupsFunction(), Encoders.bean(entityClazz))
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(outputPath);
	}

	private static MapFunction<Tuple2<IdCfHbMapping, MasterDuplicate>, IdCfHbMapping> asIdCfHbMapping() {
		return t -> {
			final IdCfHbMapping mapping = t._1();
			Optional
				.ofNullable(t._2())
				.ifPresent(t2 -> {
					mapping.setMasterId(t2.getMasterId());
					mapping.setMasterName(t2.getMasterName());
				});
			return mapping;
		};
	}

	private static <T extends Result> FlatMapFunction<T, IdCfHbMapping> flattenCfHbFn() {
		return r -> Stream
			.concat(
				Optional
					.ofNullable(r.getCollectedfrom())
					.map(cf -> cf.stream().map(KeyValue::getKey))
					.orElse(Stream.empty()),
				Stream
					.concat(
						Optional
							.ofNullable(r.getInstance())
							.map(
								instances -> instances
									.stream()
									.map(i -> Optional.ofNullable(i.getHostedby()).map(KeyValue::getKey).orElse("")))
							.orElse(Stream.empty())
							.filter(StringUtils::isNotBlank),
						Optional
							.ofNullable(r.getInstance())
							.map(
								instances -> instances
									.stream()
									.map(
										i -> Optional
											.ofNullable(i.getCollectedfrom())
											.map(KeyValue::getKey)
											.orElse("")))
							.orElse(Stream.empty())
							.filter(StringUtils::isNotBlank)))
			.distinct()
			.filter(StringUtils::isNotBlank)
			.map(cfHb -> asIdCfHbMapping(r.getId(), cfHb))
			.iterator();
	}

	private static <T extends Result> MapGroupsFunction<String, Tuple2<T, IdCfHbMapping>, T> getMapGroupsFunction() {
		return new MapGroupsFunction<String, Tuple2<T, IdCfHbMapping>, T>() {
			@Override
			public T call(String key, Iterator<Tuple2<T, IdCfHbMapping>> values) {
				final Tuple2<T, IdCfHbMapping> first = values.next();
				final T res = first._1();

				updateResult(res, first._2());
				values.forEachRemaining(t -> updateResult(res, t._2()));
				return res;
			}

			private void updateResult(T res, IdCfHbMapping m) {
				if (Objects.nonNull(m)) {
					res.getCollectedfrom().forEach(kv -> updateKeyValue(kv, m));
					res.getInstance().forEach(i -> {
						updateKeyValue(i.getHostedby(), m);
						updateKeyValue(i.getCollectedfrom(), m);
					});
				}
			}

			private void updateKeyValue(final KeyValue kv, final IdCfHbMapping a) {
				if (kv.getKey().equals(a.getCfhb())) {
					kv.setKey(a.getMasterId());
					kv.setValue(a.getMasterName());
				}
			}

		};
	}

	private static IdCfHbMapping asIdCfHbMapping(String resultId, String cfHb) {
		IdCfHbMapping m = new IdCfHbMapping(resultId);
		m.setCfhb(cfHb);
		return m;
	}

	private static <R> MapFunction<String, R> as(Class<R> clazz) {
		return s -> OBJECT_MAPPER.readValue(s, clazz);
	}

}
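As an illustration (not part of the commit), the rewrite applied by updateKeyValue boils down to the snippet below. It assumes KeyValue is the bean from the schema module (import path assumed) and that IdCfHbMapping, defined in the next file, is on the classpath with its package import omitted; all identifier values are invented:

import eu.dnetlib.dhp.schema.oaf.KeyValue; // assumed location of the KeyValue bean

public class UpdateKeyValueSketch {

	public static void main(String[] args) {
		// Hypothetical identifiers, for illustration only.
		KeyValue hostedby = new KeyValue();
		hostedby.setKey("10|duplicateDatasourceId");
		hostedby.setValue("Duplicate datasource name");

		IdCfHbMapping mapping = new IdCfHbMapping("50|someResultId");
		mapping.setCfhb("10|duplicateDatasourceId");
		mapping.setMasterId("10|masterDatasourceId");
		mapping.setMasterName("Master datasource name");

		// Same check performed by updateKeyValue above: a cf/hb reference that
		// matches a resolved duplicate id is redirected to the master id/name.
		if (hostedby.getKey().equals(mapping.getCfhb())) {
			hostedby.setKey(mapping.getMasterId());
			hostedby.setValue(mapping.getMasterName());
		}

		System.out.println(hostedby.getKey() + " -> " + hostedby.getValue());
	}
}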
@@ -5,50 +5,50 @@ import java.io.Serializable;

public class IdCfHbMapping implements Serializable {

	private String resultId;

	private String cfhb;

	private String masterId;

	private String masterName;

	public IdCfHbMapping() {
	}

	public IdCfHbMapping(String id) {
		this.resultId = id;
	}

	public String getResultId() {
		return resultId;
	}

	public void setResultId(String resultId) {
		this.resultId = resultId;
	}

	public String getCfhb() {
		return cfhb;
	}

	public void setCfhb(String cfhb) {
		this.cfhb = cfhb;
	}

	public String getMasterId() {
		return masterId;
	}

	public void setMasterId(String masterId) {
		this.masterId = masterId;
	}

	public String getMasterName() {
		return masterName;
	}

	public void setMasterName(String masterName) {
		this.masterName = masterName;
	}
}
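A quick sketch (not part of the commit) of what one record of the intermediate resolvedPath dataset looks like once this bean is serialized by the job's ObjectMapper; the field values are invented and the exact property order may differ:

import com.fasterxml.jackson.databind.ObjectMapper;

public class IdCfHbMappingJsonSketch {

	public static void main(String[] args) throws Exception {
		// Hypothetical values; only the record shape is of interest.
		IdCfHbMapping m = new IdCfHbMapping("50|someResultId");
		m.setCfhb("10|duplicateDatasourceId");
		m.setMasterId("10|masterDatasourceId");
		m.setMasterName("Master datasource name");

		// Prints one JSON line, e.g.
		// {"resultId":"50|someResultId","cfhb":"10|duplicateDatasourceId","masterId":"10|masterDatasourceId","masterName":"Master datasource name"}
		System.out.println(new ObjectMapper().writeValueAsString(m));
	}
}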