refactoring

This commit is contained in:
parent 44fab140de
commit edb00db86a
OrganizationMap.java
@@ -6,13 +6,13 @@ import java.util.List;
 
 public class OrganizationMap extends HashMap<String, List<String>> {
 
-    public OrganizationMap(){
+    public OrganizationMap() {
         super();
     }
 
-    public List<String> get(String key){
+    public List<String> get(String key) {
 
-        if (super.get(key) == null){
+        if (super.get(key) == null) {
             return new ArrayList<>();
         }
         return super.get(key);
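Note: the only behavioural contract in this class is the null-safe get(). A self-contained sketch of that behaviour follows; the demo class and the sample keys are illustrative and do not come from the repository.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

// Same class as in the diff above; the demo main() below is illustrative only.
class OrganizationMap extends HashMap<String, List<String>> {

    public OrganizationMap() {
        super();
    }

    // Missing keys yield an empty list instead of null, so callers can iterate
    // the result without a null check.
    public List<String> get(String key) {
        if (super.get(key) == null) {
            return new ArrayList<>();
        }
        return super.get(key);
    }
}

class OrganizationMapDemo {
    public static void main(String[] args) {
        OrganizationMap map = new OrganizationMap();
        map.put("org::1", Arrays.asList("community-a", "community-b"));

        System.out.println(map.get("org::1")); // [community-a, community-b]
        System.out.println(map.get("org::2")); // [] rather than null
    }
}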

PrepareResultCommunitySet.java
@@ -1,20 +1,19 @@
 package eu.dnetlib.dhp.resulttocommunityfromorganization;
 
+import static eu.dnetlib.dhp.PropagationConstant.*;
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.gson.Gson;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.schema.oaf.Relation;
+import java.util.*;
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.sql.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.*;
-
-import static eu.dnetlib.dhp.PropagationConstant.*;
-import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
-
 public class PrepareResultCommunitySet {
 
     private static final Logger log = LoggerFactory.getLogger(PrepareResultCommunitySet.class);
@@ -22,11 +21,12 @@ public class PrepareResultCommunitySet {
     private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
 
     public static void main(String[] args) throws Exception {
-        String jsonConfiguration = IOUtils.toString(PrepareResultCommunitySet.class
-                .getResourceAsStream("/eu/dnetlib/dhp/resulttocommunityfromorganization/input_preparecommunitytoresult_parameters.json"));
+        String jsonConfiguration =
+                IOUtils.toString(
+                        PrepareResultCommunitySet.class.getResourceAsStream(
+                                "/eu/dnetlib/dhp/resulttocommunityfromorganization/input_preparecommunitytoresult_parameters.json"));
 
-        final ArgumentApplicationParser parser = new ArgumentApplicationParser(
-                jsonConfiguration);
+        final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
 
         parser.parseArgument(args);
 
@@ -39,76 +39,91 @@ public class PrepareResultCommunitySet {
         final String outputPath = parser.get("outputPath");
         log.info("outputPath: {}", outputPath);
 
-        final OrganizationMap organizationMap = new Gson().fromJson(parser.get("organizationtoresultcommunitymap"), OrganizationMap.class);
+        final OrganizationMap organizationMap =
+                new Gson()
+                        .fromJson(
+                                parser.get("organizationtoresultcommunitymap"),
+                                OrganizationMap.class);
         log.info("organizationMap: {}", new Gson().toJson(organizationMap));
 
         SparkConf conf = new SparkConf();
         conf.set("hive.metastore.uris", parser.get("hive_metastore_uris"));
 
-        runWithSparkHiveSession(conf, isSparkSessionManaged,
+        runWithSparkHiveSession(
+                conf,
+                isSparkSessionManaged,
                 spark -> {
                     if (isTest(parser)) {
                         removeOutputDir(spark, outputPath);
                     }
                     prepareInfo(spark, inputPath, outputPath, organizationMap);
                 });
     }
 
-    private static void prepareInfo(SparkSession spark, String inputPath, String outputPath, OrganizationMap organizationMap) {
+    private static void prepareInfo(
+            SparkSession spark,
+            String inputPath,
+            String outputPath,
+            OrganizationMap organizationMap) {
         Dataset<Relation> relation = readRelations(spark, inputPath);
         relation.createOrReplaceTempView("relation");
 
-        String query = "SELECT result_organization.source resultId, result_organization.target orgId, org_set merges " +
-                "FROM (SELECT source, target " +
-                " FROM relation " +
-                " WHERE datainfo.deletedbyinference = false " +
-                " AND relClass = '" + RELATION_RESULT_ORGANIZATION_REL_CLASS + "') result_organization " +
-                "LEFT JOIN (SELECT source, collect_set(target) org_set " +
-                " FROM relation " +
-                " WHERE datainfo.deletedbyinference = false " +
-                " AND relClass = '" + RELATION_REPRESENTATIVERESULT_RESULT_CLASS + "' " +
-                " GROUP BY source) organization_organization " +
-                "ON result_organization.target = organization_organization.source ";
+        String query =
+                "SELECT result_organization.source resultId, result_organization.target orgId, org_set merges "
+                        + "FROM (SELECT source, target "
+                        + " FROM relation "
+                        + " WHERE datainfo.deletedbyinference = false "
+                        + " AND relClass = '"
+                        + RELATION_RESULT_ORGANIZATION_REL_CLASS
+                        + "') result_organization "
+                        + "LEFT JOIN (SELECT source, collect_set(target) org_set "
+                        + " FROM relation "
+                        + " WHERE datainfo.deletedbyinference = false "
+                        + " AND relClass = '"
+                        + RELATION_REPRESENTATIVERESULT_RESULT_CLASS
+                        + "' "
+                        + " GROUP BY source) organization_organization "
+                        + "ON result_organization.target = organization_organization.source ";
 
-        org.apache.spark.sql.Dataset<ResultOrganizations> result_organizationset = spark.sql(query)
-                .as(Encoders.bean(ResultOrganizations.class));
+        org.apache.spark.sql.Dataset<ResultOrganizations> result_organizationset =
+                spark.sql(query).as(Encoders.bean(ResultOrganizations.class));
 
         result_organizationset
-                .map(value -> {
-                    String rId = value.getResultId();
-                    List<String> orgs = value.getMerges();
-                    String oTarget = value.getOrgId();
-                    Set<String> communitySet = new HashSet<>();
-                    if (organizationMap.containsKey(oTarget)) {
-                        communitySet.addAll(organizationMap.get(oTarget));
-                    }
-                    try{
-                        for (String oId : orgs) {
-                            if (organizationMap.containsKey(oId)) {
-                                communitySet.addAll(organizationMap.get(oId));
-                            }
-                        }
-                    }catch(Exception e){
-                    }
-                    if (communitySet.size() > 0){
-                        ResultCommunityList rcl = new ResultCommunityList();
-                        rcl.setResultId(rId);
-                        ArrayList<String> communityList = new ArrayList<>();
-                        communityList.addAll(communitySet);
-                        rcl.setCommunityList(communityList);
-                        return rcl;
-                    }
-                    return null;
-                }, Encoders.bean(ResultCommunityList.class))
-                .filter(r -> r!= null)
-                .toJSON()
-                .write()
-                .mode(SaveMode.Overwrite)
-                .option("compression", "gzip")
-                .text(outputPath);
+                .map(
+                        value -> {
+                            String rId = value.getResultId();
+                            Optional<List<String>> orgs = Optional.ofNullable(value.getMerges());
+                            String oTarget = value.getOrgId();
+                            Set<String> communitySet = new HashSet<>();
+                            if (organizationMap.containsKey(oTarget)) {
+                                communitySet.addAll(organizationMap.get(oTarget));
+                            }
+                            if (orgs.isPresent())
+                                // try{
+                                for (String oId : orgs.get()) {
+                                    if (organizationMap.containsKey(oId)) {
+                                        communitySet.addAll(organizationMap.get(oId));
+                                    }
+                                }
+                            // }catch(Exception e){
+                            //
+                            // }
+                            if (communitySet.size() > 0) {
+                                ResultCommunityList rcl = new ResultCommunityList();
+                                rcl.setResultId(rId);
+                                ArrayList<String> communityList = new ArrayList<>();
+                                communityList.addAll(communitySet);
+                                rcl.setCommunityList(communityList);
+                                return rcl;
+                            }
+                            return null;
+                        },
+                        Encoders.bean(ResultCommunityList.class))
+                .filter(r -> r != null)
+                .toJSON()
+                .write()
+                .mode(SaveMode.Overwrite)
+                .option("compression", "gzip")
+                .text(outputPath);
     }
 
 
 
 }
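The substantive change in this hunk is the null handling around value.getMerges(): a result with no match on the LEFT JOIN used to be covered by an empty try/catch, which the new code replaces with an Optional guard. A minimal, self-contained sketch of that pattern follows; the map contents and variable values are illustrative, not taken from the repository.

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

class MergesGuardDemo {
    public static void main(String[] args) {
        // Illustrative stand-in for the organization -> community map used by the job.
        Map<String, List<String>> organizationMap = new HashMap<>();
        organizationMap.put("org::child", Arrays.asList("community-x"));

        // A result with no organization_organization match: merges is null.
        List<String> merges = null;
        Set<String> communitySet = new HashSet<>();

        // The refactored guard: iterate only when the joined set is present.
        Optional<List<String>> orgs = Optional.ofNullable(merges);
        if (orgs.isPresent()) {
            for (String oId : orgs.get()) {
                if (organizationMap.containsKey(oId)) {
                    communitySet.addAll(organizationMap.get(oId));
                }
            }
        }

        System.out.println(communitySet); // [] - no NullPointerException and no catch-all needed
    }
}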

SparkResultToCommunityFromOrganizationJob2.java
@@ -1,8 +1,13 @@
 package eu.dnetlib.dhp.resulttocommunityfromorganization;
 
+import static eu.dnetlib.dhp.PropagationConstant.*;
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.schema.oaf.*;
+import java.util.*;
+import java.util.stream.Collectors;
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.sql.Encoders;
@@ -10,25 +15,21 @@ import org.apache.spark.sql.SaveMode;
 import org.apache.spark.sql.SparkSession;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import java.util.*;
-import java.util.stream.Collectors;
-
-import static eu.dnetlib.dhp.PropagationConstant.*;
-import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
 
 public class SparkResultToCommunityFromOrganizationJob2 {
 
-    private static final Logger log = LoggerFactory.getLogger(SparkResultToCommunityFromOrganizationJob2.class);
+    private static final Logger log =
+            LoggerFactory.getLogger(SparkResultToCommunityFromOrganizationJob2.class);
 
     private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
 
 
     public static void main(String[] args) throws Exception {
-        String jsonConfiguration = IOUtils.toString(SparkResultToCommunityFromOrganizationJob2.class
-                .getResourceAsStream("/eu/dnetlib/dhp/resulttocommunityfromorganization/input_communitytoresult_parameters.json"));
+        String jsonConfiguration =
+                IOUtils.toString(
+                        SparkResultToCommunityFromOrganizationJob2.class.getResourceAsStream(
+                                "/eu/dnetlib/dhp/resulttocommunityfromorganization/input_communitytoresult_parameters.json"));
 
-        final ArgumentApplicationParser parser = new ArgumentApplicationParser(
-                jsonConfiguration);
+        final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
 
         parser.parseArgument(args);
 
@@ -47,73 +48,79 @@ public class SparkResultToCommunityFromOrganizationJob2 {
         final String resultClassName = parser.get("resultTableName");
         log.info("resultTableName: {}", resultClassName);
 
-        final Boolean saveGraph = Optional
-                .ofNullable(parser.get("saveGraph"))
+        final Boolean saveGraph =
+                Optional.ofNullable(parser.get("saveGraph"))
                         .map(Boolean::valueOf)
                         .orElse(Boolean.TRUE);
         log.info("saveGraph: {}", saveGraph);
 
-        Class<? extends Result> resultClazz = (Class<? extends Result>) Class.forName(resultClassName);
+        Class<? extends Result> resultClazz =
+                (Class<? extends Result>) Class.forName(resultClassName);
 
         SparkConf conf = new SparkConf();
         conf.set("hive.metastore.uris", parser.get("hive_metastore_uris"));
 
-        runWithSparkHiveSession(conf, isSparkSessionManaged,
+        runWithSparkHiveSession(
+                conf,
+                isSparkSessionManaged,
                 spark -> {
                     if (isTest(parser)) {
                         removeOutputDir(spark, outputPath);
                     }
                     execPropagation(spark, inputPath, outputPath, resultClazz, possibleupdatespath);
                 });
 
     }
 
-    private static <R extends Result> void execPropagation(SparkSession spark, String inputPath, String outputPath,
-            Class<R> resultClazz, String possibleUpdatesPath) {
-        org.apache.spark.sql.Dataset<ResultCommunityList> possibleUpdates = readResultCommunityList(spark, possibleUpdatesPath);
+    private static <R extends Result> void execPropagation(
+            SparkSession spark,
+            String inputPath,
+            String outputPath,
+            Class<R> resultClazz,
+            String possibleUpdatesPath) {
+        org.apache.spark.sql.Dataset<ResultCommunityList> possibleUpdates =
+                readResultCommunityList(spark, possibleUpdatesPath);
         org.apache.spark.sql.Dataset<R> result = readPathEntity(spark, inputPath, resultClazz);
 
-        result
-                .joinWith(possibleUpdates, result.col("id").equalTo(possibleUpdates.col("resultId")),
+        result.joinWith(
+                        possibleUpdates,
+                        result.col("id").equalTo(possibleUpdates.col("resultId")),
                         "left_outer")
-                .map(value -> {
-                    R ret = value._1();
-                    Optional<ResultCommunityList> rcl = Optional.ofNullable(value._2());
-                    if(rcl.isPresent()){
-                        ArrayList<String> communitySet = rcl.get().getCommunityList();
-                        List<String> contextList = ret.getContext().stream().map(con -> con.getId()).collect(Collectors.toList());
-                        Result res = new Result();
-                        res.setId(ret.getId());
-                        List<Context> propagatedContexts = new ArrayList<>();
-                        for(String cId:communitySet){
-                            if(!contextList.contains(cId)){
-                                Context newContext = new Context();
-                                newContext.setId(cId);
-                                newContext.setDataInfo(Arrays.asList(getDataInfo(PROPAGATION_DATA_INFO_TYPE,
-                                        PROPAGATION_RESULT_COMMUNITY_ORGANIZATION_CLASS_ID, PROPAGATION_RESULT_COMMUNITY_ORGANIZATION_CLASS_NAME)));
-                                propagatedContexts.add(newContext);
-                            }
-                        }
-                        res.setContext(propagatedContexts);
-                        ret.mergeFrom(res);
-                    }
-                    return ret;
-                }, Encoders.bean(resultClazz))
+                .map(
+                        value -> {
+                            R ret = value._1();
+                            Optional<ResultCommunityList> rcl = Optional.ofNullable(value._2());
+                            if (rcl.isPresent()) {
+                                ArrayList<String> communitySet = rcl.get().getCommunityList();
+                                List<String> contextList =
+                                        ret.getContext().stream()
+                                                .map(con -> con.getId())
+                                                .collect(Collectors.toList());
+                                Result res = new Result();
+                                res.setId(ret.getId());
+                                List<Context> propagatedContexts = new ArrayList<>();
+                                for (String cId : communitySet) {
+                                    if (!contextList.contains(cId)) {
+                                        Context newContext = new Context();
+                                        newContext.setId(cId);
+                                        newContext.setDataInfo(
+                                                Arrays.asList(
+                                                        getDataInfo(
+                                                                PROPAGATION_DATA_INFO_TYPE,
+                                                                PROPAGATION_RESULT_COMMUNITY_ORGANIZATION_CLASS_ID,
+                                                                PROPAGATION_RESULT_COMMUNITY_ORGANIZATION_CLASS_NAME)));
+                                        propagatedContexts.add(newContext);
+                                    }
+                                }
+                                res.setContext(propagatedContexts);
+                                ret.mergeFrom(res);
+                            }
+                            return ret;
+                        },
+                        Encoders.bean(resultClazz))
                 .toJSON()
                 .write()
                 .mode(SaveMode.Overwrite)
-                .option("compression","gzip")
+                .option("compression", "gzip")
                 .text(outputPath);
 
     }
 
-    private static org.apache.spark.sql.Dataset<ResultCommunityList> readResultCommunityList(SparkSession spark, String possibleUpdatesPath) {
-        return spark
-                .read()
-                .textFile(possibleUpdatesPath)
-                .map(value -> OBJECT_MAPPER.readValue(value, ResultCommunityList.class), Encoders.bean(ResultCommunityList.class));
-    }
-
 }
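For readability, a small sketch of the decision taken inside the map() above: a community id is propagated to a result only if it is not already among the result's context ids. Plain strings stand in for the oaf Context objects, the ids are illustrative, and the stream pipeline here is just a compact restatement of the for loop in the diff.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

class ContextPropagationDemo {
    public static void main(String[] args) {
        // Context ids already attached to the result (illustrative values).
        List<String> contextList = Arrays.asList("community-a");
        // Community ids suggested for this result by the prepare step.
        List<String> communitySet = Arrays.asList("community-a", "community-b");

        // Keep only the communities that are not yet present; each of these
        // would become a new Context entry carrying the propagation DataInfo.
        List<String> propagated =
                communitySet.stream()
                        .filter(cId -> !contextList.contains(cId))
                        .collect(Collectors.toList());

        System.out.println(propagated); // [community-b]
    }
}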