forked from D-Net/dnet-hadoop
refactoring
parent 4ae6fba01d
commit 72e5aa3b42
@@ -4,6 +4,11 @@ package eu.dnetlib.dhp.resulttoorganizationfrominstrepo;
 import static eu.dnetlib.dhp.PropagationConstant.*;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
 
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Optional;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.spark.SparkConf;
@@ -22,11 +27,6 @@ import eu.dnetlib.dhp.schema.oaf.Datasource;
 import eu.dnetlib.dhp.schema.oaf.Organization;
 import eu.dnetlib.dhp.schema.oaf.Relation;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Optional;
-
 public class PrepareResultInstRepoAssociation {
 
 	private static final Logger log = LoggerFactory.getLogger(PrepareResultInstRepoAssociation.class);
@@ -56,7 +56,8 @@ public class PrepareResultInstRepoAssociation {
 		final String alreadyLinkedPath = parser.get("alreadyLinkedPath");
 		log.info("alreadyLinkedPath {}: ", alreadyLinkedPath);
 
-		List<String> blacklist = Optional.ofNullable(parser.get("blacklist"))
+		List<String> blacklist = Optional
+			.ofNullable(parser.get("blacklist"))
 			.map(v -> Arrays.asList(v.split(";")))
 			.orElse(new ArrayList<>());
 
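For context, the reformatted Optional chain parses an optional, semicolon-separated "blacklist" argument into a list, falling back to an empty list when the parameter is absent. Below is a minimal standalone sketch of that pattern; the parser and parameter name come from the diff, while the helper method and sample value are hypothetical.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;

public class BlacklistParsingSketch {

	// Stands in for parser.get("blacklist"); may return null when the argument is missing.
	// The returned value is a hypothetical sample, not taken from the diff.
	private static String getArgument(String name) {
		return "datasourceIdOne;datasourceIdTwo";
	}

	public static void main(String[] args) {
		// Same shape as the chain introduced in the commit:
		// split on ';' when the argument is present, otherwise use an empty list.
		List<String> blacklist = Optional
			.ofNullable(getArgument("blacklist"))
			.map(v -> Arrays.asList(v.split(";")))
			.orElse(new ArrayList<>());

		blacklist.forEach(System.out::println);
	}
}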
@@ -91,14 +92,13 @@ public class PrepareResultInstRepoAssociation {
 	private static void prepareDatasourceOrganization(
 		SparkSession spark, String datasourceOrganizationPath, List<String> blacklist) {
 		String blacklisted = "";
-		if(blacklist.size() > 0 ){
+		if (blacklist.size() > 0) {
 			blacklisted = " AND d.id != '" + blacklist.get(0) + "'";
 			for (int i = 1; i < blacklist.size(); i++) {
 				blacklisted += " AND d.id != '" + blacklist.get(i) + "'";
 			}
 		}
 
-
 		String query = "SELECT source datasourceId, target organizationId "
 			+ "FROM ( SELECT id "
 			+ "FROM datasource "
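The change inside prepareDatasourceOrganization is formatting only; the surrounding loop appends one "AND d.id != '<id>'" predicate per blacklisted datasource to the SQL query. A minimal sketch of that clause-building logic in isolation follows; the wrapper class, method name, and sample ids are hypothetical and only illustrate the loop shown in the diff.

import java.util.Arrays;
import java.util.List;

public class BlacklistClauseSketch {

	// Builds the extra WHERE predicates the same way the loop in the diff does.
	static String buildBlacklistClause(List<String> blacklist) {
		String blacklisted = "";
		if (blacklist.size() > 0) {
			blacklisted = " AND d.id != '" + blacklist.get(0) + "'";
			for (int i = 1; i < blacklist.size(); i++) {
				blacklisted += " AND d.id != '" + blacklist.get(i) + "'";
			}
		}
		return blacklisted;
	}

	public static void main(String[] args) {
		// Hypothetical datasource ids; prints:
		//  AND d.id != 'dsIdOne' AND d.id != 'dsIdTwo'
		System.out.println(buildBlacklistClause(Arrays.asList("dsIdOne", "dsIdTwo")));
	}
}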