forked from D-Net/dnet-hadoop

[Dump Funders] new code for the dump of products related to funders

parent 5331dea71b
commit f738acb85a
SparkDumpFunderResults.java (package eu.dnetlib.dhp.oa.graph.dump.funderresults):

@@ -1,17 +1,7 @@
 
 package eu.dnetlib.dhp.oa.graph.dump.funderresults;
 
-import eu.dnetlib.dhp.application.ArgumentApplicationParser;
-import eu.dnetlib.dhp.oa.graph.dump.Utils;
-import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
-import eu.dnetlib.dhp.schema.dump.oaf.community.Project;
-import org.apache.commons.io.IOUtils;
-import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.function.FlatMapFunction;
-import org.apache.spark.api.java.function.MapFunction;
-import org.apache.spark.sql.*;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
 
 import java.io.Serializable;
 import java.util.List;
@@ -19,7 +9,21 @@ import java.util.Objects;
 import java.util.Optional;
 import java.util.stream.Collectors;
 
-import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+import org.apache.commons.io.IOUtils;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.FlatMapFunction;
+import org.apache.spark.api.java.function.ForeachFunction;
+import org.apache.spark.api.java.function.MapFunction;
+import org.apache.spark.sql.*;
+import org.jetbrains.annotations.NotNull;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.oa.graph.dump.Utils;
+import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
+import eu.dnetlib.dhp.schema.dump.oaf.community.Funder;
+import eu.dnetlib.dhp.schema.dump.oaf.community.Project;
 
 /**
  * Splits the dumped results by funder and stores them in a folder named as the funder nsp (for all the funders, but the EC
@@ -50,7 +54,6 @@ public class SparkDumpFunderResults implements Serializable {
 		final String outputPath = parser.get("outputPath");
 		log.info("outputPath: {}", outputPath);
 
-
 		SparkConf conf = new SparkConf();
 
 		runWithSparkSession(
@@ -70,16 +73,16 @@ public class SparkDumpFunderResults implements Serializable {
 			.union(Utils.readPath(spark, inputPath + "/otherresearchproduct", CommunityResult.class))
 			.union(Utils.readPath(spark, inputPath + "/software", CommunityResult.class));
 
+		log.info("Number of result {}", result.count());
 
-		List<String> funderList = result.flatMap((FlatMapFunction<CommunityResult, String>) cr ->
-			cr.getProjects().stream().map(p -> {
-				String fName = p.getFunder().getShortName();
-				if (fName.equalsIgnoreCase("ec")) {
-					fName += "_" + p.getFunder().getFundingStream();
-				}
-				return fName;
-			}).collect(Collectors.toList()).iterator()
-			, Encoders.STRING()).distinct().collectAsList();
+		Dataset<String> tmp = result
+			.flatMap((FlatMapFunction<CommunityResult, String>) cr -> cr.getProjects().stream().map(p -> {
+				return getFunderName(p);
+
+			}).collect(Collectors.toList()).iterator(), Encoders.STRING())
+			.distinct();
+		tmp.foreach((ForeachFunction<String>) f -> log.info("Found Funder {}", f));
+		List<String> funderList = tmp.collectAsList();
 
 		funderList.forEach(funder -> {
 			dumpResults(funder, result, outputPath);
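The rewritten block above keeps the distinct funder names in a Dataset<String> before collecting them, so they can be logged and reused. A minimal self-contained sketch of the same flatMap-with-explicit-Encoder pattern, using plain strings in place of CommunityResult (class name and sample values are invented for illustration):

import java.util.Arrays;
import java.util.List;

import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

public class DistinctFunderSketch {
	public static void main(String[] args) {
		SparkSession spark = SparkSession
			.builder()
			.appName("distinct-funder-sketch")
			.master("local[*]")
			.getOrCreate();

		// Each string stands in for one CommunityResult; the comma-separated
		// tokens stand in for the funder names of its projects.
		Dataset<String> result = spark
			.createDataset(Arrays.asList("NWO,EC_H2020", "EC_H2020", "SNSF"), Encoders.STRING());

		// Same shape as the patch: flatMap takes an explicit Encoder because it
		// produces a typed Dataset, and distinct() deduplicates on the cluster
		// before anything is collected on the driver.
		Dataset<String> tmp = result
			.flatMap(
				(FlatMapFunction<String, String>) r -> Arrays.asList(r.split(",")).iterator(),
				Encoders.STRING())
			.distinct();

		List<String> funderList = tmp.collectAsList();
		funderList.forEach(System.out::println); // NWO, EC_H2020, SNSF in some order

		spark.stop();
	}
}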
@@ -87,6 +90,36 @@ public class SparkDumpFunderResults implements Serializable {
 
 	}
 
+	@NotNull
+	private static String getFunderName(Project p) {
+		Optional<Funder> ofunder = Optional.ofNullable(p.getFunder());
+		if (ofunder.isPresent()) {
+			String fName = ofunder.get().getShortName();
+			if (fName.equalsIgnoreCase("ec")) {
+				fName += "_" + ofunder.get().getFundingStream();
+			}
+			return fName;
+		} else {
+			String fName = p.getId().substring(3, p.getId().indexOf("_")).toUpperCase();
+			if (fName.equalsIgnoreCase("ec")) {
+				if (p.getId().contains("h2020")) {
+					fName += "_H2020";
+				} else {
+					fName += "_FP7";
+				}
+			} else if (fName.equalsIgnoreCase("conicytf")) {
+				fName = "CONICYT";
+			} else if (fName.equalsIgnoreCase("dfgf")) {
+				fName = "DFG";
+			} else if (fName.equalsIgnoreCase("tubitakf")) {
+				fName = "TUBITAK";
+			} else if (fName.equalsIgnoreCase("euenvagency")) {
+				fName = "EEA";
+			}
+			return fName;
+		}
+	}
+
 	private static void dumpResults(String funder, Dataset<CommunityResult> results, String outputPath) {
 
 		results.map((MapFunction<CommunityResult, CommunityResult>) r -> {
@@ -94,10 +127,7 @@ public class SparkDumpFunderResults implements Serializable {
 				return null;
 			}
 			for (Project p : r.getProjects()) {
-				String fName = p.getFunder().getShortName();
-				if (fName.equalsIgnoreCase("ec")){
-					fName += "_" + p.getFunder().getFundingStream();
-				}
+				String fName = getFunderName(p);
 				if (fName.equalsIgnoreCase(funder)) {
 					return r;
 				}
@@ -111,6 +141,4 @@ public class SparkDumpFunderResults implements Serializable {
 			.json(outputPath + "/" + funder);
 	}
 
-
-
 }
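When a project carries no Funder object, the new getFunderName falls back to parsing the funder's namespace prefix out of the project identifier and normalizing a handful of known short names. A standalone sketch of that fallback branch; the identifiers below are invented for illustration, and the mapping mirrors the method added above:

public class FunderNameFallbackSketch {

	// Mirrors the else-branch of getFunderName: take the prefix between the
	// "40|" head of the project id and the first "_", uppercase it, then
	// normalize the known special cases.
	static String fromProjectId(String id) {
		String fName = id.substring(3, id.indexOf("_")).toUpperCase();
		if (fName.equalsIgnoreCase("ec")) {
			fName += id.contains("h2020") ? "_H2020" : "_FP7";
		} else if (fName.equalsIgnoreCase("conicytf")) {
			fName = "CONICYT";
		} else if (fName.equalsIgnoreCase("dfgf")) {
			fName = "DFG";
		} else if (fName.equalsIgnoreCase("tubitakf")) {
			fName = "TUBITAK";
		} else if (fName.equalsIgnoreCase("euenvagency")) {
			fName = "EEA";
		}
		return fName;
	}

	public static void main(String[] args) {
		// Invented identifiers, shaped like "40|<nsprefix>::<suffix>".
		System.out.println(fromProjectId("40|tubitakf_::0001")); // TUBITAK
		System.out.println(fromProjectId("40|conicytf_::0002")); // CONICYT
		System.out.println(fromProjectId("40|nwo______::0003")); // NWO
		System.out.println(fromProjectId("40|ec_h2020_::0004")); // EC_H2020
	}
}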
SparkDumpFunderResults2.java (package eu.dnetlib.dhp.oa.graph.dump.funderresults):

@@ -1,10 +1,14 @@
 
 package eu.dnetlib.dhp.oa.graph.dump.funderresults;
 
-import eu.dnetlib.dhp.application.ArgumentApplicationParser;
-import eu.dnetlib.dhp.oa.graph.dump.Utils;
-import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
-import eu.dnetlib.dhp.schema.dump.oaf.community.Project;
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+
+import java.io.Serializable;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.stream.Collectors;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.FlatMapFunction;
@@ -13,13 +17,10 @@ import org.apache.spark.sql.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.Serializable;
-import java.util.List;
-import java.util.Objects;
-import java.util.Optional;
-import java.util.stream.Collectors;
-
-import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.oa.graph.dump.Utils;
+import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
+import eu.dnetlib.dhp.schema.dump.oaf.community.Project;
 
 /**
  * Splits the dumped results by funder and stores them in a folder named as the funder nsp (for all the funders, but the EC
@@ -72,16 +73,16 @@ public class SparkDumpFunderResults2 implements Serializable {
 			.union(Utils.readPath(spark, inputPath + "/otherresearchproduct", CommunityResult.class))
 			.union(Utils.readPath(spark, inputPath + "/software", CommunityResult.class));
 
-
-		List<String> funderList = result.flatMap((FlatMapFunction<CommunityResult, String>) cr ->
-			cr.getProjects().stream().map(p -> {
+		List<String> funderList = result
+			.flatMap((FlatMapFunction<CommunityResult, String>) cr -> cr.getProjects().stream().map(p -> {
 				String fName = p.getFunder().getShortName();
 				if (fName.equalsIgnoreCase("ec")) {
 					fName += "_" + p.getFunder().getFundingStream();
 				}
 				return fName;
-			}).collect(Collectors.toList()).iterator()
-			, Encoders.STRING()).distinct().collectAsList();
+			}).collect(Collectors.toList()).iterator(), Encoders.STRING())
+			.distinct()
+			.collectAsList();
 
 		funderList.forEach(funder -> {
 
@@ -114,6 +115,4 @@ public class SparkDumpFunderResults2 implements Serializable {
 			.json(outputPath + "/" + funder);
 	}
 
-
-
 }
Oozie workflow (relation-selection step):

@@ -298,6 +298,7 @@
 				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
 				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
 				--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
+				--conf spark.sql.shuffle.partitions=3840
 			</spark-opts>
 			<arg>--sourcePath</arg><arg>${sourcePath}</arg>
 			<arg>--outputPath</arg><arg>${workingDir}/validrelation</arg>
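The only change here pins spark.sql.shuffle.partitions for the relation-selection step, presumably because the default of 200 shuffle partitions is too coarse for graph-sized joins. A sketch of the equivalent programmatic setting (class name and master are placeholders):

import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

public class ShufflePartitionsSketch {
	public static void main(String[] args) {
		// Equivalent of --conf spark.sql.shuffle.partitions=3840 in <spark-opts>:
		// fixes how many partitions Spark SQL uses for shuffles in joins and
		// aggregations (the default is 200).
		SparkConf conf = new SparkConf()
			.setAppName("shuffle-partitions-sketch")
			.setMaster("local[*]")
			.set("spark.sql.shuffle.partitions", "3840");

		SparkSession spark = SparkSession.builder().config(conf).getOrCreate();
		System.out.println(spark.conf().get("spark.sql.shuffle.partitions")); // 3840
		spark.stop();
	}
}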
Oozie workflow (per-result-type dump steps, adding the community map parameter):

@@ -136,6 +136,7 @@
 			<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
 			<arg>--outputPath</arg><arg>${workingDir}/result/publication</arg>
 			<arg>--graphPath</arg><arg>${workingDir}/preparedInfo</arg>
+			<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
 		</spark>
 		<ok to="join_link"/>
 		<error to="Kill"/>

@@ -162,6 +163,7 @@
 			<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
 			<arg>--outputPath</arg><arg>${workingDir}/result/dataset</arg>
 			<arg>--graphPath</arg><arg>${workingDir}/preparedInfo</arg>
+			<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
 		</spark>
 		<ok to="join_link"/>
 		<error to="Kill"/>

@@ -188,6 +190,7 @@
 			<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
 			<arg>--outputPath</arg><arg>${workingDir}/result/otherresearchproduct</arg>
 			<arg>--graphPath</arg><arg>${workingDir}/preparedInfo</arg>
+			<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
 		</spark>
 		<ok to="join_link"/>
 		<error to="Kill"/>

@@ -214,6 +217,7 @@
 			<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
 			<arg>--outputPath</arg><arg>${workingDir}/result/software</arg>
 			<arg>--graphPath</arg><arg>${workingDir}/preparedInfo</arg>
+			<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
 		</spark>
 		<ok to="join_link"/>
 		<error to="Kill"/>
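Each of the four dump steps now receives a --communityMapPath argument. On the Java side it would be read by name, just as the diff reads outputPath via parser.get("outputPath"). A self-contained sketch with a hand-rolled stand-in for the argument parser (the real jobs use ArgumentApplicationParser with a JSON parameter definition, not shown in this diff):

import java.util.HashMap;
import java.util.Map;

public class CommunityMapArgSketch {
	public static void main(String[] args) {
		// Stand-in for ArgumentApplicationParser: collect --name value pairs,
		// then look the new parameter up by name.
		Map<String, String> parsed = new HashMap<>();
		for (int i = 0; i + 1 < args.length; i += 2) {
			parsed.put(args[i].replaceFirst("^--", ""), args[i + 1]);
		}
		String communityMapPath = parsed.get("communityMapPath");
		System.out.println("communityMapPath: " + communityMapPath);
	}
}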
SplitPerFunderTest.java:

@@ -75,7 +75,6 @@ public class SplitPerFunderTest {
 			.getResource("/eu/dnetlib/dhp/oa/graph/dump/funderresource/ext")
 			.getPath();
 
-
 		SparkDumpFunderResults.main(new String[] {
 			"-isSparkSessionManaged", Boolean.FALSE.toString(),
 			"-outputPath", workingDir.toString() + "/split",

@@ -147,9 +146,6 @@ public class SplitPerFunderTest {
 			.map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));
 		Assertions.assertEquals(1, tmp.count());
 
-
-
 	}
 
-
 }
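Whitespace aside, the test still drives SparkDumpFunderResults end to end and counts the records under each funder folder. A sketch of that per-folder check in isolation; the output path is a placeholder, and the "CONICYT" folder name follows from the normalization in getFunderName:

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;

public class FunderFolderCountSketch {
	public static void main(String[] args) {
		SparkSession spark = SparkSession
			.builder()
			.appName("funder-folder-count")
			.master("local[*]")
			.getOrCreate();
		JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

		// Each funder gets its own folder under the split output path, with one
		// JSON-serialized CommunityResult per line.
		String workingDir = "/tmp/split"; // placeholder for the test's working dir
		JavaRDD<String> records = sc.textFile(workingDir + "/CONICYT");
		System.out.println(records.count()); // the test asserts exact counts here

		spark.stop();
	}
}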