Extended the code to select the relations between the results in the EOSC set. Also added the related step to the workflow.
This commit is contained in:
parent 57a0b96419
commit d5420960d1

@@ -0,0 +1,119 @@
package eu.dnetlib.dhp.oa.graph.dump.eosc;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.eosc.model.EoscResult;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.*;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.api.java.function.MapGroupsFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.Tuple2;

import java.io.Serializable;
import java.util.*;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
/**
 * @author miriam.baglioni
 * @Date 12/01/23
 */
public class SparkSelectRelation implements Serializable {
	private static final Logger log = LoggerFactory.getLogger(SparkSelectRelation.class);

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				SparkSelectRelation.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/dump/input_relationdump_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		// optional semicolon-separated list of relation classes to exclude
		Optional<String> rs = Optional.ofNullable(parser.get("removeSet"));
		final Set<String> removeSet = new HashSet<>();
		if (rs.isPresent()) {
			Collections.addAll(removeSet, rs.get().split(";"));
		}

		SparkConf conf = new SparkConf();

		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				selectSubset(spark, inputPath, outputPath, removeSet);
			});
	}

	private static void selectSubset(SparkSession spark, String inputPath, String outputPath, Set<String> removeSet) {
		// keep relations not deleted by inference and whose relClass is not in the remove set
		Dataset<Relation> relation = Utils
			.readPath(spark, inputPath + "/relation", Relation.class)
			.filter(
				(FilterFunction<Relation>) r -> !r.getDataInfo().getDeletedbyinference()
					&& !removeSet.contains(r.getRelClass()));

		// identifiers of the results already dumped for EOSC
		// (publications, datasets, software, other research products)
		Dataset<String> resultIds = Utils
			.readPath(spark, outputPath + "/publication", EoscResult.class)
			.map((MapFunction<EoscResult, String>) p -> p.getId(), Encoders.STRING())
			.union(
				Utils
					.readPath(spark, outputPath + "/dataset", EoscResult.class)
					.map((MapFunction<EoscResult, String>) d -> d.getId(), Encoders.STRING()))
			.union(
				Utils
					.readPath(spark, outputPath + "/software", EoscResult.class)
					.map((MapFunction<EoscResult, String>) s -> s.getId(), Encoders.STRING()))
			.union(
				Utils
					.readPath(spark, outputPath + "/otherresearchproduct", EoscResult.class)
					.map((MapFunction<EoscResult, String>) o -> o.getId(), Encoders.STRING()));

		// select result -> result relations: the two joins keep a relation only
		// when both its source and its target appear among the selected results
		Dataset<Relation> relResultResult = relation
			.joinWith(resultIds, relation.col("source").equalTo(resultIds.col("value")))
			.map((MapFunction<Tuple2<Relation, String>, Relation>) t2 -> t2._1(), Encoders.bean(Relation.class));

		relResultResult
			.joinWith(resultIds, relResultResult.col("target").equalTo(resultIds.col("value")))
			.map((MapFunction<Tuple2<Relation, String>, Relation>) t2 -> t2._1(), Encoders.bean(Relation.class))
			.write()
			.option("compression", "gzip")
			.mode(SaveMode.Overwrite)
			.json(outputPath + "/relation");
	}
}
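
For context, `Utils.readPath` is the dhp-common helper used throughout this class. A minimal sketch of its assumed contract (reading a JSON-lines dataset into typed beans; the shared Jackson `OBJECT_MAPPER` is an assumption, not shown in this diff):

	// Sketch of the assumed Utils.readPath contract: read a JSON-lines text
	// file and deserialize each line into the given bean class.
	public static <R> Dataset<R> readPath(SparkSession spark, String inputPath, Class<R> clazz) {
		return spark
			.read()
			.textFile(inputPath)
			.map(
				// OBJECT_MAPPER is assumed to be a shared Jackson ObjectMapper
				(MapFunction<String, R>) value -> OBJECT_MAPPER.readValue(value, clazz),
				Encoders.bean(clazz));
	}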
@@ -547,7 +547,33 @@
			<error to="Kill"/>
		</action>

-		<join name="join_extend" to="make_archive"/>
+		<join name="join_extend" to="select_relation"/>

		<action name="select_relation">
			<spark xmlns="uri:oozie:spark-action:0.2">
				<master>yarn</master>
				<mode>cluster</mode>
				<name>Select the set of relations between the results in the selected set</name>
				<class>eu.dnetlib.dhp.oa.graph.dump.eosc.SparkSelectRelation</class>
				<jar>dump-${projectVersion}.jar</jar>
				<spark-opts>
					--executor-memory=${sparkExecutorMemory}
					--executor-cores=${sparkExecutorCores}
					--driver-memory=${sparkDriverMemory}
					--conf spark.extraListeners=${spark2ExtraListeners}
					--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
					--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
					--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
					--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
				</spark-opts>
				<arg>--sourcePath</arg><arg>${sourcePath}</arg>
				<arg>--outputPath</arg><arg>${workingDir}/dump</arg>
				<arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
				<arg>--dumpType</arg><arg>eosc</arg>
			</spark>
			<ok to="dump_relation"/>
			<error to="Kill"/>
		</action>

		<action name="make_archive">
			<java>
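
Note: as handled in `main` above, the job also accepts an optional `removeSet` parameter, a semicolon-separated list of relation classes to exclude. The `select_relation` action does not set it; if needed, it could be passed with an extra argument pair such as `<arg>--removeSet</arg><arg>merges;isMergedIn</arg>` (hypothetical example values).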