package eu.dnetlib.dhp.oa.graph.dump.subset;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.util.*;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;
import com.jayway.jsonpath.DocumentContext;
import com.jayway.jsonpath.JsonPath;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Constants;
import eu.dnetlib.dhp.oa.graph.dump.ResultMapper;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.oa.graph.dump.subset.criteria.VerbResolver;
import eu.dnetlib.dhp.oa.graph.dump.subset.criteria.VerbResolverFactory;
import eu.dnetlib.dhp.oa.graph.dump.subset.selectionconstraints.Param;
import eu.dnetlib.dhp.oa.graph.dump.subset.selectionconstraints.SelectionConstraints;
import eu.dnetlib.dhp.oa.model.graph.*;
import eu.dnetlib.dhp.schema.oaf.*;

/**
 * Spark job that fires the dump for the result entities.
 */
public class SparkDumpResult implements Serializable {
	private static final Logger log = LoggerFactory.getLogger(SparkDumpResult.class);
	private static final VerbResolver resolver = VerbResolverFactory.newInstance();

	public static final String COMPRESSION = "compression";
	public static final String GZIP = "gzip";

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				SparkDumpResult.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/dump/input_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		final String resultType = parser.get("resultType");
		log.info("resultType: {}", resultType);

		final String resultClassName = parser.get("resultTableName");
		log.info("resultTableName: {}", resultClassName);

		// optional map from constraint field names to JSONPath expressions
		Optional<String> pathString = Optional.ofNullable(parser.get("pathMap"));
		HashMap<String, String> pathMap = null;
		if (pathString.isPresent()) {
			pathMap = new Gson().fromJson(parser.get("pathMap"), HashMap.class);
			log.info("pathMap: {}", new Gson().toJson(pathMap));
		}

		// optional selection criteria to restrict the dumped subset
		final Optional<String> parameter = Optional.ofNullable(parser.get("selectionCriteria"));
		SelectionConstraints selectionConstraints = null;
		if (parameter.isPresent()) {
			selectionConstraints = new ObjectMapper().readValue(parameter.get(), SelectionConstraints.class);
			selectionConstraints.addResolver(resolver);
		}

		Class<? extends Result> inputClazz = (Class<? extends Result>) Class.forName(resultClassName);

		run(
			isSparkSessionManaged, inputPath, outputPath, pathMap, selectionConstraints, inputClazz,
			resultType);
	}

	private static void run(Boolean isSparkSessionManaged, String inputPath, String outputPath,
		HashMap<String, String> pathMap, SelectionConstraints selectionConstraints,
		Class<? extends Result> inputClazz, String resultType) {
		SparkConf conf = new SparkConf();
		// effectively final copies for capture inside the lambda below
		HashMap<String, String> finalPathMap = pathMap;
		SelectionConstraints finalSelectionConstraints = selectionConstraints;
		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				Utils.removeOutputDir(spark, outputPath + "/original/" + resultType);
				Utils.removeOutputDir(spark, outputPath + "/dump/" + resultType);
				resultDump(
					spark, inputPath, outputPath, inputClazz, finalPathMap, finalSelectionConstraints,
					resultType);
			});
	}

	public static <I extends Result> void resultDump(
		SparkSession spark,
		String inputPath,
		String outputPath,
		Class<I> inputClazz,
		Map<String, String> pathMap,
		SelectionConstraints selectionConstraints,
		String resultType) {
		// first pass: filter the input and materialize the selected subset in the original model
		Utils
			.readPath(spark, inputPath, inputClazz)
			.map(
				(MapFunction<I, I>) value -> filterResult(
					value, pathMap, selectionConstraints, inputClazz, resultType),
				Encoders.bean(inputClazz))
			.filter(Objects::nonNull)
			.write()
			.mode(SaveMode.Overwrite)
			.option(COMPRESSION, GZIP)
			.json(outputPath + "/original/" + resultType);

		// second pass: map the selected subset to the dump model and serialize it as JSON text
		Utils
			.readPath(spark, outputPath + "/original/" + resultType, inputClazz)
			.map(
				(MapFunction<I, GraphResult>) value -> (GraphResult) ResultMapper
					.map(value, null, Constants.DUMPTYPE.COMPLETE.getType()),
				Encoders.bean(GraphResult.class))
			.map(
				(MapFunction<GraphResult, String>) r -> new ObjectMapper().writeValueAsString(r),
				Encoders.STRING())
			.write()
			.mode(SaveMode.Overwrite)
			.option(COMPRESSION, GZIP)
			.text(outputPath + "/dump/" + resultType);
	}

	private static <I extends Result> I filterResult(I value, Map<String, String> pathMap,
		SelectionConstraints selectionConstraints, Class<I> inputClazz, String resultType) {
		// drop results without provenance, deleted by inference, or invisible
		Optional<DataInfo> odInfo = Optional.ofNullable(value.getDataInfo());
		if (!odInfo.isPresent()) {
			return null;
		}
		if (Boolean.TRUE.equals(odInfo.get().getDeletedbyinference())
			|| Boolean.TRUE.equals(odInfo.get().getInvisible())) {
			return null;
		}
		// keep only results of the requested type
		if (!isCompatible(value.getResulttype().getClassid(), resultType)) {
			return null;
		}
		// apply the user-provided selection constraints, if any: each pathMap entry is a
		// JSONPath evaluated against the serialized result and collected into a Param
		if (selectionConstraints != null) {
			Param param = new Param();
			String json = new Gson().toJson(value, inputClazz);
			DocumentContext jsonContext = JsonPath.parse(json);

			for (String key : pathMap.keySet()) {
				try {
					param.insert(key, jsonContext.read(pathMap.get(key)));
				} catch (com.jayway.jsonpath.PathNotFoundException e) {
					param.insert(key, new ArrayList<>());
				}
			}
			if (!selectionConstraints.verifyCriteria(param)) {
				return null;
			}
		}
		return value;
	}

	private static boolean isCompatible(String classid, String resultType) {
		return classid.equals(resultType)
			|| (classid.equals("other") && resultType.equals("otherresearchproduct"));
	}
}
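
/*
 * Illustrative configuration sketch, not part of the job itself. From filterResult above it is
 * clear that "pathMap" maps constraint field names to JSONPath expressions evaluated against the
 * serialized result, and that "selectionCriteria" deserializes into SelectionConstraints. The
 * exact JSON schema of SelectionConstraints lives in the selectionconstraints package and is not
 * shown here, so the shapes below are assumptions for illustration only:
 *
 *   pathMap (example JSONPaths over the OAF Result model):
 *     {"subject": "$.subject[*].value", "country": "$.country[*].classid"}
 *
 *   selectionCriteria (hypothetical shape):
 *     {"criteria": [{"constraint": [{"verb": "contains", "field": "subject", "value": "covid"}]}]}
 *
 * With such inputs, a result is kept only when it matches resultType and verifyCriteria accepts
 * the values extracted through the pathMap.
 */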