Compare commits

...

4 Commits

5 changed files with 9 additions and 30 deletions

View File

@@ -6,8 +6,6 @@ import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
 import java.io.Serializable;
 import java.util.*;
-import javax.rmi.CORBA.Util;
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.FilterFunction;
@@ -19,24 +17,8 @@ import org.apache.spark.sql.SparkSession;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.gson.Gson;
-import com.jayway.jsonpath.DocumentContext;
-import com.jayway.jsonpath.JsonPath;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
-import eu.dnetlib.dhp.oa.graph.dump.Constants;
-import eu.dnetlib.dhp.oa.graph.dump.ResultMapper;
 import eu.dnetlib.dhp.oa.graph.dump.Utils;
-import eu.dnetlib.dhp.oa.graph.dump.subset.MasterDuplicate;
-import eu.dnetlib.dhp.oa.graph.dump.subset.SparkDumpResult;
-import eu.dnetlib.dhp.oa.graph.dump.subset.criteria.VerbResolver;
-import eu.dnetlib.dhp.oa.graph.dump.subset.criteria.VerbResolverFactory;
-import eu.dnetlib.dhp.oa.graph.dump.subset.selectionconstraints.Param;
-import eu.dnetlib.dhp.oa.graph.dump.subset.selectionconstraints.SelectionConstraints;
-import eu.dnetlib.dhp.oa.model.graph.GraphResult;
-import eu.dnetlib.dhp.schema.oaf.DataInfo;
-import eu.dnetlib.dhp.schema.oaf.KeyValue;
 import scala.Tuple2;
 /**

View File

@@ -98,10 +98,13 @@ public class SparkDumpFunderResults implements Serializable {
 if (ofunder.isPresent()) {
 	String fName = ofunder.get().getShortName();
 	if (StringUtil.isNullOrEmpty(fName))
-		return ofunder.get().getName();
-	if (fName.equalsIgnoreCase("ec")) {
+		if (p.getId().indexOf("_") < 0)
+			return p.getId().substring(0, p.getId().indexOf(":"));
+		else
+			return p.getId().substring(0, p.getId().indexOf("_"));
+	// ofunder.get().getName();
+	if (fName.equalsIgnoreCase("ec"))
 		fName += "_" + ofunder.get().getFundingStream();
-	}
 	return fName;
 } else {
 	String fName = p.getId().substring(0, p.getId().indexOf("_")).toUpperCase();
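
The net effect of this hunk: when the funder's short name is empty, the label is now derived from the project id prefix rather than falling back to the funder's long name (the old return survives only as a comment). A minimal standalone sketch of that derivation, assuming project ids carry a funder prefix terminated by "_" or ":"; the class name and sample ids below are hypothetical:

// Sketch of the patched fallback, outside the Spark job.
// The sample ids are hypothetical; real project ids may differ.
public class FunderPrefixSketch {

	// Mirrors the new branch: derive the funder label from the project id.
	static String funderFromId(String projectId) {
		if (projectId.indexOf("_") < 0)
			// no underscore in the id: take everything before the first ':'
			return projectId.substring(0, projectId.indexOf(":"));
		else
			// otherwise take everything before the first '_'
			return projectId.substring(0, projectId.indexOf("_"));
	}

	public static void main(String[] args) {
		System.out.println(funderFromId("nsf_________::abc123")); // prints: nsf
		System.out.println(funderFromId("sfi::def456"));          // prints: sfi
	}
}

This also keeps the missing-short-name case consistent with the existing else branch, which already builds the label from the id prefix.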

View File

@@ -82,7 +82,8 @@ public class SparkSelectSubset implements Serializable {
 	.readPath(spark, inputPath + "/relation", Relation.class)
 	.filter(
 		(FilterFunction<Relation>) r -> !r.getDataInfo().getDeletedbyinference()
-			&& !removeSet.contains(r.getRelClass()));
+			&& !removeSet.contains(r.getRelClass()) &&
+			!r.getRelType().equals("resultService"));
 Dataset<String> resultIds = Utils
 	.readPath(spark, outputPath + "/original/publication", Publication.class)
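
The extra condition excludes result-to-service links from the selected subset. A plain-Java sketch of the tightened predicate, using a stripped-down stand-in for Relation (hypothetical; the real class comes from the dnet schema library, and the real predicate runs as a Spark FilterFunction):

import java.util.Set;

public class RelationFilterSketch {

	// Hypothetical stand-in for eu.dnetlib.dhp.schema.oaf.Relation.
	record Relation(boolean deletedByInference, String relClass, String relType) {}

	// Mirrors the patched filter: drop relations deleted by inference,
	// relations whose relClass is in the remove set, and now also
	// relations of type "resultService".
	static boolean keep(Relation r, Set<String> removeSet) {
		return !r.deletedByInference()
			&& !removeSet.contains(r.relClass())
			&& !r.relType().equals("resultService");
	}

	public static void main(String[] args) {
		Set<String> removeSet = Set.of("merges", "isMergedIn");
		System.out.println(keep(new Relation(false, "cites", "resultResult"), removeSet));  // true
		System.out.println(keep(new Relation(false, "cites", "resultService"), removeSet)); // false
	}
}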

View File

@@ -24,18 +24,10 @@
 	<name>metadata</name>
 	<description>the metadata associated to the deposition</description>
 </property>
-<property>
-	<name>depositionType</name>
-	<description>the type of deposition to perform: "new" for a brand new deposition, "version" for a new version of a published deposition (the concept record id must be provided), "upload" to add content to an already open deposition (the deposition id must be provided)</description>
-</property>
 <property>
 	<name>conceptRecordId</name>
 	<description>for a new version, the id of the record of the old deposition</description>
 </property>
-<property>
-	<name>depositionId</name>
-	<description>the id of an open deposition to which content has to be added</description>
-</property>
 <property>
 	<name>sparkDriverMemory</name>
 	<description>memory for driver process</description>

View File

@@ -141,6 +141,7 @@
 <decision name="only_upload">
 	<switch>
 		<case to="send_zenodo">${wf:conf('onlyUpload') eq true}</case>
+		<case to="make_archive">${wf:conf('makeArchive') eq true}</case>
 		<default to="reset_outputpath"/>
 	</switch>
 </decision>
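
A note on the added case, assuming standard Oozie decision semantics (cases are evaluated in order and the first true predicate wins): a run with onlyUpload set to true still goes straight to send_zenodo; otherwise, if makeArchive is true, the workflow now jumps directly to make_archive instead of falling through to reset_outputpath and redoing the dump.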