diff --git a/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/raw_all/oozie_app/workflow.xml b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/raw_all/oozie_app/workflow.xml
index e7320de3b3..321ca40909 100644
--- a/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/raw_all/oozie_app/workflow.xml
+++ b/dhp-workflows/dhp-graph-mapper/src/main/resources/eu/dnetlib/dhp/oa/graph/raw_all/oozie_app/workflow.xml
@@ -548,42 +548,7 @@
-    <join name="wait_graphs" to="decisions_patch_relations"/>
-
-    <decision name="decisions_patch_relations">
-        <switch>
-            <case to="patch_relations">
-                ${(shouldPatchRelations eq "true") and
-                (fs:exists(concat(concat(wf:conf('nameNode'),'/'),wf:conf('idMappingPath'))) eq "true")}
-            </case>
-            <default to="fork_merge_claims"/>
-        </switch>
-    </decision>
-
-    <action name="patch_relations">
-        <spark xmlns="uri:oozie:spark-action:0.2">
-            <master>yarn</master>
-            <mode>cluster</mode>
-            <name>PatchRelations</name>
-            <class>eu.dnetlib.dhp.oa.graph.raw.PatchRelationsApplication</class>
-            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
-            <spark-opts>
-                --executor-memory ${sparkExecutorMemory}
-                --executor-cores ${sparkExecutorCores}
-                --driver-memory=${sparkDriverMemory}
-                --conf spark.extraListeners=${spark2ExtraListeners}
-                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
-                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
-                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
-                --conf spark.sql.shuffle.partitions=7680
-            </spark-opts>
-            <arg>--graphBasePath</arg><arg>${workingDir}/graph_raw</arg>
-            <arg>--workingDir</arg><arg>${workingDir}/patch_relations</arg>
-            <arg>--idMappingPath</arg><arg>${idMappingPath}</arg>
-        </spark>
-        <ok to="fork_merge_claims"/>
-        <error to="Kill"/>
-    </action>
+    <join name="wait_graphs" to="fork_merge_claims"/>
@@ -596,7 +561,6 @@
-
             <master>yarn</master>
@@ -805,7 +769,42 @@
-    <join name="wait_merge" to="End"/>
+    <join name="wait_merge" to="decisions_patch_relations"/>
+
+    <decision name="decisions_patch_relations">
+        <switch>
+            <case to="patch_relations">
+                ${(shouldPatchRelations eq "true") and
+                (fs:exists(concat(concat(wf:conf('nameNode'),'/'),wf:conf('idMappingPath'))) eq "true")}
+            </case>
+            <default to="End"/>
+        </switch>
+    </decision>
+
+    <action name="patch_relations">
+        <spark xmlns="uri:oozie:spark-action:0.2">
+            <master>yarn</master>
+            <mode>cluster</mode>
+            <name>PatchRelations</name>
+            <class>eu.dnetlib.dhp.oa.graph.raw.PatchRelationsApplication</class>
+            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
+            <spark-opts>
+                --executor-memory ${sparkExecutorMemory}
+                --executor-cores ${sparkExecutorCores}
+                --driver-memory=${sparkDriverMemory}
+                --conf spark.extraListeners=${spark2ExtraListeners}
+                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+                --conf spark.sql.shuffle.partitions=7680
+            </spark-opts>
+            <arg>--graphBasePath</arg><arg>${graphOutputPath}</arg>
+            <arg>--workingDir</arg><arg>${workingDir}/patch_relations</arg>
+            <arg>--idMappingPath</arg><arg>${idMappingPath}</arg>
+        </spark>
+        <ok to="End"/>
+        <error to="Kill"/>
+    </action>
\ No newline at end of file
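
The workflow hunks above only relocate the PatchRelations step: the same Spark action that previously patched the raw graph under `${workingDir}/graph_raw` (before the claims were merged in) now runs as the last step of the workflow against `${graphOutputPath}`, so the id mapping is applied to the fully merged graph. Presumably this is the motivation for the move: relations contributed by the claims merge are now patched too. As a rough sketch of the idea only (the `Relation` stand-in and `patch` helper below are hypothetical, not the actual `eu.dnetlib.dhp.oa.graph.raw.PatchRelationsApplication` source), the job rewrites relation endpoints through an old-id to new-id lookup:

```java
import java.util.Map;

// Hypothetical sketch of the relation-patching idea; the real
// PatchRelationsApplication performs this kind of rewrite as a Spark job.
public class PatchRelationsSketch {

	// Simplified stand-in for eu.dnetlib.dhp.schema.oaf.Relation.
	static final class Relation {
		String source;
		String target;

		Relation(String source, String target) {
			this.source = source;
			this.target = target;
		}
	}

	// Rewrite both endpoints through the id mapping; ids without a
	// mapping entry pass through unchanged.
	static Relation patch(Relation r, Map<String, String> idMapping) {
		r.source = idMapping.getOrDefault(r.source, r.source);
		r.target = idMapping.getOrDefault(r.target, r.target);
		return r;
	}

	public static void main(String[] args) {
		Map<String, String> idMapping = Map.of("50|old_1", "50|new_1");
		Relation r = patch(new Relation("50|old_1", "50|unmapped"), idMapping);
		System.out.println(r.source + " -> " + r.target); // 50|new_1 -> 50|unmapped
	}
}
```
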
diff --git a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/PrepareRelationsJob.java b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/PrepareRelationsJob.java
index 7d53d35549..b3f7854924 100644
--- a/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/PrepareRelationsJob.java
+++ b/dhp-workflows/dhp-graph-provision/src/main/java/eu/dnetlib/dhp/oa/provision/PrepareRelationsJob.java
@@ -10,6 +10,7 @@ import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
@@ -81,6 +82,7 @@ public class PrepareRelationsJob {
Set<String> relationFilter = Optional
.ofNullable(parser.get("relationFilter"))
+ .map(String::toLowerCase)
.map(s -> Sets.newHashSet(Splitter.on(",").split(s)))
.orElse(new HashSet<>());
log.info("relationFilter: {}", relationFilter);
@@ -130,7 +132,7 @@ public class PrepareRelationsJob {
JavaRDD<Relation> rels = readPathRelationRDD(spark, inputRelationsPath)
.filter(rel -> rel.getDataInfo().getDeletedbyinference() == false)
- .filter(rel -> relationFilter.contains(rel.getRelClass()) == false);
+ .filter(rel -> relationFilter.contains(StringUtils.lowerCase(rel.getRelClass())) == false);
JavaRDD<Relation> pruned = pruneRels(
pruneRels(
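
The PrepareRelationsJob change makes the relation filter case-insensitive: the comma-separated `relationFilter` value is lowercased once when parsed, and each relation's `relClass` is lowercased at comparison time. `StringUtils.lowerCase` (hence the new commons-lang3 import) is used on the relation side because, unlike `String::toLowerCase`, it is null-safe. A minimal illustration (the demo class name is made up):

```java
import org.apache.commons.lang3.StringUtils;

// Demonstrates why the filter comparison uses the null-safe
// StringUtils.lowerCase rather than String::toLowerCase.
public class LowerCaseFilterDemo {
	public static void main(String[] args) {
		// relClass values can differ in case, e.g. "isSupplementedBy" vs
		// "IsSupplementedBy"; lowercasing both sides makes the filter match.
		System.out.println(StringUtils.lowerCase("IsSupplementedBy")); // issupplementedby

		// A relation with a null relClass would throw a NullPointerException
		// with rel.getRelClass().toLowerCase(); StringUtils.lowerCase(null)
		// simply returns null, which the filter set does not contain.
		System.out.println(StringUtils.lowerCase(null)); // null
	}
}
```
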