horizontalConstraints #259

Merged
miriam.baglioni merged 4 commits from horizontalConstraints into beta 2022-11-28 18:20:18 +01:00
4 changed files with 17 additions and 19 deletions
Showing only changes of commit 1cb79719a7

ResultTagger.java

@@ -152,7 +152,8 @@ public class ResultTagger implements Serializable {
 		}
 		result.getContext().forEach(c -> {
-			if (communities.contains(c.getId())) {
+			String cId = c.getId();
+			if (communities.contains(cId)) {
 				Optional<List<DataInfo>> opt_dataInfoList = Optional.ofNullable(c.getDataInfo());
 				List<DataInfo> dataInfoList;
 				if (opt_dataInfoList.isPresent())
@@ -161,19 +162,19 @@ public class ResultTagger implements Serializable {
 					dataInfoList = new ArrayList<>();
 					c.setDataInfo(dataInfoList);
 				}
-				if (subjects.contains(c))
+				if (subjects.contains(cId))
 					dataInfoList
 						.add(OafMapperUtils.dataInfo(false, BULKTAG_DATA_INFO_TYPE, true, false,
 							OafMapperUtils.qualifier(CLASS_ID_SUBJECT, CLASS_NAME_BULKTAG_SUBJECT, DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS), TAGGING_TRUST));
-				if (datasources.contains(c))
+				if (datasources.contains(cId))
 					dataInfoList
 						.add(OafMapperUtils.dataInfo(false, BULKTAG_DATA_INFO_TYPE, true, false,
 							OafMapperUtils.qualifier(CLASS_ID_DATASOURCE, CLASS_NAME_BULKTAG_DATASOURCE, DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS), TAGGING_TRUST));
-				if (czenodo.contains(c))
+				if (czenodo.contains(cId))
 					dataInfoList
 						.add(OafMapperUtils.dataInfo(false, BULKTAG_DATA_INFO_TYPE, true, false,
 							OafMapperUtils.qualifier(CLASS_ID_CZENODO, CLASS_NAME_BULKTAG_ZENODO, DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS), TAGGING_TRUST));
-				if (aconstraints.contains(c))
+				if (aconstraints.contains(cId))
 					dataInfoList
 						.add(
 							OafMapperUtils.dataInfo(false, BULKTAG_DATA_INFO_TYPE, true, false,
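
The change in ResultTagger.java fixes a type mismatch: subjects, datasources, czenodo and aconstraints hold community identifiers as strings, so the old contains(c) calls compared a whole Context object against them and could never match. The commit extracts c.getId() once into cId, mirroring the outer communities.contains(cId) guard, and compares the id instead. Below is a minimal, self-contained sketch of the before/after behaviour; the nested Context class is a simplified stand-in for illustration, not the real schema class.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class ContainsFixSketch {

	// Simplified stand-in for the schema's Context class, reduced to the id field;
	// it only exists to demonstrate the type mismatch fixed by this commit.
	static class Context {
		private final String id;

		Context(String id) {
			this.id = id;
		}

		String getId() {
			return id;
		}
	}

	public static void main(String[] args) {
		// The tagging lookups hold community identifiers as plain strings.
		Set<String> subjects = new HashSet<>(Arrays.asList("dh-ch", "covid-19"));
		Context c = new Context("dh-ch");

		// Before the fix: a Context instance is checked against a Set<String>,
		// so contains(...) can never be true and no subject DataInfo is added.
		System.out.println(subjects.contains(c)); // false

		// After the fix: the id is extracted once and compared as a String.
		String cId = c.getId();
		System.out.println(subjects.contains(cId)); // true
	}
}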

BulkTagJobTest.java

@@ -21,6 +21,7 @@ import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.ForeachFunction;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.SparkSession;
@@ -778,18 +779,4 @@ public class BulkTagJobTest {
 			1, idExplodeCommunity.filter("provenance = 'community:advconstraint'").count());
 	}
-//	@Test
-//	void test1(){
-//		ProtoMap params = new Gson().fromJson(pathMap, ProtoMap.class);
-//		HashMap<String, String> param = new HashMap<>();
-//		for (String key : params.keySet()) {
-//			try {
-//				param.put(key, jsonContext.read(params.get(key)));
-//			} catch (com.jayway.jsonpath.PathNotFoundException e) {
-//				param.put(key, new ArrayList<>());
-//			}
-//		}
-//		return param;
-//	}
-//	}
 }
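
The remaining changes in BulkTagJobTest.java add an import for org.apache.spark.api.java.function.ForeachFunction and drop a commented-out helper. The import suggests the test now iterates over a Dataset<Row> from Java, where the lambda must be cast to ForeachFunction so the right Dataset.foreach overload is chosen. The sketch below is an illustrative stand-alone example of that pattern; the session, schema and rows are assumptions, not values taken from the test.

import java.util.Arrays;
import java.util.List;

import org.apache.spark.api.java.function.ForeachFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructType;

public class ForeachFunctionSketch {

	public static void main(String[] args) {
		// Local session, only for this illustrative example.
		SparkSession spark = SparkSession
			.builder()
			.appName("ForeachFunctionSketch")
			.master("local[*]")
			.getOrCreate();

		// Hypothetical schema and rows standing in for the exploded community dataset.
		StructType schema = new StructType()
			.add("id", DataTypes.StringType)
			.add("provenance", DataTypes.StringType);

		List<Row> rows = Arrays.asList(
			RowFactory.create("r1", "community:subject"),
			RowFactory.create("r2", "community:advconstraint"));

		Dataset<Row> tagged = spark.createDataFrame(rows, schema);

		// The explicit ForeachFunction cast tells javac which foreach overload
		// to use when a lambda is passed from Java; here it just prints each row.
		tagged
			.filter("provenance = 'community:advconstraint'")
			.foreach((ForeachFunction<Row>) row -> System.out.println(row.mkString("\t")));

		spark.stop();
	}
}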