Merge branch 'beta' into clean_relations

Claudio Atzori 2021-09-16 11:09:47 +02:00
commit 09c2eb7f62
4 changed files with 17 additions and 11 deletions

View File

@@ -68,27 +68,28 @@ public class PropagationConstant {
 				getDataInfo(
 					PROPAGATION_DATA_INFO_TYPE,
 					PROPAGATION_COUNTRY_INSTREPO_CLASS_ID,
-					PROPAGATION_COUNTRY_INSTREPO_CLASS_NAME));
+					PROPAGATION_COUNTRY_INSTREPO_CLASS_NAME,
+					ModelConstants.DNET_PROVENANCE_ACTIONS));
 		return nc;
 	}
 
 	public static DataInfo getDataInfo(
-		String inference_provenance, String inference_class_id, String inference_class_name) {
+		String inference_provenance, String inference_class_id, String inference_class_name, String qualifierSchema) {
 		DataInfo di = new DataInfo();
 		di.setInferred(true);
 		di.setDeletedbyinference(false);
 		di.setTrust("0.85");
 		di.setInferenceprovenance(inference_provenance);
-		di.setProvenanceaction(getQualifier(inference_class_id, inference_class_name));
+		di.setProvenanceaction(getQualifier(inference_class_id, inference_class_name, qualifierSchema));
 		return di;
 	}
 
-	public static Qualifier getQualifier(String inference_class_id, String inference_class_name) {
+	public static Qualifier getQualifier(String inference_class_id, String inference_class_name, String qualifierSchema) {
 		Qualifier pa = new Qualifier();
 		pa.setClassid(inference_class_id);
 		pa.setClassname(inference_class_name);
-		pa.setSchemeid(ModelConstants.DNET_PID_TYPES);
-		pa.setSchemename(ModelConstants.DNET_PID_TYPES);
+		pa.setSchemeid(qualifierSchema);
+		pa.setSchemename(qualifierSchema);
 		return pa;
 	}
@@ -107,7 +108,7 @@ public class PropagationConstant {
 		r.setRelClass(rel_class);
 		r.setRelType(rel_type);
 		r.setSubRelType(subrel_type);
-		r.setDataInfo(getDataInfo(inference_provenance, inference_class_id, inference_class_name));
+		r.setDataInfo(getDataInfo(inference_provenance, inference_class_id, inference_class_name, ModelConstants.DNET_PROVENANCE_ACTIONS));
 		return r;
 	}
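
Taken together, the two PropagationConstant hunks move the qualifier scheme out of the helpers and into the call sites: provenance-action qualifiers now use ModelConstants.DNET_PROVENANCE_ACTIONS, while PID qualifiers keep ModelConstants.DNET_PID_TYPES. A minimal illustrative sketch of the new call pattern (not part of this commit; it only reuses constants and signatures visible in the hunks above):

	// Provenance action attached to a propagated entity: the qualifier scheme
	// is now the provenance-actions scheme, no longer the PID-types scheme.
	DataInfo propagationInfo = getDataInfo(
		PROPAGATION_DATA_INFO_TYPE,
		PROPAGATION_COUNTRY_INSTREPO_CLASS_ID,
		PROPAGATION_COUNTRY_INSTREPO_CLASS_NAME,
		ModelConstants.DNET_PROVENANCE_ACTIONS);

	// A PID qualifier still uses the PID-types scheme, but the caller now has
	// to say so explicitly.
	Qualifier orcidQualifier = getQualifier(
		ModelConstants.ORCID_PENDING,
		ModelConstants.ORCID_CLASSNAME,
		ModelConstants.DNET_PID_TYPES);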

View File

@@ -173,13 +173,14 @@ public class SparkOrcidToResultFromSemRelJob {
 		if (toaddpid) {
 			StructuredProperty p = new StructuredProperty();
 			p.setValue(autoritative_author.getOrcid());
-			p.setQualifier(getQualifier(ModelConstants.ORCID_PENDING, ModelConstants.ORCID_CLASSNAME));
+			p.setQualifier(getQualifier(ModelConstants.ORCID_PENDING, ModelConstants.ORCID_CLASSNAME, ModelConstants.DNET_PID_TYPES));
 			p
 				.setDataInfo(
 					getDataInfo(
 						PROPAGATION_DATA_INFO_TYPE,
 						PROPAGATION_ORCID_TO_RESULT_FROM_SEM_REL_CLASS_ID,
-						PROPAGATION_ORCID_TO_RESULT_FROM_SEM_REL_CLASS_NAME));
+						PROPAGATION_ORCID_TO_RESULT_FROM_SEM_REL_CLASS_NAME,
+						ModelConstants.DNET_PROVENANCE_ACTIONS));
 
 			Optional<List<StructuredProperty>> authorPid = Optional.ofNullable(author.getPid());
 			if (authorPid.isPresent()) {

View File

@@ -10,6 +10,7 @@ import java.util.List;
 import java.util.Optional;
 import java.util.stream.Collectors;
 
+import eu.dnetlib.dhp.schema.common.ModelConstants;
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.MapFunction;
@@ -128,7 +129,8 @@ public class SparkResultToCommunityFromOrganizationJob {
 						getDataInfo(
 							PROPAGATION_DATA_INFO_TYPE,
 							PROPAGATION_RESULT_COMMUNITY_ORGANIZATION_CLASS_ID,
-							PROPAGATION_RESULT_COMMUNITY_ORGANIZATION_CLASS_NAME)));
+							PROPAGATION_RESULT_COMMUNITY_ORGANIZATION_CLASS_NAME,
+							ModelConstants.DNET_PROVENANCE_ACTIONS)));
 				propagatedContexts.add(newContext);
 			}
 		}

View File

@@ -7,6 +7,7 @@ import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
 import java.util.*;
 import java.util.stream.Collectors;
 
+import eu.dnetlib.dhp.schema.common.ModelConstants;
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.MapFunction;
@@ -124,7 +125,8 @@ public class SparkResultToCommunityThroughSemRelJob {
 					getDataInfo(
 						PROPAGATION_DATA_INFO_TYPE,
 						PROPAGATION_RESULT_COMMUNITY_SEMREL_CLASS_ID,
-						PROPAGATION_RESULT_COMMUNITY_SEMREL_CLASS_NAME)));
+						PROPAGATION_RESULT_COMMUNITY_SEMREL_CLASS_NAME,
+						ModelConstants.DNET_PROVENANCE_ACTIONS)));
 			return newContext;
 		}
 		return null;
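
A hypothetical JUnit 5 check of the behaviour all four files now rely on: the scheme of the provenance-action qualifier is exactly what the caller passes in, whereas before this merge it was hard-coded to ModelConstants.DNET_PID_TYPES. Only the method and constant names come from the hunks above; the test class, the JavaBean getters, and the imports are assumptions:

	// Assumed imports: org.junit.jupiter.api.Test, org.junit.jupiter.api.Assertions,
	// eu.dnetlib.dhp.schema.common.ModelConstants, plus a static import of PropagationConstant.*.
	@Test
	void provenanceActionUsesTheRequestedScheme() {
		DataInfo di = getDataInfo(
			PROPAGATION_DATA_INFO_TYPE,
			PROPAGATION_RESULT_COMMUNITY_SEMREL_CLASS_ID,
			PROPAGATION_RESULT_COMMUNITY_SEMREL_CLASS_NAME,
			ModelConstants.DNET_PROVENANCE_ACTIONS);

		// schemeid/schemename mirror the scheme handed to getQualifier (getters assumed from the setters in the diff)
		Assertions.assertEquals(ModelConstants.DNET_PROVENANCE_ACTIONS, di.getProvenanceaction().getSchemeid());
		Assertions.assertEquals(ModelConstants.DNET_PROVENANCE_ACTIONS, di.getProvenanceaction().getSchemename());
	}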