[cleaning] improved vocabulary-based mapping; specialization for the strict vocabulary cleaning

This commit is contained in:
Claudio Atzori 2022-12-12 14:43:03 +01:00
parent c18b8048c3
commit b8bafab8a0
8 changed files with 71 additions and 40 deletions

View File

@@ -4,6 +4,7 @@ package eu.dnetlib.dhp.common.vocabulary;
import java.io.Serializable; import java.io.Serializable;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.Objects;
import java.util.Optional; import java.util.Optional;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
@@ -66,27 +67,39 @@ public class Vocabulary implements Serializable {
} }
public Qualifier getTermAsQualifier(final String termId) { public Qualifier getTermAsQualifier(final String termId) {
if (StringUtils.isBlank(termId)) { return getTermAsQualifier(termId, false);
}
public Qualifier getTermAsQualifier(final String termId, boolean strict) {
final VocabularyTerm term = getTerm(termId);
if (Objects.nonNull(term)) {
return OafMapperUtils.qualifier(term.getId(), term.getName(), getId(), getName());
} else if (Objects.isNull(term) && strict) {
return OafMapperUtils.unknown(getId(), getName()); return OafMapperUtils.unknown(getId(), getName());
} else if (termExists(termId)) {
final VocabularyTerm t = getTerm(termId);
return OafMapperUtils.qualifier(t.getId(), t.getName(), getId(), getName());
} else { } else {
return OafMapperUtils.qualifier(termId, termId, getId(), getName()); return OafMapperUtils.qualifier(termId, termId, getId(), getName());
} }
} }
public Qualifier getSynonymAsQualifier(final String syn) { public Qualifier getSynonymAsQualifier(final String syn) {
return getSynonymAsQualifier(syn, false);
}
public Qualifier getSynonymAsQualifier(final String syn, boolean strict) {
return Optional return Optional
.ofNullable(getTermBySynonym(syn)) .ofNullable(getTermBySynonym(syn))
.map(term -> getTermAsQualifier(term.getId())) .map(term -> getTermAsQualifier(term.getId(), strict))
.orElse(null); .orElse(null);
} }
public Qualifier lookup(String id) { public Qualifier lookup(String id) {
return lookup(id, false);
}
public Qualifier lookup(String id, boolean strict) {
return Optional return Optional
.ofNullable(getSynonymAsQualifier(id)) .ofNullable(getSynonymAsQualifier(id, strict))
.orElse(getTermAsQualifier(id)); .orElse(getTermAsQualifier(id, strict));
} }
} }

View File

@@ -333,7 +333,7 @@ public class GraphCleaningFunctions extends CleaningFunctions {
if (Objects.isNull(i.getHostedby()) || StringUtils.isBlank(i.getHostedby().getKey())) { if (Objects.isNull(i.getHostedby()) || StringUtils.isBlank(i.getHostedby().getKey())) {
i.setHostedby(ModelConstants.UNKNOWN_REPOSITORY); i.setHostedby(ModelConstants.UNKNOWN_REPOSITORY);
} }
if (Objects.isNull(i.getRefereed())) { if (Objects.isNull(i.getRefereed()) || StringUtils.isBlank(i.getRefereed().getClassid())) {
i.setRefereed(qualifier("0000", "Unknown", ModelConstants.DNET_REVIEW_LEVELS)); i.setRefereed(qualifier("0000", "Unknown", ModelConstants.DNET_REVIEW_LEVELS));
} }
if (Objects.nonNull(i.getDateofacceptance())) { if (Objects.nonNull(i.getDateofacceptance())) {

View File

@@ -42,7 +42,7 @@ public class CleaningRuleMap extends HashMap<Class<?>, SerializableConsumer<Obje
vocabularies.find(vocabularyId).ifPresent(vocabulary -> { vocabularies.find(vocabularyId).ifPresent(vocabulary -> {
if (ModelConstants.DNET_SUBJECT_KEYWORD.equalsIgnoreCase(subject.getQualifier().getClassid())) { if (ModelConstants.DNET_SUBJECT_KEYWORD.equalsIgnoreCase(subject.getQualifier().getClassid())) {
Qualifier newValue = vocabulary.lookup(subject.getValue()); Qualifier newValue = vocabulary.lookup(subject.getValue(), true);
if (!ModelConstants.UNKNOWN.equals(newValue.getClassid())) { if (!ModelConstants.UNKNOWN.equals(newValue.getClassid())) {
subject.setValue(newValue.getClassid()); subject.setValue(newValue.getClassid());
subject.getQualifier().setClassid(vocabularyId); subject.getQualifier().setClassid(vocabularyId);

View File

@@ -82,10 +82,10 @@ public class CleanContextTest {
CleanContextSparkJob.main(new String[] { CleanContextSparkJob.main(new String[] {
"--isSparkSessionManaged", Boolean.FALSE.toString(), "--isSparkSessionManaged", Boolean.FALSE.toString(),
"--inputPath", workingDir.toString() + "/publication", "--inputPath", workingDir.toString() + "/publication",
"-graphTableClassName", Publication.class.getCanonicalName(), "--graphTableClassName", Publication.class.getCanonicalName(),
"-workingPath", workingDir.toString() + "/working", "--workingDir", workingDir.toString() + "/working",
"-contextId", "sobigdata", "--contextId", "sobigdata",
"-verifyParam", "gCube " "--verifyParam", "gCube "
}); });
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext()); final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

View File

@@ -83,12 +83,12 @@ public class CleanCountryTest {
CleanCountrySparkJob.main(new String[] { CleanCountrySparkJob.main(new String[] {
"--isSparkSessionManaged", Boolean.FALSE.toString(), "--isSparkSessionManaged", Boolean.FALSE.toString(),
"--inputPath", workingDir.toString() + "/publication", "--inputPath", workingDir.toString() + "/publication",
"-graphTableClassName", Publication.class.getCanonicalName(), "--graphTableClassName", Publication.class.getCanonicalName(),
"-workingPath", workingDir.toString() + "/working", "--workingDir", workingDir.toString() + "/working",
"-country", "NL", "--country", "NL",
"-verifyParam", "10.17632", "--verifyParam", "10.17632",
"-collectedfrom", "NARCIS", "--collectedfrom", "NARCIS",
"-hostedBy", getClass() "--hostedBy", getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/clean/hostedBy") .getResource("/eu/dnetlib/dhp/oa/graph/clean/hostedBy")
.getPath() .getPath()
}); });

View File

@@ -7,6 +7,7 @@ import static org.mockito.Mockito.lenient;
import java.io.IOException; import java.io.IOException;
import java.util.Collection; import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream; import java.util.stream.Stream;
@@ -278,20 +279,25 @@ public class GraphCleaningFunctionsTest {
s -> "0102 computer and information sciences".equals(s.getValue()) & s -> "0102 computer and information sciences".equals(s.getValue()) &
ModelConstants.DNET_SUBJECT_FOS_CLASSID.equals(s.getQualifier().getClassid()))); ModelConstants.DNET_SUBJECT_FOS_CLASSID.equals(s.getQualifier().getClassid())));
List<Subject> s1 = p_cleaned verify_keyword(p_cleaned, "In Situ Hybridization");
.getSubject() verify_keyword(p_cleaned, "Avicennia");
.stream()
.filter(s -> s.getValue().equals("In Situ Hybridization"))
.collect(Collectors.toList());
assertNotNull(s1);
assertEquals(1, s1.size());
assertEquals(ModelConstants.DNET_SUBJECT_KEYWORD, s1.get(0).getQualifier().getClassid());
assertEquals(ModelConstants.DNET_SUBJECT_KEYWORD, s1.get(0).getQualifier().getClassname());
// TODO add more assertions to verify the cleaned values // TODO add more assertions to verify the cleaned values
System.out.println(MAPPER.writeValueAsString(p_cleaned)); System.out.println(MAPPER.writeValueAsString(p_cleaned));
} }
private static void verify_keyword(Publication p_cleaned, String subject) {
Optional<Subject> s1 = p_cleaned
.getSubject()
.stream()
.filter(s -> s.getValue().equals(subject))
.findFirst();
assertTrue(s1.isPresent());
assertEquals(ModelConstants.DNET_SUBJECT_KEYWORD, s1.get().getQualifier().getClassid());
assertEquals(ModelConstants.DNET_SUBJECT_KEYWORD, s1.get().getQualifier().getClassname());
}
private Stream<Qualifier> getAuthorPidTypes(Result pub) { private Stream<Qualifier> getAuthorPidTypes(Result pub) {
return pub return pub
.getAuthor() .getAuthor()

View File

@@ -26,6 +26,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup; import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup;
import eu.dnetlib.dhp.schema.common.ModelConstants; import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.*; import eu.dnetlib.dhp.schema.oaf.*;
import eu.dnetlib.dhp.schema.oaf.utils.GraphCleaningFunctions;
import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory; import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory;
import eu.dnetlib.dhp.schema.oaf.utils.PidType; import eu.dnetlib.dhp.schema.oaf.utils.PidType;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService; import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
@@ -238,7 +239,11 @@ class MappersTest {
assertNotNull(i.getAccessright()); assertNotNull(i.getAccessright());
assertEquals("OPEN", i.getAccessright().getClassid()); assertEquals("OPEN", i.getAccessright().getClassid());
}); });
assertEquals("UNKNOWN", p.getInstance().get(0).getRefereed().getClassid());
Publication p_cleaned = cleanup(p, vocs);
assertEquals("0000", p_cleaned.getInstance().get(0).getRefereed().getClassid());
assertEquals("Unknown", p_cleaned.getInstance().get(0).getRefereed().getClassname());
assertNotNull(p.getInstance().get(0).getPid()); assertNotNull(p.getInstance().get(0).getPid());
assertEquals(2, p.getInstance().get(0).getPid().size()); assertEquals(2, p.getInstance().get(0).getPid().size());
@@ -453,7 +458,10 @@ class MappersTest {
assertNotNull(i.getAccessright()); assertNotNull(i.getAccessright());
assertEquals("OPEN", i.getAccessright().getClassid()); assertEquals("OPEN", i.getAccessright().getClassid());
}); });
assertEquals("UNKNOWN", p.getInstance().get(0).getRefereed().getClassid());
Publication p_cleaned = cleanup(p, vocs);
assertEquals("0000", p_cleaned.getInstance().get(0).getRefereed().getClassid());
assertEquals("Unknown", p_cleaned.getInstance().get(0).getRefereed().getClassname());
} }
@Test @Test
@@ -570,7 +578,9 @@ class MappersTest {
assertTrue(i.getUrl().contains("http://apps.who.int/trialsearch/Trial3.aspx?trialid=NCT02321059")); assertTrue(i.getUrl().contains("http://apps.who.int/trialsearch/Trial3.aspx?trialid=NCT02321059"));
assertTrue(i.getUrl().contains("https://clinicaltrials.gov/ct2/show/NCT02321059")); assertTrue(i.getUrl().contains("https://clinicaltrials.gov/ct2/show/NCT02321059"));
assertEquals("UNKNOWN", i.getRefereed().getClassid()); Dataset d_cleaned = cleanup(d, vocs);
assertEquals("0000", d_cleaned.getInstance().get(0).getRefereed().getClassid());
assertEquals("Unknown", d_cleaned.getInstance().get(0).getRefereed().getClassname());
} }
@Test @Test
@@ -871,7 +881,10 @@ class MappersTest {
assertNotNull(i.getAccessright()); assertNotNull(i.getAccessright());
assertEquals("UNKNOWN", i.getAccessright().getClassid()); assertEquals("UNKNOWN", i.getAccessright().getClassid());
}); });
assertEquals("UNKNOWN", p.getInstance().get(0).getRefereed().getClassid());
Dataset p_cleaned = cleanup(p, vocs);
assertEquals("0000", p_cleaned.getInstance().get(0).getRefereed().getClassid());
assertEquals("Unknown", p_cleaned.getInstance().get(0).getRefereed().getClassname());
} }
@Test @Test

View File

@@ -907,24 +907,23 @@
{ {
"dataInfo": { "dataInfo": {
"deletedbyinference": false, "deletedbyinference": false,
"inferenceprovenance": "",
"inferred": false, "inferred": false,
"invisible": false, "invisible": false,
"provenanceaction": { "provenanceaction": {
"classid": "sysimport:crosswalk:datasetarchive", "classid": "sysimport:actionset",
"classname": "sysimport:crosswalk:datasetarchive", "classname": "Harvested",
"schemeid": "dnet:provenanceActions", "schemeid": "dnet:provenanceActions",
"schemename": "dnet:provenanceActions" "schemename": "dnet:provenanceActions"
}, },
"trust": "0.9" "trust": "0.9"
}, },
"qualifier": { "qualifier": {
"classid": "", "classid": "FOS",
"classname": "", "classname": "Fields of Science and Technology classification",
"schemeid": "", "schemeid": "dnet:subject_classification_typologies",
"schemename": "" "schemename": "dnet:subject_classification_typologies"
}, },
"value": "doped silicon" "value": "Avicennia"
}, },
{ {
"dataInfo": { "dataInfo": {