merge with upstream

Miriam Baglioni 2020-12-03 10:26:45 +01:00
commit 05c452f58d
8 changed files with 43 additions and 45 deletions


@@ -158,7 +158,7 @@ class CrossrefMappingTest {
     rels.foreach(s => logger.info(s.getTarget))
-    assertEquals(rels.size, 3)
+    assertEquals(rels.size, 6)
 }
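
Side note on the assertion above: JUnit's assertEquals takes (expected, actual), while this test passes (actual, expected). The test still passes, but on failure the two values would be reported swapped. A minimal Java sketch of the conventional argument order (the rels collection here is a hypothetical stand-in for the Scala test's relations):

    import static org.junit.jupiter.api.Assertions.assertEquals;

    import java.util.Arrays;
    import java.util.List;

    import org.junit.jupiter.api.Test;

    class RelsCountSketchTest {

        // hypothetical stand-in for the relations produced by the Crossref mapping
        private final List<String> rels = Arrays.asList("r1", "r2", "r3", "r4", "r5", "r6");

        @Test
        void testRelsCount() {
            // expected value first, actual second, so a failure message reads correctly
            assertEquals(6, rels.size());
        }
    }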


@@ -44,9 +44,6 @@ public class PropagationConstant {
 	public static final String PROPAGATION_ORCID_TO_RESULT_FROM_SEM_REL_CLASS_ID = "authorpid:result";
 	public static final String PROPAGATION_ORCID_TO_RESULT_FROM_SEM_REL_CLASS_NAME = "Propagation of authors pid to result through semantic relations";
 	public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
 	private static final String cfHbforResultQuery = "select distinct r.id, inst.collectedfrom.key cf, inst.hostedby.key hb "
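
The OBJECT_MAPPER constant kept above follows a common Jackson pattern: ObjectMapper is thread-safe once configured, so a single shared static instance avoids re-creating a mapper per (de)serialization. A minimal sketch of the pattern (the helper class and methods are illustrative, not part of this diff):

    import com.fasterxml.jackson.core.JsonProcessingException;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class ObjectMapperSketch {

        // one shared, pre-configured instance; safe for concurrent reads and writes
        public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

        public static String toJson(Object value) throws JsonProcessingException {
            return OBJECT_MAPPER.writeValueAsString(value);
        }

        public static <T> T fromJson(String json, Class<T> clazz) throws JsonProcessingException {
            return OBJECT_MAPPER.readValue(json, clazz);
        }
    }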


@@ -7,7 +7,6 @@ import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
 import java.util.Arrays;
 import java.util.List;
-import eu.dnetlib.dhp.schema.common.ModelConstants;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.spark.SparkConf;
@@ -23,6 +22,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.gson.Gson;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.dhp.schema.oaf.Result;


@@ -7,7 +7,6 @@ import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
 import java.util.List;
 import java.util.Optional;
-import eu.dnetlib.dhp.schema.common.ModelConstants;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.spark.SparkConf;
@@ -24,6 +23,7 @@ import com.google.common.collect.Lists;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.common.PacePerson;
+import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.oaf.Author;
 import eu.dnetlib.dhp.schema.oaf.Result;
 import eu.dnetlib.dhp.schema.oaf.StructuredProperty;


@@ -5,9 +5,7 @@ import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import com.cloudera.org.codehaus.jackson.map.jsontype.impl.ClassNameIdResolver;
-import eu.dnetlib.dhp.PropagationConstant;
-import eu.dnetlib.dhp.schema.common.ModelConstants;
 import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
@@ -22,8 +20,11 @@ import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import com.cloudera.org.codehaus.jackson.map.jsontype.impl.ClassNameIdResolver;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import eu.dnetlib.dhp.PropagationConstant;
+import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.oaf.Dataset;

 public class OrcidPropagationJobTest {
@@ -170,6 +171,7 @@ public class OrcidPropagationJobTest {
 			.filter(
 				"id = '50|dedup_wf_001::95b033c0c3961f6a1cdcd41a99a9632e' "
 					+ "and name = 'Vajinder' and surname = 'Kumar' and pidType = '" +
 					ModelConstants.ORCID_PENDING + "'")
 			.count());
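
The strengthened assertion filters on name, surname, and pidType as well as id, with the pid type taken from ModelConstants.ORCID_PENDING rather than a hard-coded literal. A sketch of the same Spark SQL-style filter in isolation (the SparkSession, input path, and column layout are assumptions, not shown in this diff):

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SparkSession;

    import eu.dnetlib.dhp.schema.common.ModelConstants;

    class OrcidPendingCountSketch {

        // assumes `inputPath` holds the propagated-author output, readable as JSON
        // with columns id / name / surname / pidType, as the filter above implies
        static long countPropagated(SparkSession spark, String inputPath) {
            Dataset<Row> verification = spark.read().json(inputPath);
            return verification
                .filter(
                    "id = '50|dedup_wf_001::95b033c0c3961f6a1cdcd41a99a9632e' "
                        + "and name = 'Vajinder' and surname = 'Kumar' "
                        + "and pidType = '" + ModelConstants.ORCID_PENDING + "'")
                .count();
        }
    }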


@@ -192,7 +192,9 @@ public class CleaningFunctions {
 		final Set<String> collectedFrom = Optional
 			.ofNullable(r.getCollectedfrom())
-			.map(c -> c.stream()
+			.map(
+				c -> c
+					.stream()
 				.map(KeyValue::getKey)
 				.collect(Collectors.toCollection(HashSet::new)))
 			.orElse(new HashSet<>());
@@ -212,7 +214,8 @@ public class CleaningFunctions {
 			// hack to distinguish orcid from orcid_pending
 			String pidProvenance = Optional
 				.ofNullable(p.getDataInfo())
-				.map(d -> Optional
+				.map(
+					d -> Optional
 					.ofNullable(d.getProvenanceaction())
 					.map(Qualifier::getClassid)
 					.orElse(""))


@@ -7,11 +7,6 @@ import java.io.IOException;
 import java.io.StringReader;
 import java.util.List;
-import eu.dnetlib.dhp.schema.oaf.Oaf;
-import eu.dnetlib.dhp.schema.oaf.OafEntity;
-import eu.dnetlib.dhp.schema.oaf.OafMapperUtils;
-import eu.dnetlib.dhp.schema.oaf.Publication;
-import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.dom4j.Document;
@@ -20,13 +15,18 @@ import org.dom4j.io.SAXReader;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Disabled;
 import org.junit.jupiter.api.Test;
+import org.mockito.Mock;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import eu.dnetlib.dhp.oa.provision.model.JoinedEntity;
 import eu.dnetlib.dhp.oa.provision.utils.ContextMapper;
 import eu.dnetlib.dhp.oa.provision.utils.XmlRecordFactory;
-import org.mockito.Mock;
+import eu.dnetlib.dhp.schema.oaf.Oaf;
+import eu.dnetlib.dhp.schema.oaf.OafEntity;
+import eu.dnetlib.dhp.schema.oaf.OafMapperUtils;
+import eu.dnetlib.dhp.schema.oaf.Publication;
+import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;

 //TODO to enable it we need to update the joined_entity.json test file
 //@Disabled
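
The import shuffle above is the formatter regrouping third-party and project imports; org.mockito.Mock moves up with the other org.* imports. For context, a minimal sketch of how an @Mock field for ISLookUpService is typically wired under JUnit 5 (the Mockito extension and field name are assumptions, not visible in this diff):

    import org.junit.jupiter.api.extension.ExtendWith;
    import org.mockito.Mock;
    import org.mockito.junit.jupiter.MockitoExtension;

    import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;

    @ExtendWith(MockitoExtension.class)
    class IsLookUpMockSketch {

        // Mockito instantiates this mock before each test
        @Mock
        private ISLookUpService isLookUpService;
    }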
@@ -47,8 +47,6 @@ public class XmlRecordFactoryTest {
 		//// TODO specific test assertion on doc
 	}

 	@Test
 	void testBologna() throws IOException, DocumentException {
 		final String json = IOUtils.toString(getClass().getResourceAsStream("oaf-bologna.json"));
@@ -63,8 +61,6 @@ public class XmlRecordFactoryTest {
 		System.out.println(doc.asXML());
 	}

 	private Document buildXml(JoinedEntity je) throws DocumentException {
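
The trailing context line shows the test's private buildXml helper. The SAXReader and StringReader imports suggest it parses a serialized record into a dom4j Document; a self-contained sketch of that parsing step (the JoinedEntity-to-XML call itself is not shown in the diff, so only the parse half is reproduced):

    import java.io.StringReader;

    import org.dom4j.Document;
    import org.dom4j.DocumentException;
    import org.dom4j.io.SAXReader;

    class XmlParseSketch {

        // parse a serialized record into a dom4j Document, as buildXml presumably does
        static Document parse(String xml) throws DocumentException {
            return new SAXReader().read(new StringReader(xml));
        }

        public static void main(String[] args) throws DocumentException {
            Document doc = parse("<record><id>50|test</id></record>");
            System.out.println(doc.asXML());
        }
    }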