Merge branch 'master' of https://code-repo.d4science.org/D-Net/dnet-hadoop into orcid-no-doi

Enrico Ottonello 2021-02-25 18:45:02 +01:00
commit ca1800510a
19 changed files with 538 additions and 42 deletions

View File

@@ -3,6 +3,9 @@ package eu.dnetlib.dhp.schema.common;
import static com.google.common.base.Preconditions.checkArgument;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
@@ -473,4 +476,25 @@ public class ModelSupport {
private static <T extends Oaf> String idFnForOafEntity(T t) {
return ((OafEntity) t).getId();
}
public static final String ISO8601FORMAT = "yyyy-MM-dd'T'HH:mm:ssZ";
public static String oldest(String dateA, String dateB) throws ParseException {
if (StringUtils.isBlank(dateA)) {
return dateB;
}
if (StringUtils.isBlank(dateB)) {
return dateA;
}
if (StringUtils.isNotBlank(dateA) && StringUtils.isNotBlank(dateB)) {
final Date a = new SimpleDateFormat(ISO8601FORMAT).parse(dateA);
final Date b = new SimpleDateFormat(ISO8601FORMAT).parse(dateB);
return a.before(b) ? dateA : dateB;
} else {
return null;
}
}
}
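
A brief usage sketch of the new helper (hypothetical date values; the ISO8601FORMAT pattern expects an RFC-822 style zone offset such as +0000, and a blank argument simply yields the other one):

// illustration only, not part of the commit
ModelSupport.oldest(null, "2021-06-01T00:00:00+0200");                       // -> "2021-06-01T00:00:00+0200"
ModelSupport.oldest("2020-01-01T00:00:00+0000", "2021-06-01T00:00:00+0200"); // -> "2020-01-01T00:00:00+0000", the earlier date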

View File

@@ -1,8 +1,11 @@
package eu.dnetlib.dhp.schema.oaf;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import static com.google.common.base.Preconditions.checkArgument;
import java.text.ParseException;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -106,7 +109,7 @@ public class Relation extends Oaf {
}
public Boolean getValidated() {
return validated;
return Objects.nonNull(validated) && validated;
}
public void setValidated(Boolean validated) {
@@ -130,6 +133,13 @@ public class Relation extends Oaf {
Objects.equals(getSubRelType(), r.getSubRelType()), "subRelType(s) must be equal");
checkArgument(Objects.equals(getRelClass(), r.getRelClass()), "relClass(es) must be equal");
setValidated(getValidated() || r.getValidated());
try {
setValidationDate(ModelSupport.oldest(getValidationDate(), r.getValidationDate()));
} catch (ParseException e) {
throw new IllegalArgumentException(String.format("invalid validation date format in relation [s:%s, t:%s]: %s", getSource(), getTarget(), getValidationDate()));
}
super.mergeFrom(r);
}
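
In short, the merge now keeps validated = true when either relation is validated and retains the older (or only) validation date; an unparsable date surfaces as an IllegalArgumentException carrying the source and target ids. The MergeTest additions below exercise exactly these cases.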

View File

@@ -63,6 +63,51 @@ public class MergeTest {
assertEquals(3, a.getSubject().size());
}
@Test
public void mergeRelationTest() {
Relation a = createRel(null, null);
Relation b = createRel(null, null);
a.mergeFrom(b);
assertEquals(a, b);
a = createRel(true, null);
b = createRel(null, null);
a.mergeFrom(b);
assertEquals(true, a.getValidated());
a = createRel(true, null);
b = createRel(false, null);
a.mergeFrom(b);
assertEquals(true, a.getValidated());
a = createRel(true, null);
b = createRel(true, "2016-04-05T12:41:19.202Z");
a.mergeFrom(b);
assertEquals("2016-04-05T12:41:19.202Z", a.getValidationDate());
}
@Test
public void mergeRelationTestParseException() {
assertThrows(IllegalArgumentException.class, () -> {
Relation a = createRel(true, "2016-04-05");
Relation b = createRel(true, "2016-04-05");
a.mergeFrom(b);
});
}
private Relation createRel(Boolean validated, String validationDate) {
Relation rel = new Relation();
rel.setSource("1");
rel.setTarget("2");
rel.setRelType("reltype");
rel.setSubRelType("subreltype");
rel.setRelClass("relclass");
rel.setValidated(validated);
rel.setValidationDate(validationDate);
return rel;
}
private KeyValue setKV(final String key, final String value) {
KeyValue k = new KeyValue();

View File

@@ -136,7 +136,17 @@
</configuration>
</global>
<start to="ensure_output_dir"/>
<start to="resume_from"/>
<decision name="resume_from">
<switch>
<case to="ensure_output_dir">${wf:conf('resumeFrom') eq 'ensure_output_dir'}</case>
<case to="index_event_subset">${wf:conf('resumeFrom') eq 'index_event_subset'}</case>
<case to="stats">${wf:conf('resumeFrom') eq 'stats'}</case>
<case to="index_notifications">${wf:conf('resumeFrom') eq 'index_notifications'}</case>
<default to="ensure_output_dir"/>
</switch>
</decision>
<kill name="Kill">
<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
@@ -505,6 +515,33 @@
<arg>--maxEventsForTopic</arg><arg>${maxIndexedEventsForDsAndTopic}</arg>
<arg>--brokerApiBaseUrl</arg><arg>${brokerApiBaseUrl}</arg>
</spark>
<ok to="stats"/>
<error to="Kill"/>
</action>
<action name="stats">
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>GenerateStatsJob</name>
<class>eu.dnetlib.dhp.broker.oa.GenerateStatsJob</class>
<jar>dhp-broker-events-${projectVersion}.jar</jar>
<spark-opts>
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.shuffle.partitions=3840
</spark-opts>
<arg>--outputDir</arg><arg>${outputDir}</arg>
<arg>--dbUrl</arg><arg>${brokerDbUrl}</arg>
<arg>--dbUser</arg><arg>${brokerDbUser}</arg>
<arg>--dbPassword</arg><arg>${brokerDbPassword}</arg>
<arg>--brokerApiBaseUrl</arg><arg>${brokerApiBaseUrl}</arg>
</spark>
<ok to="index_notifications"/>
<error to="Kill"/>
</action>
@@ -535,33 +572,6 @@
<arg>--esNodesWanOnly</arg><arg>${esNodesWanOnly}</arg>
<arg>--brokerApiBaseUrl</arg><arg>${brokerApiBaseUrl}</arg>
</spark>
<ok to="stats"/>
<error to="Kill"/>
</action>
<action name="stats">
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>GenerateStatsJob</name>
<class>eu.dnetlib.dhp.broker.oa.GenerateStatsJob</class>
<jar>dhp-broker-events-${projectVersion}.jar</jar>
<spark-opts>
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.shuffle.partitions=3840
</spark-opts>
<arg>--outputDir</arg><arg>${outputDir}</arg>
<arg>--dbUrl</arg><arg>${brokerDbUrl}</arg>
<arg>--dbUser</arg><arg>${brokerDbUser}</arg>
<arg>--dbPassword</arg><arg>${brokerDbPassword}</arg>
<arg>--brokerApiBaseUrl</arg><arg>${brokerApiBaseUrl}</arg>
</spark>
<ok to="End"/>
<error to="Kill"/>
</action>
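
Net effect on this workflow: the entry point is now a resume_from decision node driven by the optional resumeFrom property (defaulting to ensure_output_dir), and the GenerateStatsJob action is moved ahead of index_notifications, running right after index_event_subset instead of at the end of the chain.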

View File

@@ -0,0 +1,18 @@
<configuration>
<property>
<name>jobTracker</name>
<value>yarnRM</value>
</property>
<property>
<name>nameNode</name>
<value>hdfs://nameservice1</value>
</property>
<property>
<name>oozie.use.system.libpath</name>
<value>true</value>
</property>
<property>
<name>oozie.action.sharelib.for.spark</name>
<value>spark2</value>
</property>
</configuration>

View File

@@ -0,0 +1,114 @@
<workflow-app name="create broker events" xmlns="uri:oozie:workflow:0.5">
<parameters>
<property>
<name>outputDir</name>
<description>the path where the generated data will be stored</description>
</property>
<property>
<name>brokerApiBaseUrl</name>
<description>the url of the broker service api</description>
</property>
<property>
<name>brokerDbUrl</name>
<description>the url of the broker database</description>
</property>
<property>
<name>brokerDbUser</name>
<description>the user of the broker database</description>
</property>
<property>
<name>brokerDbPassword</name>
<description>the password of the broker database</description>
</property>
<property>
<name>sparkDriverMemory</name>
<description>memory for driver process</description>
</property>
<property>
<name>sparkExecutorMemory</name>
<description>memory for individual executor</description>
</property>
<property>
<name>sparkExecutorCores</name>
<description>number of cores used by single executor</description>
</property>
<property>
<name>oozieActionShareLibForSpark2</name>
<description>oozie action sharelib for spark 2.*</description>
</property>
<property>
<name>spark2ExtraListeners</name>
<value>com.cloudera.spark.lineage.NavigatorAppListener</value>
<description>spark 2.* extra listeners classname</description>
</property>
<property>
<name>spark2SqlQueryExecutionListeners</name>
<value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
<description>spark 2.* sql query execution listeners classname</description>
</property>
<property>
<name>spark2YarnHistoryServerAddress</name>
<description>spark 2.* yarn history server address</description>
</property>
<property>
<name>spark2EventLogDir</name>
<description>spark 2.* event log dir location</description>
</property>
</parameters>
<global>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>mapreduce.job.queuename</name>
<value>${queueName}</value>
</property>
<property>
<name>oozie.launcher.mapred.job.queue.name</name>
<value>${oozieLauncherQueueName}</value>
</property>
<property>
<name>oozie.action.sharelib.for.spark</name>
<value>${oozieActionShareLibForSpark2}</value>
</property>
</configuration>
</global>
<start to="stats"/>
<kill name="Kill">
<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<action name="stats">
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>GenerateStatsJob</name>
<class>eu.dnetlib.dhp.broker.oa.GenerateStatsJob</class>
<jar>dhp-broker-events-${projectVersion}.jar</jar>
<spark-opts>
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.shuffle.partitions=3840
</spark-opts>
<arg>--outputDir</arg><arg>${outputDir}</arg>
<arg>--dbUrl</arg><arg>${brokerDbUrl}</arg>
<arg>--dbUser</arg><arg>${brokerDbUser}</arg>
<arg>--dbPassword</arg><arg>${brokerDbPassword}</arg>
<arg>--brokerApiBaseUrl</arg><arg>${brokerApiBaseUrl}</arg>
</spark>
<ok to="End"/>
<error to="Kill"/>
</action>
<end name="End"/>
</workflow-app>
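
This new standalone workflow (paired with the config-default.xml above) runs only GenerateStatsJob, so the broker statistics can be regenerated and written to the broker database without repeating event generation and indexing.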

View File

@@ -334,6 +334,7 @@ public class MappersTest {
assertValidId(p.getCollectedfrom().get(0).getKey());
System.out.println(p.getTitle().get(0).getValue());
assertTrue(StringUtils.isNotBlank(p.getTitle().get(0).getValue()));
}
@Test

View File

@@ -46,6 +46,7 @@
<oaf:collectedFrom id="openaire____::crossref" name="Crossref"/>
<oaf:identifier identifierType="doi">10.1080/23744235.2020.1774644</oaf:identifier>
<oaf:journal eissn="2374-4243" ep="3" iss="" issn="2374-4235" sp="1" vol="">Infectious Diseases</oaf:journal>
<oaf:projectid validationDate="2020-12-07T11:15:59.627Z">corda__h2020::814530</oaf:projectid>
</metadata>
<about xmlns:oai="http://www.openarchives.org/OAI/2.0/">
<provenance xmlns="http://www.openarchives.org/OAI/2.0/provenance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/provenance http://www.openarchives.org/OAI/2.0/provenance.xsd">

View File

@@ -27,6 +27,7 @@ import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.oa.provision.model.JoinedEntity;
import eu.dnetlib.dhp.oa.provision.model.ProvisionModelSupport;
import eu.dnetlib.dhp.oa.provision.model.RelatedEntityWrapper;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.oaf.*;
import scala.Tuple2;
@@ -272,11 +273,7 @@ public class CreateRelatedEntitiesJob_phase2 {
.filter(Objects::nonNull)
.map(Qualifier::getClassid)
.filter(StringUtils::isNotBlank)
.anyMatch(c -> "orcid".equals(c.toLowerCase()));
}
private static FilterFunction<JoinedEntity> filterEmptyEntityFn() {
return (FilterFunction<JoinedEntity>) v -> Objects.nonNull(v.getEntity());
.anyMatch(c -> c.toLowerCase().contains(ModelConstants.ORCID));
}
private static void removeOutputDir(SparkSession spark, String path) {
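
A minimal sketch of the relaxed check above, assuming ModelConstants.ORCID resolves to the lowercase literal "orcid", so author pid classids such as orcid_pending now match as well:

// illustration only, not part of the commit
boolean matches = Stream
	.of("orcid", "orcid_pending")
	.map(String::toLowerCase)
	.anyMatch(c -> c.contains(ModelConstants.ORCID)); // true for both classids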

View File

@@ -7,13 +7,16 @@ import java.util.Set;
import com.google.common.collect.Sets;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.*;
public class GraphMappingUtils {
public static final String SEPARATOR = "_";
public static Set<String> authorPidTypes = Sets.newHashSet("orcid", "magidentifier");
public static Set<String> authorPidTypes = Sets
.newHashSet(
ModelConstants.ORCID, ModelConstants.ORCID_PENDING, "magidentifier");
public static String removePrefix(final String s) {
if (s.contains("|"))

View File

@@ -73,7 +73,9 @@ public class TemplateFactory {
final Collection<String> fields,
final String semanticclass,
final String semantischeme,
final DataInfo info) {
final DataInfo info,
final boolean validated,
final String validationDate) {
return getTemplate(resources.getRel())
.add("type", type)
.add("objIdentifier", escapeXml(removePrefix(objIdentifier)))
@@ -86,6 +88,8 @@
.add(
"provenanceaction",
info.getProvenanceaction() != null ? info.getProvenanceaction().getClassid() : "")
.add("validated", validated)
.add("validationdate", validationDate)
.render();
}

View File

@@ -1094,9 +1094,12 @@ public class XmlRecordFactory implements Serializable {
String.format("missing scheme for: <%s - %s>", type.toString(), targetType));
}
final HashSet<String> fields = Sets.newHashSet(mapFields(link, contexts));
if (rel.getValidated() == null)
rel.setValidated(false);
return templateFactory
.getRel(
targetType, rel.getTarget(), fields, rel.getRelClass(), scheme, rel.getDataInfo());
targetType, rel.getTarget(), fields, rel.getRelClass(), scheme, rel.getDataInfo(), rel.getValidated(),
rel.getValidationDate());
}
private List<String> listChildren(

View File

@@ -1,4 +1,5 @@
<rel inferred="$inferred$" trust="$trust$" inferenceprovenance="$inferenceprovenance$" provenanceaction="$provenanceaction$">
$if(validated)$<validated date="$validationdate$"/>$else$$endif$
<to class="$class$" scheme="$scheme$" type="$type$">$objIdentifier$</to>
$metadata:{ it | $it$ }$
</rel>
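
When a relation carries validated = true, the template now emits a <validated date="..."/> element inside <rel>; the XmlRecordFactoryTest additions below check for it via the //validated/@date XPath.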

View File

@@ -5,22 +5,27 @@ import static org.junit.jupiter.api.Assertions.*;
import java.io.IOException;
import java.io.StringReader;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.io.SAXReader;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import eu.dnetlib.dhp.oa.provision.model.JoinedEntity;
import eu.dnetlib.dhp.oa.provision.model.RelatedEntity;
import eu.dnetlib.dhp.oa.provision.model.RelatedEntityWrapper;
import eu.dnetlib.dhp.oa.provision.utils.ContextMapper;
import eu.dnetlib.dhp.oa.provision.utils.XmlRecordFactory;
import eu.dnetlib.dhp.schema.oaf.Project;
import eu.dnetlib.dhp.schema.oaf.Publication;
import eu.dnetlib.dhp.schema.oaf.Relation;
public class XmlRecordFactoryTest {
@@ -58,4 +63,67 @@ public class XmlRecordFactoryTest {
// TODO add assertions based of values extracted from the XML record
}
@Test
public void testXMLRecordFactoryWithValidatedProject() throws IOException, DocumentException {
ContextMapper contextMapper = new ContextMapper();
XmlRecordFactory xmlRecordFactory = new XmlRecordFactory(contextMapper, false, XmlConverterJob.schemaLocation,
otherDsTypeId);
Publication p = OBJECT_MAPPER
.readValue(IOUtils.toString(getClass().getResourceAsStream("publication.json")), Publication.class);
Project pj = OBJECT_MAPPER
.readValue(IOUtils.toString(getClass().getResourceAsStream("project.json")), Project.class);
Relation rel = OBJECT_MAPPER
.readValue(
(IOUtils.toString(getClass().getResourceAsStream("relToValidatedProject.json"))), Relation.class);
RelatedEntity relatedProject = CreateRelatedEntitiesJob_phase1.asRelatedEntity(pj, Project.class);
List<RelatedEntityWrapper> links = Lists.newArrayList();
RelatedEntityWrapper rew = new RelatedEntityWrapper(rel, relatedProject);
links.add(rew);
JoinedEntity je = new JoinedEntity<>(p);
je.setLinks(links);
String xml = xmlRecordFactory.build(je);
assertNotNull(xml);
Document doc = new SAXReader().read(new StringReader(xml));
assertNotNull(doc);
System.out.println(doc.asXML());
Assertions.assertEquals("2021-01-01", doc.valueOf("//validated/@date"));
}
@Test
public void testXMLRecordFactoryWithNonValidatedProject() throws IOException, DocumentException {
ContextMapper contextMapper = new ContextMapper();
XmlRecordFactory xmlRecordFactory = new XmlRecordFactory(contextMapper, false, XmlConverterJob.schemaLocation,
otherDsTypeId);
Publication p = OBJECT_MAPPER
.readValue(IOUtils.toString(getClass().getResourceAsStream("publication.json")), Publication.class);
Project pj = OBJECT_MAPPER
.readValue(IOUtils.toString(getClass().getResourceAsStream("project.json")), Project.class);
Relation rel = OBJECT_MAPPER
.readValue((IOUtils.toString(getClass().getResourceAsStream("relToProject.json"))), Relation.class);
RelatedEntity relatedProject = CreateRelatedEntitiesJob_phase1.asRelatedEntity(pj, Project.class);
List<RelatedEntityWrapper> links = Lists.newArrayList();
RelatedEntityWrapper rew = new RelatedEntityWrapper(rel, relatedProject);
links.add(rew);
JoinedEntity je = new JoinedEntity<>(p);
je.setLinks(links);
String xml = xmlRecordFactory.build(je);
assertNotNull(xml);
Document doc = new SAXReader().read(new StringReader(xml));
assertNotNull(doc);
System.out.println(doc.asXML());
assertEquals("", doc.valueOf("//rel/validated"));
}
}

View File

@@ -61,7 +61,7 @@
<FIELD indexable="true" name="funderoriginalname" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/fundingtree/funder/originalname"/>
<FIELD indexable="true" name="funderjurisdiction" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='project']/fundingtree/funder/jurisdiction"/><!-- RESULT FIELDS -->
<FIELD indexable="true" name="resulttitle" result="false" stat="false" xpath="//*[local-name() = 'entity']/*[local-name() ='result']/title | //*[local-name()='entity']/*[local-name()='result']/children/result/title"/>
<FIELD indexable="true" name="resultsubject" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/subject)"/>
<FIELD indexable="true" name="resultsubject" result="false" stat="false" type="string_ci" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/subject)"/>
<FIELD indexable="true" name="resultsubjectclass" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/subject/@classname)"/>
<FIELD indexable="true" multivalued="false" name="resultembargoenddate" result="false" stat="false" type="pdate" value="//*[local-name()='entity']/*[local-name()='result']/embargoenddate"/>
<FIELD indexable="true" multivalued="false" name="resultembargoendyear" result="false" stat="false" tokenizable="false" value="dnet:extractYear(//*[local-name()='entity']/*[local-name()='result']/embargoenddate)"/>
@@ -72,6 +72,7 @@
<FIELD indexable="true" name="resultdescription" result="false" stat="false" xpath="//*[local-name()='entity']/*[local-name()='result']//*[local-name()='description']"/>
<FIELD indexable="true" name="resultlicense" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='result']/children/instance/license"/>
<FIELD indexable="true" name="resultaccessright" result="false" stat="false" tokenizable="false" xpath="//*[local-name()='entity']/*[local-name()='result']/children/instance/accessright/@classname"/>
<FIELD indexable="true" name="resultresourcetypename" result="false" stat="false" type="string_ci" xpath="//*[local-name()='entity']/*[local-name()='result']/resourcetype/@classname"/>
<FIELD indexable="true" multivalued="false" name="resultbestaccessright" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/bestaccessright/@classname)"/>
<FIELD indexable="true" multivalued="false" name="resultdateofacceptance" result="false" stat="false" type="pdate" value="//*[local-name()='entity']/*[local-name()='result']/dateofacceptance"/>
<FIELD indexable="true" multivalued="false" name="resultacceptanceyear" result="false" stat="false" tokenizable="false" value="dnet:extractYear(//*[local-name()='entity']/*[local-name()='result']/dateofacceptance)"/>
@@ -79,6 +80,7 @@
<FIELD indexable="true" multivalued="true" name="resultauthor_nt" result="false" stat="false" type="string_ci" xpath="//*[local-name()='entity']/*[local-name()='result']/creator"/>
<FIELD indexable="true" multivalued="true" name="authorid" result="false" stat="false" type="string_ci" xpath="//*[local-name()='entity']/*[local-name()='result']/creator/@*[local-name() != 'rank' and local-name() != 'name' and local-name() != 'surname']"/>
<FIELD indexable="true" multivalued="true" name="authoridtype" result="false" stat="false" type="string_ci" xpath="//*[local-name()='entity']/*[local-name()='result']/creator/@*[local-name() != 'rank' and local-name() != 'name' and local-name() != 'surname']/local-name()"/>
<FIELD indexable="true" multivalued="true" name="orcidtypevalue" result="false" stat="false" type="string_ci" xpath="//*[local-name()='entity']/*[local-name()='result']/creator" value="string-join((./@*[local-name() = 'orcid'], ./@*[local-name() = 'orcid']/local-name()), '||' )"/>
<FIELD indexable="true" name="resulthostingdatasource" result="false" stat="false" tokenizable="false" value="distinct-values(concat(./@id, '||', ./@name))" xpath="//*[local-name()='entity']/*[local-name()='result']/children/instance/*[local-name()='hostedby']"/>
<FIELD indexable="true" name="resulthostingdatasourceid" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/children/instance/*[local-name()='hostedby']/@id)"/>
<FIELD indexable="true" name="resulthostingdatasourcename" result="false" stat="false" tokenizable="false" xpath="distinct-values(//*[local-name()='entity']/*[local-name()='result']/children/instance/*[local-name()='hostedby']/@name)"/>

View File

@@ -0,0 +1,109 @@
{
"id": "40|corda__h2020::79a0e16c122c9a18eb60e4a5e64b620d",
"originalId": [],
"pid": [],
"dateofcollection": "2020-01-01",
"dateoftransformation": "2020-01-01",
"extraInfo": [],
"oaiprovenance": null,
"websiteurl": {
"value": "https://web.site",
"datainfo": null
},
"code": {
"value": "79a0e",
"datainfo": null
},
"acronym": {
"value": "79a0e_acronym",
"datainfo": null
},
"title": {
"value": "79a0e_title",
"datainfo": null
},
"startdate": {
"value": "2019-02-01",
"datainfo": null
},
"enddate": {
"value": "2021-01-09",
"datainfo": null
},
"callidentifier": {
"value": "79a0e_callID",
"datainfo": null
},
"keywords": {
"value": "",
"datainfo": null
},
"duration": {
"value": "",
"datainfo": null
},
"ecsc39": {
"value": "true",
"datainfo": null
},
"oamandatepublications": {
"value": "true",
"datainfo": null
},
"ecarticle29_3": {
"value": "false",
"datainfo": null
},
"optional1": {
"value": "",
"datainfo": null
},
"optional2": {
"value": "",
"datainfo": null
},
"jsonextrainfo":{
"value": "",
"datainfo": null
},
"contactfullname":{
"value": "",
"datainfo": null
},
"contactfax": {
"value": "",
"datainfo": null
},
"contactphone": {
"value": "",
"datainfo": null
},
"contactemail": {
"value": "",
"datainfo": null
},
"summary": {
"value": "79a0e_description",
"datainfo": null
},
"currency": {
"value": "EUR",
"datainfo": null
},
"totalcost": 120000,
"fundedamount": 18000,
"h2020topiccode": "",
"h2020topicdescription": "",
"h2020classification": [],
"subjects": [
{
"value": "",
"qualifier": null,
"datainfo": null
}
],
"fundingtree": []
}

View File

@@ -39,6 +39,8 @@
<creator rank="4">Saykally, Jessica N.</creator>
<creator rank="5">Keeley, Kristen L.</creator>
<creator rank="6">Haris Hatic</creator>
<creator rank="7" name="Miriam" surname="Baglioni" orcid_pending="0000-0002-2273-9004">Baglioni, Miriam</creator>
<creator rank="8" name="Michele" surname="De Bonis" orcid="0000-0002-2273-9004">De Bonis, Michele</creator>
<dateofacceptance>2017-06-01</dateofacceptance>
<description>Withania somnifera has been used in traditional medicine for a variety
of neural disorders. Recently, chronic neurodegenerative conditions have been
@@ -115,7 +117,7 @@
<source>Cell Transplantation</source>
<resulttype classid="publication" classname="publication"
schemeid="dnet:result_typologies" schemename="dnet:result_typologies"/>
<resourcetype/>
<resourcetype classid="Book" classname="Book" schemeid="dnet:dataCite_resource" schemename="dnet:dataCite_resource" />
<journal issn="0963-6897" eissn="1555-3892" ep="1201" iss="7" sp="1193" vol="26"
>Cell Transplantation</journal>
<context id="NIH" label="National Institutes of Health" type="funding">
@@ -292,6 +294,28 @@
>rcuk________::23feba2a5ca7f6b6016bf3a45180da50</to>
<legalname>University of Delhi</legalname>
</rel>
<rel inferred="false" trust="0.9" inferenceprovenance="" provenanceaction="sysimport:crosswalk:repository">
<validated date="2021-01-01">true</validated>
<to class="isProducedBy" scheme="dnet:result_project_relations" type="project">corda_______::30c6b5ab90f30666de1d112fb93d8c77</to>
<code>227878</code>
<funding>
<funder id="ec__________::EC" shortname="EC" name="European Commission" jurisdiction="EU" />
<funding_level_0 name="FP7">ec__________::EC::FP7</funding_level_0>
<funding_level_1 name="SP2">ec__________::EC::FP7::SP2</funding_level_1>
<funding_level_2 name="ERC">ec__________::EC::FP7::SP2::ERC</funding_level_2>
</funding>
<title>Complex structure and dynamics of collective motion</title>
<acronym>COLLMOT</acronym>
<contracttype classid="ERC" classname="Support for frontier research (ERC)" schemeid="ec:FP7contractTypes" schemename="ec:FP7contractTypes" />
</rel>
<rel inferred="true" trust="0.72" inferenceprovenance="iis::document_referencedProjects" provenanceaction="iis">
<to class="isProducedBy" scheme="dnet:result_project_relations" type="project">irb_hr______::2330a1d0dac71ffbe15fbcbc807288d4</to>
<code>108-1083570-3635</code>
<funding>
<funder id="irb_hr______::MZOS" shortname="MZOS" name="Ministry of Science, Education and Sports of the Republic of Croatia (MSES)" jurisdiction="HR" />
</funding>
<title>Pentadecapeptide BPC 157 - further investigations</title>
</rel>
</rels>
<children>
<instance id="openaire____::55045bd2a65019fd8e6741a755395c8c">

View File

@@ -0,0 +1,31 @@
{
"collectedfrom": [
{
"key": "10|opendoar____::eccbc87e4b5ce2fe28308fd9f2a7baf3",
"value": "AMS Acta",
"dataInfo": null
}
],
"dataInfo": {
"invisible": false,
"inferred": false,
"deletedbyinference": false,
"trust": "0.9",
"inferenceprovenance": "",
"provenanceaction": {
"classid": "sysimport:crosswalk:repository",
"classname": "sysimport:crosswalk:repository",
"schemeid": "dnet:provenanceActions",
"schemename": "dnet:provenanceActions"
}
},
"lastupdatetimestamp": 1606898557407,
"relType": "resultProject",
"subRelType": "outcome",
"relClass": "isProducedBy",
"source": "50|CSC_________::0000ec4dd9df012feaafa77e71a0fb4c",
"target": "40|corda__h2020::79a0e16c122c9a18eb60e4a5e64b620d",
"validated": null,
"validationDate": null,
"properties": []
}

View File

@@ -0,0 +1,31 @@
{
"collectedfrom": [
{
"key": "10|opendoar____::eccbc87e4b5ce2fe28308fd9f2a7baf3",
"value": "AMS Acta",
"dataInfo": null
}
],
"dataInfo": {
"invisible": false,
"inferred": false,
"deletedbyinference": false,
"trust": "0.9",
"inferenceprovenance": "",
"provenanceaction": {
"classid": "sysimport:crosswalk:repository",
"classname": "sysimport:crosswalk:repository",
"schemeid": "dnet:provenanceActions",
"schemename": "dnet:provenanceActions"
}
},
"lastupdatetimestamp": 1606898557407,
"relType": "resultProject",
"subRelType": "outcome",
"relClass": "isProducedBy",
"source": "50|CSC_________::0000ec4dd9df012feaafa77e71a0fb4c",
"target": "40|corda__h2020::79a0e16c122c9a18eb60e4a5e64b620d",
"validated": true,
"validationDate": "2021-01-01",
"properties": []
}