commit f6677429c7
fixed conflicts

@@ -40,6 +40,7 @@ public class ExecCreateSchemas {
				.get(Paths.get(getClass().getResource("/").getPath()).toAbsolutePath() + directory)
				.toString();

		System.out.println(dir);
		if (!Files.exists(Paths.get(dir))) {
			Files.createDirectories(Paths.get(dir));
		}

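The hunk above amounts to a simple pattern: resolve an output directory relative to the compiled classes, create it (with any missing parents) only when it is absent, and print it for debugging. A minimal runnable sketch of that pattern; the "/jsonschemas" suffix is illustrative, not taken from the commit:

import java.nio.file.Files;
import java.nio.file.Paths;

public class EnsureOutputDir {
	public static void main(String[] args) throws Exception {
		// mirror the diff: classpath root plus a sub-directory, as an absolute path
		String dir = Paths
			.get(EnsureOutputDir.class.getResource("/").getPath())
			.toAbsolutePath() + "/jsonschemas"; // hypothetical sub-directory
		System.out.println(dir);
		if (!Files.exists(Paths.get(dir))) {
			Files.createDirectories(Paths.get(dir));
		}
	}
}
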
@@ -7,7 +7,7 @@ import java.io.Serializable;
 * @author miriam.baglioni
 * @Date 07/11/22
 */
-public class ImpactMeasures implements Serializable {
+public class ImpactIndicators implements Serializable {
	Score influence;
	Score influence_alt;
	Score popularity;

@@ -9,18 +9,18 @@ import com.github.imifou.jsonschema.module.addon.annotation.JsonSchema;

public class Indicator implements Serializable {
	@JsonSchema(description = "The impact measures (i.e. popularity)")
-	ImpactMeasures impactMeasures;
+	List<Score> bipIndicators;

	@JsonSchema(description = "The usage counts (i.e. downloads)")
	UsageCounts usageCounts;

	@JsonInclude(JsonInclude.Include.NON_NULL)
-	public ImpactMeasures getImpactMeasures() {
-		return impactMeasures;
+	public List<Score> getBipIndicators() {
+		return bipIndicators;
	}

-	public void setImpactMeasures(ImpactMeasures impactMeasures) {
-		this.impactMeasures = impactMeasures;
+	public void setBipIndicators(List<Score> bipIndicators) {
+		this.bipIndicators = bipIndicators;
	}

	@JsonInclude(JsonInclude.Include.NON_NULL)

@@ -12,6 +12,7 @@ import com.fasterxml.jackson.annotation.JsonSetter;
 * @Date 07/11/22
 */
public class Score implements Serializable {
+	private String indicator;
	private String score;

	@JsonProperty("class")

@@ -34,4 +35,12 @@ public class Score implements Serializable {
	public void setClazz(String clazz) {
		this.clazz = clazz;
	}

+	public String getIndicator() {
+		return indicator;
+	}
+
+	public void setIndicator(String indicator) {
+		this.indicator = indicator;
+	}
}

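The practical effect of the @JsonProperty("class") annotation next to the new indicator accessors: on serialization the clazz field is emitted under the JSON key "class", which Java reserves as a keyword and therefore cannot be a field name. A hedged sketch with invented values, assuming it lives in the same package as Score and that setScore(...) exists alongside the accessors shown in the hunks:

import com.fasterxml.jackson.databind.ObjectMapper;

public class ScoreJsonExample {
	public static void main(String[] args) throws Exception {
		Score s = new Score();
		s.setIndicator("influence"); // hypothetical indicator name
		s.setScore("5.91e-9");       // hypothetical value
		s.setClazz("C5");            // hypothetical class label
		// prints {"indicator":"influence","score":"5.91e-9","class":"C5"}
		System.out.println(new ObjectMapper().writeValueAsString(s));
	}
}
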
@@ -1,11 +1,13 @@

package eu.dnetlib.dhp.oa.model;

import java.io.Serializable;

/**
 * @author miriam.baglioni
 * @Date 07/11/22
 */
-public class UsageCounts {
+public class UsageCounts implements Serializable {
	private String downloads;
	private String views;

@@ -7,6 +7,7 @@ import java.util.List;
import com.github.imifou.jsonschema.module.addon.annotation.JsonSchema;

import eu.dnetlib.dhp.oa.model.Container;
+import eu.dnetlib.dhp.oa.model.Indicator;

/**
 * To store information about the datasource OpenAIRE collects information from. It contains the following parameters: -

@@ -128,6 +129,17 @@ public class Datasource implements Serializable {
	@JsonSchema(description = "Information about the journal, if this data source is of type Journal.")
	private Container journal; // issn etc del Journal

+//	@JsonSchema(description = "Indicators computed for this Datasource, for example UsageCount ones")
+//	private Indicator indicators;
+//
+//	public Indicator getIndicators() {
+//		return indicators;
+//	}
+//
+//	public void setIndicators(Indicator indicators) {
+//		this.indicators = indicators;
+//	}
+
	public String getId() {
		return id;
	}

@@ -1,38 +0,0 @@
-
-package eu.dnetlib.dhp.oa.model.graph;
-
-import java.io.Serializable;
-
-/**
- * To represent the generic node in a relation. It has the following parameters: - private String id the openaire id of
- * the entity in the relation - private String type the type of the entity in the relation. Consider the generic
- * relation between a Result R and a Project P, the node representing R will have as id the id of R and as type result,
- * while the node representing the project will have as id the id of the project and as type project
- */
-public class Node implements Serializable {
-	private String id;
-	private String type;
-
-	public String getId() {
-		return id;
-	}
-
-	public void setId(String id) {
-		this.id = id;
-	}
-
-	public String getType() {
-		return type;
-	}
-
-	public void setType(String type) {
-		this.type = type;
-	}
-
-	public static Node newInstance(String id, String type) {
-		Node node = new Node();
-		node.id = id;
-		node.type = type;
-		return node;
-	}
-}

@@ -6,6 +6,8 @@ import java.util.List;

import com.github.imifou.jsonschema.module.addon.annotation.JsonSchema;

+import eu.dnetlib.dhp.oa.model.Indicator;
+
/**
 * This is the class representing the Project in the model used for the dumps of the whole graph. At the moment the dump
 * of the Projects differs from the other dumps because we do not create relations between Funders (Organization) and

@@ -68,6 +70,17 @@ public class Project implements Serializable {
	@JsonSchema(description = "The h2020 programme funding the project")
	private List<Programme> h2020programme;

+//	@JsonSchema(description = "Indicators computed for this project, for example UsageCount ones")
+//	private Indicator indicators;
+//
+//	public Indicator getIndicators() {
+//		return indicators;
+//	}
+//
+//	public void setIndicators(Indicator indicators) {
+//		this.indicators = indicators;
+//	}
+
	public String getId() {
		return id;
	}

@@ -15,11 +15,17 @@ import eu.dnetlib.dhp.oa.model.Provenance;
 * provenance of the relation
 */
public class Relation implements Serializable {
-	@JsonSchema(description = "The node source in the relation")
-	private Node source;
+	@JsonSchema(description = "The identifier of the source in the relation")
+	private String source;
+
+	@JsonSchema(description = "The entity type of the source in the relation")
+	private String sourceType;

-	@JsonSchema(description = "The node target in the relation")
-	private Node target;
+	@JsonSchema(description = "The identifier of the target in the relation")
+	private String target;
+
+	@JsonSchema(description = "The entity type of the target in the relation")
+	private String targetType;

	@JsonSchema(description = "To represent the semantics of a relation between two entities")
	private RelType reltype;

@@ -34,22 +40,38 @@ public class Relation implements Serializable {
	@JsonSchema(description = "The date when the relation was collected from OpenAIRE")
	private String validationDate;

-	public Node getSource() {
+	public String getSource() {
		return source;
	}

-	public void setSource(Node source) {
+	public void setSource(String source) {
		this.source = source;
	}

+	public String getSourceType() {
+		return sourceType;
+	}
+
+	public void setSourceType(String sourceType) {
+		this.sourceType = sourceType;
+	}
+
-	public Node getTarget() {
+	public String getTarget() {
		return target;
	}

-	public void setTarget(Node target) {
+	public void setTarget(String target) {
		this.target = target;
	}

+	public String getTargetType() {
+		return targetType;
+	}
+
+	public void setTargetType(String targetType) {
+		this.targetType = targetType;
+	}
+
	public RelType getReltype() {
		return reltype;
	}

@@ -85,13 +107,16 @@ public class Relation implements Serializable {
	@Override
	public int hashCode() {

-		return Objects.hash(source.getId(), target.getId(), reltype.getType() + ":" + reltype.getName());
+		return Objects.hash(source, target, reltype.getType() + ":" + reltype.getName());
	}

-	public static Relation newInstance(Node source, Node target, RelType reltype, Provenance provenance) {
+	public static Relation newInstance(String source, String sourceType, String target, String targetType,
+		RelType reltype, Provenance provenance) {
		Relation relation = new Relation();
		relation.source = source;
+		relation.sourceType = sourceType;
		relation.target = target;
+		relation.targetType = targetType;
		relation.reltype = reltype;
		relation.provenance = provenance;
		return relation;

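With Node deleted, a relation now carries plain identifier strings plus explicit entity-type labels, and hashCode hashes those strings directly instead of dereferencing node objects. A hedged sketch of the updated factory in use, assuming it sits in the same package as Relation and that RelType and Provenance have the usual no-arg bean constructors used elsewhere in this model; the ids and semantics are invented:

import eu.dnetlib.dhp.oa.model.Provenance;

public class RelationExample {
	static Relation link(String resultId, String projectId) {
		RelType reltype = new RelType();           // e.g. name "isProducedBy", type "resultProject" (hypothetical)
		Provenance provenance = new Provenance();  // e.g. provenance "Harvested", trust "0.9" (hypothetical)
		return Relation.newInstance(
			resultId, "result",    // source id + sourceType
			projectId, "project",  // target id + targetType
			reltype, provenance);
	}
}
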
@@ -12,7 +12,7 @@
    },
    "id" : {
      "type" : "string",
-     "description": "OpenAIRE id of the research community/research infrastructure"
+     "description" : "The OpenAIRE id for the community/research infrastructure"
    },
    "name" : {
      "type" : "string",

@@ -21,7 +21,10 @@
    "subject" : {
      "description" : "Only for research communities: the list of the subjects associated to the research community",
      "type" : "array",
-     "items": {"type": "string"}
+     "items" : {
+       "type" : "string",
+       "description" : "Only for research communities: the list of the subjects associated to the research community"
+     }
    },
    "type" : {
      "type" : "string",

@@ -0,0 +1,621 @@
{
  "$schema" : "http://json-schema.org/draft-07/schema#",
  "definitions" : {
    "CfHbKeyValue" : {
      "type" : "object",
      "properties" : {
        "key" : {
          "type" : "string",
          "description" : "the OpenAIRE identifier of the data source"
        },
        "value" : {
          "type" : "string",
          "description" : "the name of the data source"
        }
      }
    },
    "Provenance" : {
      "type" : "object",
      "properties" : {
        "provenance" : {
          "type" : "string"
        },
        "trust" : {
          "type" : "string"
        }
      }
    },
    "ResultPid" : {
      "type" : "object",
      "properties" : {
        "scheme" : {
          "type" : "string",
          "description" : "The scheme of the persistent identifier for the result (i.e. doi). If the pid is here it means the information for the pid has been collected from an authority for that pid type (i.e. Crossref/Datacite for doi). The set of authoritative pid is: doi when collected from Crossref or Datacite pmid when collected from EuroPubmed, arxiv when collected from arXiv, handle from the repositories"
        },
        "value" : {
          "type" : "string",
          "description" : "The value expressed in the scheme (i.e. 10.1000/182)"
        }
      }
    }
  },
  "type" : "object",
  "properties" : {
    "author" : {
      "type" : "array",
      "items" : {
        "type" : "object",
        "properties" : {
          "fullname" : {
            "type" : "string"
          },
          "name" : {
            "type" : "string"
          },
          "pid" : {
            "type" : "object",
            "properties" : {
              "id" : {
                "type" : "object",
                "properties" : {
                  "scheme" : {
                    "type" : "string",
                    "description" : "The author's pid scheme. OpenAIRE currently supports 'ORCID'"
                  },
                  "value" : {
                    "type" : "string",
                    "description" : "The author's pid value in that scheme (i.e. 0000-1111-2222-3333)"
                  }
                }
              },
              "provenance" : {
                "allOf" : [ {
                  "$ref" : "#/definitions/Provenance"
                }, {
                  "description" : "The reason why the pid was associated to the author"
                } ]
              }
            },
            "description" : "The author's persistent identifiers"
          },
          "rank" : {
            "type" : "integer"
          },
          "surname" : {
            "type" : "string"
          }
        }
      }
    },
    "bestaccessright" : {
      "type" : "object",
      "properties" : {
        "code" : {
          "type" : "string",
          "description" : "COAR access mode code: http://vocabularies.coar-repositories.org/documentation/access_rights/"
        },
        "label" : {
          "type" : "string",
          "description" : "Label for the access mode"
        },
        "scheme" : {
          "type" : "string",
          "description" : "Scheme of reference for access right code. Always set to COAR access rights vocabulary: http://vocabularies.coar-repositories.org/documentation/access_rights/"
        }
      },
      "description" : "The openest of the access rights of this result."
    },
    "codeRepositoryUrl" : {
      "type" : "string",
      "description" : "Only for results with type 'software': the URL to the repository with the source code"
    },
    "collectedfrom" : {
      "description" : "Information about the sources from which the record has been collected",
      "type" : "array",
      "items" : {
        "allOf" : [ {
          "$ref" : "#/definitions/CfHbKeyValue"
        }, {
          "description" : "Information about the sources from which the record has been collected"
        } ]
      }
    },
    "contactgroup" : {
      "description" : "Only for results with type 'software': Information on the group responsible for providing further information regarding the resource",
      "type" : "array",
      "items" : {
        "type" : "string",
        "description" : "Only for results with type 'software': Information on the group responsible for providing further information regarding the resource"
      }
    },
    "contactperson" : {
      "description" : "Only for results with type 'software': Information on the person responsible for providing further information regarding the resource",
      "type" : "array",
      "items" : {
        "type" : "string",
        "description" : "Only for results with type 'software': Information on the person responsible for providing further information regarding the resource"
      }
    },
    "container" : {
      "type" : "object",
      "properties" : {
        "conferencedate" : {
          "type" : "string"
        },
        "conferenceplace" : {
          "type" : "string"
        },
        "edition" : {
          "type" : "string",
          "description" : "Edition of the journal or conference proceeding"
        },
        "ep" : {
          "type" : "string",
          "description" : "End page"
        },
        "iss" : {
          "type" : "string",
          "description" : "Journal issue number"
        },
        "issnLinking" : {
          "type" : "string"
        },
        "issnOnline" : {
          "type" : "string"
        },
        "issnPrinted" : {
          "type" : "string"
        },
        "name" : {
          "type" : "string",
          "description" : "Name of the journal or conference"
        },
        "sp" : {
          "type" : "string",
          "description" : "Start page"
        },
        "vol" : {
          "type" : "string",
          "description" : "Volume"
        }
      },
      "description" : "Container has information about the conference or journal where the result has been presented or published"
    },
    "context" : {
      "description" : "Reference to a relevant research infrastructure, initiative or community (RI/RC) among those collaborating with OpenAIRE. Please see https://connect.openaire.eu",
      "type" : "array",
      "items" : {
        "type" : "object",
        "properties" : {
          "code" : {
            "type" : "string",
            "description" : "Code identifying the RI/RC"
          },
          "label" : {
            "type" : "string",
            "description" : "Label of the RI/RC"
          },
          "provenance" : {
            "description" : "Why this result is associated to the RI/RC.",
            "type" : "array",
            "items" : {
              "allOf" : [ {
                "$ref" : "#/definitions/Provenance"
              }, {
                "description" : "Why this result is associated to the RI/RC."
              } ]
            }
          }
        },
        "description" : "Reference to a relevant research infrastructure, initiative or community (RI/RC) among those collaborating with OpenAIRE. Please see https://connect.openaire.eu"
      }
    },
    "contributor" : {
      "description" : "Contributors for the result",
      "type" : "array",
      "items" : {
        "type" : "string",
        "description" : "Contributors for the result"
      }
    },
    "country" : {
      "description" : "The list of countries associated to this result",
      "type" : "array",
      "items" : {
        "type" : "object",
        "properties" : {
          "code" : {
            "type" : "string",
            "description" : "ISO 3166-1 alpha-2 country code (i.e. IT)"
          },
          "label" : {
            "type" : "string",
            "description" : "The label for that code (i.e. Italy)"
          },
          "provenance" : {
            "allOf" : [ {
              "$ref" : "#/definitions/Provenance"
            }, {
              "description" : "Why this result is associated to the country."
            } ]
          }
        },
        "description" : "The list of countries associated to this result"
      }
    },
    "coverage" : {
      "type" : "array",
      "items" : {
        "type" : "string"
      }
    },
    "dateofcollection" : {
      "type" : "string",
      "description" : "When OpenAIRE collected the record the last time"
    },
    "description" : {
      "type" : "array",
      "items" : {
        "type" : "string"
      }
    },
    "documentationUrl" : {
      "description" : "Only for results with type 'software': URL to the software documentation",
      "type" : "array",
      "items" : {
        "type" : "string",
        "description" : "Only for results with type 'software': URL to the software documentation"
      }
    },
    "embargoenddate" : {
      "type" : "string",
      "description" : "Date when the embargo ends and this result turns Open Access"
    },
    "format" : {
      "type" : "array",
      "items" : {
        "type" : "string"
      }
    },
    "geolocation" : {
      "description" : "Geolocation information",
      "type" : "array",
      "items" : {
        "type" : "object",
        "properties" : {
          "box" : {
            "type" : "string"
          },
          "place" : {
            "type" : "string"
          },
          "point" : {
            "type" : "string"
          }
        },
        "description" : "Geolocation information"
      }
    },
    "id" : {
      "type" : "string",
      "description" : "The OpenAIRE identifiers for this result"
    },
    "indicators" : {
      "type" : "object",
      "properties" : {
        "bipIndicators" : {
          "description" : "The impact measures (i.e. popularity)",
          "type" : "array",
          "items" : {
            "type" : "object",
            "properties" : {
              "clazz" : {
                "type" : "string"
              },
              "indicator" : {
                "type" : "string"
              },
              "score" : {
                "type" : "string"
              }
            },
            "description" : "The impact measures (i.e. popularity)"
          }
        },
        "usageCounts" : {
          "type" : "object",
          "properties" : {
            "downloads" : {
              "type" : "string"
            },
            "views" : {
              "type" : "string"
            }
          },
          "description" : "The usage counts (i.e. downloads)"
        }
      },
      "description" : "Indicators computed for this result, for example UsageCount ones"
    },
    "instance" : {
      "description" : "Each instance is one specific materialisation or version of the result. For example, you can have one result with three instance: one is the pre-print, one is the post-print, one is te published version",
      "type" : "array",
      "items" : {
        "type" : "object",
        "properties" : {
          "accessright" : {
            "type" : "object",
            "properties" : {
              "code" : {
                "type" : "string",
                "description" : "COAR access mode code: http://vocabularies.coar-repositories.org/documentation/access_rights/"
              },
              "label" : {
                "type" : "string",
                "description" : "Label for the access mode"
              },
              "openAccessRoute" : {
                "type" : "string",
                "enum" : [ "gold", "green", "hybrid", "bronze" ]
              },
              "scheme" : {
                "type" : "string",
                "description" : "Scheme of reference for access right code. Always set to COAR access rights vocabulary: http://vocabularies.coar-repositories.org/documentation/access_rights/"
              }
            },
            "description" : "The accessRights for this materialization of the result"
          },
          "alternateIdentifier" : {
            "description" : "All the identifiers other than pids forged by an authorithy for the pid type (i.e. Crossref for DOIs",
            "type" : "array",
            "items" : {
              "type" : "object",
              "properties" : {
                "scheme" : {
                  "type" : "string",
                  "description" : "The scheme of the identifier. It can be a persistent identifier (i.e. doi). If it is present in the alternate identifiers it means it has not been forged by an authority for that pid. For example we collect metadata from an institutional repository that provides as identifier for the result also the doi"
                },
                "value" : {
                  "type" : "string",
                  "description" : "The value expressed in the scheme"
                }
              },
              "description" : "All the identifiers other than pids forged by an authorithy for the pid type (i.e. Crossref for DOIs"
            }
          },
          "articleprocessingcharge" : {
            "type" : "object",
            "properties" : {
              "amount" : {
                "type" : "string"
              },
              "currency" : {
                "type" : "string"
              }
            },
            "description" : "The money spent to make this book or article available in Open Access. Source for this information is the OpenAPC initiative."
          },
          "collectedfrom" : {
            "allOf" : [ {
              "$ref" : "#/definitions/CfHbKeyValue"
            }, {
              "description" : "Information about the source from which the record has been collected"
            } ]
          },
          "hostedby" : {
            "allOf" : [ {
              "$ref" : "#/definitions/CfHbKeyValue"
            }, {
              "description" : "Information about the source from which the instance can be viewed or downloaded."
            } ]
          },
          "license" : {
            "type" : "string"
          },
          "pid" : {
            "type" : "array",
            "items" : {
              "$ref" : "#/definitions/ResultPid"
            }
          },
          "publicationdate" : {
            "type" : "string",
            "description" : "Date of the research product"
          },
          "refereed" : {
            "type" : "string",
            "description" : "If this instance has been peer-reviewed or not. Allowed values are peerReviewed, nonPeerReviewed, UNKNOWN (as defined in https://api.openaire.eu/vocabularies/dnet:review_levels)"
          },
          "type" : {
            "type" : "string",
            "description" : "The specific sub-type of this instance (see https://api.openaire.eu/vocabularies/dnet:result_typologies following the links)"
          },
          "url" : {
            "description" : "URLs to the instance. They may link to the actual full-text or to the landing page at the hosting source. ",
            "type" : "array",
            "items" : {
              "type" : "string",
              "description" : "URLs to the instance. They may link to the actual full-text or to the landing page at the hosting source. "
            }
          }
        },
        "description" : "Each instance is one specific materialisation or version of the result. For example, you can have one result with three instance: one is the pre-print, one is the post-print, one is te published version"
      }
    },
    "language" : {
      "type" : "object",
      "properties" : {
        "code" : {
          "type" : "string",
          "description" : "alpha-3/ISO 639-2 code of the language"
        },
        "label" : {
          "type" : "string",
          "description" : "Language label in English"
        }
      }
    },
    "lastupdatetimestamp" : {
      "type" : "integer",
      "description" : "Timestamp of last update of the record in OpenAIRE"
    },
    "maintitle" : {
      "type" : "string",
      "description" : "A name or title by which a scientific result is known. May be the title of a publication, of a dataset or the name of a piece of software."
    },
    "originalId" : {
      "description" : "Identifiers of the record at the original sources",
      "type" : "array",
      "items" : {
        "type" : "string",
        "description" : "Identifiers of the record at the original sources"
      }
    },
    "pid" : {
      "description" : "Persistent identifiers of the result",
      "type" : "array",
      "items" : {
        "allOf" : [ {
          "$ref" : "#/definitions/ResultPid"
        }, {
          "description" : "Persistent identifiers of the result"
        } ]
      }
    },
    "programmingLanguage" : {
      "type" : "string",
      "description" : "Only for results with type 'software': the programming language"
    },
    "projects" : {
      "description" : "List of projects (i.e. grants) that (co-)funded the production ofn the research results",
      "type" : "array",
      "items" : {
        "type" : "object",
        "properties" : {
          "acronym" : {
            "type" : "string",
            "description" : "The acronym of the project"
          },
          "code" : {
            "type" : "string",
            "description" : "The grant agreement number"
          },
          "funder" : {
            "type" : "object",
            "properties" : {
              "fundingStream" : {
                "type" : "string",
                "description" : "Stream of funding (e.g. for European Commission can be H2020 or FP7)"
              },
              "jurisdiction" : {
                "type" : "string",
                "description" : "Geographical jurisdiction (e.g. for European Commission is EU, for Croatian Science Foundation is HR)"
              },
              "name" : {
                "type" : "string",
                "description" : "The name of the funder (European Commission)"
              },
              "shortName" : {
                "type" : "string",
                "description" : "The short name of the funder (EC)"
              }
            },
            "description" : "Information about the funder funding the project"
          },
          "id" : {
            "type" : "string",
            "description" : "The OpenAIRE id for the project"
          },
          "provenance" : {
            "$ref" : "#/definitions/Provenance"
          },
          "title" : {
            "type" : "string"
          },
          "validated" : {
            "type" : "object",
            "properties" : {
              "validatedByFunder" : {
                "type" : "boolean"
              },
              "validationDate" : {
                "type" : "string"
              }
            }
          }
        },
        "description" : "List of projects (i.e. grants) that (co-)funded the production ofn the research results"
      }
    },
    "publicationdate" : {
      "type" : "string",
      "description" : "Main date of the research product: typically the publication or issued date. In case of a research result with different versions with different dates, the date of the result is selected as the most frequent well-formatted date. If not available, then the most recent and complete date among those that are well-formatted. For statistics, the year is extracted and the result is counted only among the result of that year. Example: Pre-print date: 2019-02-03, Article date provided by repository: 2020-02, Article date provided by Crossref: 2020, OpenAIRE will set as date 2019-02-03, because it’s the most recent among the complete and well-formed dates. If then the repository updates the metadata and set a complete date (e.g. 2020-02-12), then this will be the new date for the result because it becomes the most recent most complete date. However, if OpenAIRE then collects the pre-print from another repository with date 2019-02-03, then this will be the “winning date” because it becomes the most frequent well-formatted date."
    },
    "publisher" : {
      "type" : "string",
      "description" : "The name of the entity that holds, archives, publishes prints, distributes, releases, issues, or produces the resource."
    },
    "size" : {
      "type" : "string",
      "description" : "Only for results with type 'dataset': the declared size of the dataset"
    },
    "source" : {
      "description" : "See definition of Dublin Core field dc:source",
      "type" : "array",
      "items" : {
        "type" : "string",
        "description" : "See definition of Dublin Core field dc:source"
      }
    },
    "subjects" : {
      "description" : "Keywords associated to the result",
      "type" : "array",
      "items" : {
        "type" : "object",
        "properties" : {
          "provenance" : {
            "allOf" : [ {
              "$ref" : "#/definitions/Provenance"
            }, {
              "description" : "Why this subject is associated to the result"
            } ]
          },
          "subject" : {
            "type" : "object",
            "properties" : {
              "scheme" : {
                "type" : "string",
                "description" : "OpenAIRE subject classification scheme (https://api.openaire.eu/vocabularies/dnet:subject_classification_typologies)."
              },
              "value" : {
                "type" : "string",
                "description" : "The value for the subject in the selected scheme. When the scheme is 'keyword', it means that the subject is free-text (i.e. not a term from a controlled vocabulary)."
              }
            }
          }
        },
        "description" : "Keywords associated to the result"
      }
    },
    "subtitle" : {
      "type" : "string",
      "description" : "Explanatory or alternative name by which a scientific result is known."
    },
    "tool" : {
      "description" : "Only for results with type 'other': tool useful for the interpretation and/or re-used of the research product",
      "type" : "array",
      "items" : {
        "type" : "string",
        "description" : "Only for results with type 'other': tool useful for the interpretation and/or re-used of the research product"
      }
    },
    "type" : {
      "type" : "string",
      "description" : "Type of the result: one of 'publication', 'dataset', 'software', 'other' (see also https://api.openaire.eu/vocabularies/dnet:result_typologies)"
    },
    "version" : {
      "type" : "string",
      "description" : "Version of the result"
    }
  }
}

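The indicators block in the new schema above flattens the former per-measure object into an array of {clazz, indicator, score} entries next to usageCounts. A hedged sketch that builds a conforming fragment with Jackson; all values are invented. Note that the generated schema lists the raw Java field name "clazz", while serializing the Score model object itself would emit "class" via its @JsonProperty annotation:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class IndicatorsFragment {
	public static void main(String[] args) throws Exception {
		ObjectMapper mapper = new ObjectMapper();
		ObjectNode indicators = mapper.createObjectNode();
		ObjectNode score = mapper.createObjectNode()
			.put("indicator", "popularity") // hypothetical values throughout
			.put("score", "0.0031")
			.put("clazz", "C4");
		indicators.putArray("bipIndicators").add(score);
		indicators.putObject("usageCounts")
			.put("downloads", "512")
			.put("views", "2048");
		System.out.println(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(indicators));
	}
}
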
@@ -48,7 +48,9 @@
    "type" : "string",
    "description" : "The date of last validation against the OpenAIRE guidelines for the datasource records"
  },
- "description": {"type": "string"},
+ "description" : {
+   "type" : "string"
+ },
  "englishname" : {
    "type" : "string",
    "description" : "The English name of the datasource"

@@ -60,8 +62,12 @@
  "journal" : {
    "type" : "object",
    "properties" : {
-     "conferencedate": {"type": "string"},
-     "conferenceplace": {"type": "string"},
+     "conferencedate" : {
+       "type" : "string"
+     },
+     "conferenceplace" : {
+       "type" : "string"
+     },
      "edition" : {
        "type" : "string",
        "description" : "Edition of the journal or conference proceeding"

@@ -74,9 +80,15 @@
        "type" : "string",
        "description" : "Journal issue number"
      },
-     "issnLinking": {"type": "string"},
-     "issnOnline": {"type": "string"},
-     "issnPrinted": {"type": "string"},
+     "issnLinking" : {
+       "type" : "string"
+     },
+     "issnOnline" : {
+       "type" : "string"
+     },
+     "issnPrinted" : {
+       "type" : "string"
+     },
      "name" : {
        "type" : "string",
        "description" : "Name of the journal or conference"

@@ -100,7 +112,9 @@
      "description" : "The languages present in the data source's content, as defined by OpenDOAR."
    }
  },
- "logourl": {"type": "string"},
+ "logourl" : {
+   "type" : "string"
+ },
  "missionstatementurl" : {
    "type" : "string",
    "description" : "The URL of a mission statement describing the designated community of the data source. As defined by re3data.org"

@@ -175,6 +189,8 @@
    "type" : "boolean",
    "description" : "As defined by redata.org: 'yes' if the data source supports versioning, 'no' otherwise."
  },
- "websiteurl": {"type": "string"}
+ "websiteurl" : {
+   "type" : "string"
+ }
}
}

@@ -17,8 +17,12 @@
  "Provenance" : {
    "type" : "object",
    "properties" : {
-     "provenance": {"type": "string"},
-     "trust": {"type": "string"}
+     "provenance" : {
+       "type" : "string"
+     },
+     "trust" : {
+       "type" : "string"
+     }
    }
  },
  "ResultPid" : {

@@ -33,13 +37,6 @@
        "description" : "The value expressed in the scheme (i.e. 10.1000/182)"
      }
    }
  },
- "Score": {
-   "type": "object",
-   "properties": {
-     "clazz": {"type": "string"},
-     "score": {"type": "string"}
-   }
- }
  },
  "type" : "object",

@@ -49,8 +46,12 @@
  "items" : {
    "type" : "object",
    "properties" : {
-     "fullname": {"type": "string"},
-     "name": {"type": "string"},
+     "fullname" : {
+       "type" : "string"
+     },
+     "name" : {
+       "type" : "string"
+     },
      "pid" : {
        "type" : "object",
        "properties" : {

@@ -68,16 +69,21 @@
        }
      },
      "provenance" : {
-       "allOf": [
-         {"$ref": "#/definitions/Provenance"},
-         {"description": "The reason why the pid was associated to the author"}
-       ]
+       "allOf" : [ {
+         "$ref" : "#/definitions/Provenance"
+       }, {
+         "description" : "The reason why the pid was associated to the author"
+       } ]
      }
    },
    "description" : "The author's persistent identifiers"
  },
- "rank": {"type": "integer"},
- "surname": {"type": "string"}
+ "rank" : {
+   "type" : "integer"
+ },
+ "surname" : {
+   "type" : "string"
+ }
  }
}
},

@@ -107,10 +113,11 @@
  "description" : "Information about the sources from which the record has been collected",
  "type" : "array",
  "items" : {
-   "allOf": [
-     {"$ref": "#/definitions/CfHbKeyValue"},
-     {"description": "Information about the sources from which the record has been collected"}
-   ]
+   "allOf" : [ {
+     "$ref" : "#/definitions/CfHbKeyValue"
+   }, {
+     "description" : "Information about the sources from which the record has been collected"
+   } ]
  }
},
"contactgroup" : {

@@ -132,8 +139,12 @@
"container" : {
  "type" : "object",
  "properties" : {
-   "conferencedate": {"type": "string"},
-   "conferenceplace": {"type": "string"},
+   "conferencedate" : {
+     "type" : "string"
+   },
+   "conferenceplace" : {
+     "type" : "string"
+   },
    "edition" : {
      "type" : "string",
      "description" : "Edition of the journal or conference proceeding"

@@ -146,9 +157,15 @@
      "type" : "string",
      "description" : "Journal issue number"
    },
-   "issnLinking": {"type": "string"},
-   "issnOnline": {"type": "string"},
-   "issnPrinted": {"type": "string"},
+   "issnLinking" : {
+     "type" : "string"
+   },
+   "issnOnline" : {
+     "type" : "string"
+   },
+   "issnPrinted" : {
+     "type" : "string"
+   },
    "name" : {
      "type" : "string",
      "description" : "Name of the journal or conference"

@@ -182,10 +199,11 @@
    "description" : "Why this result is associated to the RI/RC.",
    "type" : "array",
    "items" : {
-     "allOf": [
-       {"$ref": "#/definitions/Provenance"},
-       {"description": "Why this result is associated to the RI/RC."}
-     ]
+     "allOf" : [ {
+       "$ref" : "#/definitions/Provenance"
+     }, {
+       "description" : "Why this result is associated to the RI/RC."
+     } ]
    }
  }
},

@@ -215,10 +233,11 @@
    "description" : "The label for that code (i.e. Italy)"
  },
  "provenance" : {
-   "allOf": [
-     {"$ref": "#/definitions/Provenance"},
-     {"description": "Why this result is associated to the country."}
-   ]
+   "allOf" : [ {
+     "$ref" : "#/definitions/Provenance"
+   }, {
+     "description" : "Why this result is associated to the country."
+   } ]
  }
},
"description" : "The list of countries associated to this result"

@@ -226,7 +245,9 @@
},
"coverage" : {
  "type" : "array",
- "items": {"type": "string"}
+ "items" : {
+   "type" : "string"
+ }
},
"dateofcollection" : {
  "type" : "string",

@@ -234,7 +255,9 @@
},
"description" : {
  "type" : "array",
- "items": {"type": "string"}
+ "items" : {
+   "type" : "string"
+ }
},
"documentationUrl" : {
  "description" : "Only for results with type 'software': URL to the software documentation",

@@ -250,7 +273,9 @@
},
"format" : {
  "type" : "array",
- "items": {"type": "string"}
+ "items" : {
+   "type" : "string"
+ }
},
"geolocation" : {
  "description" : "Geolocation information",

@@ -258,9 +283,15 @@
  "items" : {
    "type" : "object",
    "properties" : {
-     "box": {"type": "string"},
-     "place": {"type": "string"},
-     "point": {"type": "string"}
+     "box" : {
+       "type" : "string"
+     },
+     "place" : {
+       "type" : "string"
+     },
+     "point" : {
+       "type" : "string"
+     }
    },
    "description" : "Geolocation information"
  }

@@ -272,22 +303,34 @@
"indicators" : {
  "type" : "object",
  "properties" : {
-   "impactMeasures": {
+   "bipIndicators" : {
      "description" : "The impact measures (i.e. popularity)",
      "type" : "array",
      "items" : {
        "type" : "object",
        "properties" : {
-         "impulse": {"$ref": "#/definitions/Score"},
-         "influence": {"$ref": "#/definitions/Score"},
-         "influence_alt": {"$ref": "#/definitions/Score"},
-         "popularity": {"$ref": "#/definitions/Score"},
-         "popularity_alt": {"$ref": "#/definitions/Score"}
+         "clazz" : {
+           "type" : "string"
+         },
+         "indicator" : {
+           "type" : "string"
+         },
+         "score" : {
+           "type" : "string"
+         }
        },
        "description" : "The impact measures (i.e. popularity)"
      }
    },
    "usageCounts" : {
      "type" : "object",
      "properties" : {
-       "downloads": {"type": "string"},
-       "views": {"type": "string"}
+       "downloads" : {
+         "type" : "string"
+       },
+       "views" : {
+         "type" : "string"
+       }
      },
      "description" : "The usage counts (i.e. downloads)"
    }

@@ -313,12 +356,7 @@
},
"openAccessRoute" : {
  "type" : "string",
- "enum": [
-   "gold",
-   "green",
-   "hybrid",
-   "bronze"
- ]
+ "enum" : [ "gold", "green", "hybrid", "bronze" ]
},
"scheme" : {
  "type" : "string",

@@ -348,27 +386,37 @@
"articleprocessingcharge" : {
  "type" : "object",
  "properties" : {
-   "amount": {"type": "string"},
-   "currency": {"type": "string"}
+   "amount" : {
+     "type" : "string"
+   },
+   "currency" : {
+     "type" : "string"
+   }
  },
  "description" : "The money spent to make this book or article available in Open Access. Source for this information is the OpenAPC initiative."
},
"collectedfrom" : {
- "allOf": [
-   {"$ref": "#/definitions/CfHbKeyValue"},
-   {"description": "Information about the source from which the record has been collected"}
- ]
+ "allOf" : [ {
+   "$ref" : "#/definitions/CfHbKeyValue"
+ }, {
+   "description" : "Information about the source from which the record has been collected"
+ } ]
},
"hostedby" : {
- "allOf": [
-   {"$ref": "#/definitions/CfHbKeyValue"},
-   {"description": "Information about the source from which the instance can be viewed or downloaded."}
- ]
+ "allOf" : [ {
+   "$ref" : "#/definitions/CfHbKeyValue"
+ }, {
+   "description" : "Information about the source from which the instance can be viewed or downloaded."
+ } ]
},
- "license": {"type": "string"},
+ "license" : {
+   "type" : "string"
+ },
"pid" : {
  "type" : "array",
- "items": {"$ref": "#/definitions/ResultPid"}
+ "items" : {
+   "$ref" : "#/definitions/ResultPid"
+ }
},
"publicationdate" : {
  "type" : "string",

@@ -427,10 +475,11 @@
  "description" : "Persistent identifiers of the result",
  "type" : "array",
  "items" : {
-   "allOf": [
-     {"$ref": "#/definitions/ResultPid"},
-     {"description": "Persistent identifiers of the result"}
-   ]
+   "allOf" : [ {
+     "$ref" : "#/definitions/ResultPid"
+   }, {
+     "description" : "Persistent identifiers of the result"
+   } ]
  }
},
"programmingLanguage" : {

@@ -477,13 +526,21 @@
    "type" : "string",
    "description" : "The OpenAIRE id for the project"
  },
- "provenance": {"$ref": "#/definitions/Provenance"},
- "title": {"type": "string"},
+ "provenance" : {
+   "$ref" : "#/definitions/Provenance"
+ },
+ "title" : {
+   "type" : "string"
+ },
  "validated" : {
    "type" : "object",
    "properties" : {
-     "validatedByFunder": {"type": "boolean"},
-     "validationDate": {"type": "string"}
+     "validatedByFunder" : {
+       "type" : "boolean"
+     },
+     "validationDate" : {
+       "type" : "string"
+     }
    }
  }
},

@@ -492,7 +549,7 @@
},
"publicationdate" : {
  "type" : "string",
- "description": "Main date of the research product: typically the publication or issued date. In case of a research result with different versions with different dates, the date of the result is selected as the most frequent well-formatted date. If not available, then the most recent and complete date among those that are well-formatted. For statistics, the year is extracted and the result is counted only among the result of that year. Example: Pre-print date: 2019-02-03, Article date provided by repository: 2020-02, Article date provided by Crossref: 2020, OpenAIRE will set as date 2019-02-03, because it\u2019s the most recent among the complete and well-formed dates. If then the repository updates the metadata and set a complete date (e.g. 2020-02-12), then this will be the new date for the result because it becomes the most recent most complete date. However, if OpenAIRE then collects the pre-print from another repository with date 2019-02-03, then this will be the \u201cwinning date\u201d because it becomes the most frequent well-formatted date."
+ "description" : "Main date of the research product: typically the publication or issued date. In case of a research result with different versions with different dates, the date of the result is selected as the most frequent well-formatted date. If not available, then the most recent and complete date among those that are well-formatted. For statistics, the year is extracted and the result is counted only among the result of that year. Example: Pre-print date: 2019-02-03, Article date provided by repository: 2020-02, Article date provided by Crossref: 2020, OpenAIRE will set as date 2019-02-03, because it’s the most recent among the complete and well-formed dates. If then the repository updates the metadata and set a complete date (e.g. 2020-02-12), then this will be the new date for the result because it becomes the most recent most complete date. However, if OpenAIRE then collects the pre-print from another repository with date 2019-02-03, then this will be the “winning date” because it becomes the most frequent well-formatted date."
},
"publisher" : {
  "type" : "string",

@@ -517,10 +574,11 @@
"type" : "object",
"properties" : {
  "provenance" : {
-   "allOf": [
-     {"$ref": "#/definitions/Provenance"},
-     {"description": "Why this subject is associated to the result"}
-   ]
+   "allOf" : [ {
+     "$ref" : "#/definitions/Provenance"
+   }, {
+     "description" : "Why this subject is associated to the result"
+   } ]
  },
  "subject" : {
    "type" : "object",

@@ -28,8 +28,12 @@
    "type" : "string",
    "description" : "The OpenAIRE id for the organisation"
  },
- "legalname": {"type": "string"},
- "legalshortname": {"type": "string"},
+ "legalname" : {
+   "type" : "string"
+ },
+ "legalshortname" : {
+   "type" : "string"
+ },
  "pid" : {
    "description" : "Persistent identifiers for the organisation i.e. isni 0000000090326370",
    "type" : "array",

@@ -48,6 +52,8 @@
      "description" : "Persistent identifiers for the organisation i.e. isni 0000000090326370"
    }
  },
- "websiteurl": {"type": "string"}
+ "websiteurl" : {
+   "type" : "string"
+ }
}
}

@@ -9,8 +9,7 @@
    "type" : "string"
  },
  "code" : {
-   "type": "string",
-   "description": "The grant agreement number"
+   "type" : "string"
  },
  "enddate" : {
    "type" : "string"

@@ -25,18 +24,18 @@
      "type" : "object",
      "properties" : {
        "description" : {
-         "type": "string",
-         "description": "Description of the funding stream"
+         "type" : "string"
        },
        "id" : {
          "type" : "string",
          "description" : "Id of the funding stream"
        }
      }
    },
+   "description" : "Description of the funding stream"
  },
  "jurisdiction" : {
    "type" : "string",
-   "description": "The jurisdiction of the funder (i.e. EU)"
+   "description" : "Geographical jurisdiction (e.g. for European Commission is EU, for Croatian Science Foundation is HR)"
  },
  "name" : {
    "type" : "string",

@@ -46,7 +45,8 @@
    "type" : "string",
    "description" : "The short name of the funder (EC)"
  }
}
},
+ "description" : "Funding information for the project"
}
},
"granted" : {

@@ -81,12 +81,12 @@
    "type" : "string",
    "description" : "The description of the programme"
  }
}
},
+ "description" : "The h2020 programme funding the project"
}
},
"id" : {
- "type": "string",
- "description": "OpenAIRE id for the project"
+ "type" : "string"
},
"keywords" : {
  "type" : "string"

@@ -1,60 +1,46 @@
{
  "$schema" : "http://json-schema.org/draft-07/schema#",
- "definitions": {
-   "Node": {
-     "type": "object",
-     "properties": {
-       "id": {
-         "type": "string",
-         "description": "The OpenAIRE id of the entity"
-       },
-       "type": {
-         "type": "string",
-         "description": "The type of the entity (i.e. organisation)"
-       }
-     }
-   }
- },
  "type" : "object",
  "properties" : {
    "provenance" : {
      "type" : "object",
      "properties" : {
        "provenance" : {
-         "type": "string",
-         "description": "The reason why OpenAIRE holds the relation "
+         "type" : "string"
        },
        "trust" : {
-         "type": "string",
-         "description": "The trust of the relation in the range of [0,1]. Where greater the number, more the trust. Harvested relationships have typically a high trust (0.9). The trust of inferred relationship is calculated by the inference algorithm that generated them, as described in https://graph.openaire.eu/about#architecture (Enrichment --> Mining)"
+         "type" : "string"
        }
      },
+     "description" : "The reason why OpenAIRE holds the relation "
    },
    "reltype" : {
      "type" : "object",
      "properties" : {
        "name" : {
-         "type": "string",
-         "description": "The semantics of the relation (i.e. isAuthorInstitutionOf). "
+         "type" : "string"
        },
        "type" : {
-         "type": "string",
-         "description": "the type of the relation (i.e. affiliation)"
+         "type" : "string"
        }
      },
+     "description" : "To represent the semantics of a relation between two entities"
    },
    "source" : {
-     "allOf": [
-       {"$ref": "#/definitions/Node"},
-       {"description": "The node source in the relation"}
-     ]
+     "type" : "string",
+     "description" : "The identifier of the source in the relation"
    },
+   "sourceType" : {
+     "type" : "string",
+     "description" : "The entity type of the source in the relation"
+   },
    "target" : {
-     "allOf": [
-       {"$ref": "#/definitions/Node"},
-       {"description": "The node target in the relation"}
-     ]
+     "type" : "string",
+     "description" : "The identifier of the target in the relation"
    },
+   "targetType" : {
+     "type" : "string",
+     "description" : "The entity type of the target in the relation"
+   },
    "validated" : {
      "type" : "boolean",

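After this change a relation record is flat: source and target are identifier strings and the entity types travel in sourceType/targetType, with no Node definition to dereference. A hedged example document in the new shape, parsed back with Jackson; identifiers and semantics are invented:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class RelationShapeExample {
	public static void main(String[] args) throws Exception {
		String json = "{"
			+ "\"source\":\"40|corda__h2020::hypothetical\","   // invented ids
			+ "\"sourceType\":\"project\","
			+ "\"target\":\"50|doi_________::hypothetical\","
			+ "\"targetType\":\"result\","
			+ "\"reltype\":{\"name\":\"produces\",\"type\":\"resultProject\"},"
			+ "\"provenance\":{\"provenance\":\"Harvested\",\"trust\":\"0.9\"}}";
		JsonNode rel = new ObjectMapper().readTree(json);
		System.out.println(rel.get("source").asText() + " -> " + rel.get("target").asText());
	}
}
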
@ -4,8 +4,12 @@
|
|||
"Provenance" : {
|
||||
"type" : "object",
|
||||
"properties" : {
|
||||
"provenance": {"type": "string"},
|
||||
"trust": {"type": "string"}
|
||||
"provenance" : {
|
||||
"type" : "string"
|
||||
},
|
||||
"trust" : {
|
||||
"type" : "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"ResultPid" : {
|
||||
|
@ -20,13 +24,6 @@
|
|||
"description" : "The value expressed in the scheme (i.e. 10.1000/182)"
|
||||
}
|
||||
}
|
||||
},
|
||||
"Score": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"class": {"type": "string"},
|
||||
"score": {"type": "string"}
|
||||
}
|
||||
}
|
||||
},
|
||||
"type" : "object",
|
||||
|
@ -36,8 +33,12 @@
|
|||
"items" : {
|
||||
"type" : "object",
|
||||
"properties" : {
|
||||
"fullname": {"type": "string"},
|
||||
"name": {"type": "string"},
|
||||
"fullname" : {
|
||||
"type" : "string"
|
||||
},
|
||||
"name" : {
|
||||
"type" : "string"
|
||||
},
|
||||
"pid" : {
|
||||
"type" : "object",
|
||||
"properties" : {
|
||||
|
@ -55,16 +56,21 @@
|
|||
}
|
||||
},
|
||||
"provenance" : {
|
||||
"allOf": [
|
||||
{"$ref": "#/definitions/Provenance"},
|
||||
{"description": "The reason why the pid was associated to the author"}
|
||||
]
|
||||
"allOf" : [ {
|
||||
"$ref" : "#/definitions/Provenance"
|
||||
}, {
|
||||
"description" : "The reason why the pid was associated to the author"
|
||||
} ]
|
||||
}
|
||||
},
|
||||
"description" : "The author's persistent identifiers"
|
||||
},
|
||||
"rank": {"type": "integer"},
|
||||
"surname": {"type": "string"}
|
||||
"rank" : {
|
||||
"type" : "integer"
|
||||
},
|
||||
"surname" : {
|
||||
"type" : "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -109,8 +115,12 @@
|
|||
"container" : {
|
||||
"type" : "object",
|
||||
"properties" : {
|
||||
"conferencedate": {"type": "string"},
|
||||
"conferenceplace": {"type": "string"},
|
||||
"conferencedate" : {
|
||||
"type" : "string"
|
||||
},
|
||||
"conferenceplace" : {
|
||||
"type" : "string"
|
||||
},
|
||||
"edition" : {
|
||||
"type" : "string",
|
||||
"description" : "Edition of the journal or conference proceeding"
|
||||
|
@ -123,9 +133,15 @@
|
|||
"type" : "string",
|
||||
"description" : "Journal issue number"
|
||||
},
|
||||
"issnLinking": {"type": "string"},
|
||||
"issnOnline": {"type": "string"},
|
||||
"issnPrinted": {"type": "string"},
|
||||
"issnLinking" : {
|
||||
"type" : "string"
|
||||
},
|
||||
"issnOnline" : {
|
||||
"type" : "string"
|
||||
},
|
||||
"issnPrinted" : {
|
||||
"type" : "string"
|
||||
},
|
||||
"name" : {
|
||||
"type" : "string",
|
||||
"description" : "Name of the journal or conference"
|
||||
|
@ -164,10 +180,11 @@
|
|||
"description" : "The label for that code (i.e. Italy)"
|
||||
},
|
||||
"provenance" : {
|
||||
"allOf": [
|
||||
{"$ref": "#/definitions/Provenance"},
|
||||
{"description": "Why this result is associated to the country."}
|
||||
]
|
||||
"allOf" : [ {
|
||||
"$ref" : "#/definitions/Provenance"
|
||||
}, {
|
||||
"description" : "Why this result is associated to the country."
|
||||
} ]
|
||||
}
|
||||
},
|
||||
"description" : "The list of countries associated to this result"
|
||||
|
@ -175,7 +192,9 @@
|
|||
},
|
||||
"coverage" : {
|
||||
"type" : "array",
|
||||
"items": {"type": "string"}
|
||||
"items" : {
|
||||
"type" : "string"
|
||||
}
|
||||
},
|
||||
"dateofcollection" : {
|
||||
"type" : "string",
|
||||
|
@ -183,7 +202,9 @@
|
|||
},
|
||||
"description" : {
|
||||
"type" : "array",
|
||||
"items": {"type": "string"}
|
||||
"items" : {
|
||||
"type" : "string"
|
||||
}
|
||||
},
|
||||
"documentationUrl" : {
|
||||
"description" : "Only for results with type 'software': URL to the software documentation",
|
||||
|
@ -199,7 +220,9 @@
|
|||
},
|
||||
"format" : {
|
||||
"type" : "array",
|
||||
"items": {"type": "string"}
|
||||
"items" : {
|
||||
"type" : "string"
|
||||
}
|
||||
},
|
||||
"geolocation" : {
|
||||
"description" : "Geolocation information",
|
||||
|
@ -207,9 +230,15 @@
|
|||
"items" : {
|
||||
"type" : "object",
|
||||
"properties" : {
|
||||
"box": {"type": "string"},
|
||||
"place": {"type": "string"},
|
||||
"point": {"type": "string"}
|
||||
"box" : {
|
||||
"type" : "string"
|
||||
},
|
||||
"place" : {
|
||||
"type" : "string"
|
||||
},
|
||||
"point" : {
|
||||
"type" : "string"
|
||||
}
|
||||
},
|
||||
"description" : "Geolocation information"
|
||||
}
|
||||
|
@ -221,22 +250,34 @@
|
|||
"indicators" : {
|
||||
"type" : "object",
|
||||
"properties" : {
|
||||
"impactMeasures": {
|
||||
"bipIndicators" : {
|
||||
"description" : "The impact measures (i.e. popularity)",
|
||||
"type" : "array",
|
||||
"items" : {
|
||||
"type" : "object",
|
||||
"properties" : {
|
||||
"impulse": {"$ref": "#/definitions/Score"},
|
||||
"influence": {"$ref": "#/definitions/Score"},
|
||||
"influence_alt": {"$ref": "#/definitions/Score"},
|
||||
"popularity": {"$ref": "#/definitions/Score"},
|
||||
"popularity_alt": {"$ref": "#/definitions/Score"}
|
||||
"class" : {
|
||||
"type" : "string"
|
||||
},
|
||||
"indicator" : {
|
||||
"type" : "string"
|
||||
},
|
||||
"score" : {
|
||||
"type" : "string"
|
||||
}
|
||||
},
|
||||
"description" : "The impact measures (i.e. popularity)"
|
||||
}
|
||||
},
|
||||
"usageCounts" : {
|
||||
"type" : "object",
|
||||
"properties" : {
|
||||
"downloads": {"type": "string"},
|
||||
"views": {"type": "string"}
|
||||
"downloads" : {
|
||||
"type" : "string"
|
||||
},
|
||||
"views" : {
|
||||
"type" : "string"
|
||||
}
|
||||
},
|
||||
"description" : "The usage counts (i.e. downloads)"
|
||||
}
|
||||
|
@@ -262,12 +303,7 @@
},
"openAccessRoute" : {
  "type" : "string",
  "enum": [
    "gold",
    "green",
    "hybrid",
    "bronze"
  ]
  "enum" : [ "gold", "green", "hybrid", "bronze" ]
},
"scheme" : {
  "type" : "string",

@@ -297,15 +333,23 @@
"articleprocessingcharge" : {
  "type" : "object",
  "properties" : {
    "amount": {"type": "string"},
    "currency": {"type": "string"}
    "amount" : {
      "type" : "string"
    },
    "currency" : {
      "type" : "string"
    }
  },
  "description" : "The money spent to make this book or article available in Open Access. Source for this information is the OpenAPC initiative."
},
"license": {"type": "string"},
"license" : {
  "type" : "string"
},
"pid" : {
  "type" : "array",
  "items": {"$ref": "#/definitions/ResultPid"}
  "items" : {
    "$ref" : "#/definitions/ResultPid"
  }
},
"publicationdate" : {
  "type" : "string",

@@ -364,10 +408,11 @@
"description" : "Persistent identifiers of the result",
"type" : "array",
"items" : {
  "allOf": [
    {"$ref": "#/definitions/ResultPid"},
    {"description": "Persistent identifiers of the result"}
  ]
  "allOf" : [ {
    "$ref" : "#/definitions/ResultPid"
  }, {
    "description" : "Persistent identifiers of the result"
  } ]
}
},
"programmingLanguage" : {

@@ -376,7 +421,7 @@
},
"publicationdate" : {
  "type" : "string",
"description": "Main date of the research product: typically the publication or issued date. In case of a research result with different versions with different dates, the date of the result is selected as the most frequent well-formatted date. If not available, then the most recent and complete date among those that are well-formatted. For statistics, the year is extracted and the result is counted only among the result of that year. Example: Pre-print date: 2019-02-03, Article date provided by repository: 2020-02, Article date provided by Crossref: 2020, OpenAIRE will set as date 2019-02-03, because it\u2019s the most recent among the complete and well-formed dates. If then the repository updates the metadata and set a complete date (e.g. 2020-02-12), then this will be the new date for the result because it becomes the most recent most complete date. However, if OpenAIRE then collects the pre-print from another repository with date 2019-02-03, then this will be the \u201cwinning date\u201d because it becomes the most frequent well-formatted date."
"description" : "Main date of the research product: typically the publication or issued date. In case of a research result with different versions with different dates, the date of the result is selected as the most frequent well-formatted date. If not available, then the most recent and complete date among those that are well-formatted. For statistics, the year is extracted and the result is counted only among the result of that year. Example: Pre-print date: 2019-02-03, Article date provided by repository: 2020-02, Article date provided by Crossref: 2020, OpenAIRE will set as date 2019-02-03, because it’s the most recent among the complete and well-formed dates. If then the repository updates the metadata and set a complete date (e.g. 2020-02-12), then this will be the new date for the result because it becomes the most recent most complete date. However, if OpenAIRE then collects the pre-print from another repository with date 2019-02-03, then this will be the “winning date” because it becomes the most frequent well-formatted date."
},
"publisher" : {
  "type" : "string",

@@ -401,10 +446,11 @@
"type" : "object",
"properties" : {
  "provenance" : {
    "allOf": [
      {"$ref": "#/definitions/Provenance"},
      {"description": "Why this subject is associated to the result"}
    ]
    "allOf" : [ {
      "$ref" : "#/definitions/Provenance"
    }, {
      "description" : "Why this subject is associated to the result"
    } ]
  },
  "subject" : {
    "type" : "object",

@@ -10,10 +10,7 @@ import com.github.victools.jsonschema.generator.*;

import eu.dnetlib.dhp.ExecCreateSchemas;
import eu.dnetlib.dhp.oa.model.community.CommunityResult;
import eu.dnetlib.dhp.oa.model.graph.Datasource;
import eu.dnetlib.dhp.oa.model.graph.GraphResult;
import eu.dnetlib.dhp.oa.model.graph.Organization;
import eu.dnetlib.dhp.oa.model.graph.ResearchCommunity;
import eu.dnetlib.dhp.oa.model.graph.*;

//@Disabled
class GenerateJsonSchema {

@@ -44,7 +41,7 @@ class GenerateJsonSchema {
			.without(Option.NONPUBLIC_NONSTATIC_FIELDS_WITHOUT_GETTERS);
		SchemaGeneratorConfig config = configBuilder.build();
		SchemaGenerator generator = new SchemaGenerator(config);
		JsonNode jsonSchema = generator.generateSchema(GraphResult.class);
		JsonNode jsonSchema = generator.generateSchema(Project.class);

		System.out.println(jsonSchema.toString());
	}
@@ -0,0 +1,190 @@

package eu.dnetlib.dhp.oa.common;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.util.Optional;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;

public class MakeTarArchive implements Serializable {

	private static final Logger log = LoggerFactory.getLogger(MakeTarArchive.class);

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				MakeTarArchive.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/common/input_maketar_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		final String outputPath = parser.get("hdfsPath");
		log.info("hdfsPath: {}", outputPath);

		final String hdfsNameNode = parser.get("nameNode");
		log.info("nameNode: {}", hdfsNameNode);

		final String inputPath = parser.get("sourcePath");
		log.info("input path : {}", inputPath);

		final int gBperSplit = Optional
			.ofNullable(parser.get("splitSize"))
			.map(Integer::valueOf)
			.orElse(10);

		final boolean rename = Optional
			.ofNullable(parser.get("rename"))
			.map(Boolean::valueOf)
			.orElse(Boolean.FALSE);

		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", hdfsNameNode);

		FileSystem fileSystem = FileSystem.get(conf);

		makeTArArchive(fileSystem, inputPath, outputPath, gBperSplit, rename);

	}

	public static void makeTArArchive(FileSystem fileSystem, String inputPath, String outputPath, int gBperSplit)
		throws IOException {
		makeTArArchive(fileSystem, inputPath, outputPath, gBperSplit, false);
	}

	public static void makeTArArchive(FileSystem fileSystem, String inputPath, String outputPath, int gBperSplit,
		boolean rename)
		throws IOException {

		RemoteIterator<LocatedFileStatus> dirIterator = fileSystem.listLocatedStatus(new Path(inputPath));

		while (dirIterator.hasNext()) {
			LocatedFileStatus fileStatus = dirIterator.next();

			Path p = fileStatus.getPath();
			String pathString = p.toString();
			String entity = pathString.substring(pathString.lastIndexOf("/") + 1);

			MakeTarArchive.tarMaxSize(fileSystem, pathString, outputPath + "/" + entity, entity, gBperSplit, rename);
		}
	}

	private static TarArchiveOutputStream getTar(FileSystem fileSystem, String outputPath) throws IOException {
		Path hdfsWritePath = new Path(outputPath);
		if (fileSystem.exists(hdfsWritePath)) {
			fileSystem.delete(hdfsWritePath, true);

		}
		return new TarArchiveOutputStream(fileSystem.create(hdfsWritePath).getWrappedStream());
	}

	private static void write(FileSystem fileSystem, String inputPath, String outputPath, String dirName,
		boolean rename)
		throws IOException {

		Path hdfsWritePath = new Path(outputPath);
		if (fileSystem.exists(hdfsWritePath)) {
			fileSystem.delete(hdfsWritePath, true);

		}
		try (TarArchiveOutputStream ar = new TarArchiveOutputStream(
			fileSystem.create(hdfsWritePath).getWrappedStream())) {

			RemoteIterator<LocatedFileStatus> iterator = fileSystem
				.listFiles(
					new Path(inputPath), true);

			while (iterator.hasNext()) {
				writeCurrentFile(fileSystem, dirName, iterator, ar, 0, rename);
			}

		}
	}

	public static void tarMaxSize(FileSystem fileSystem, String inputPath, String outputPath, String dir_name,
		int gBperSplit, boolean rename) throws IOException {
		final long bytesPerSplit = 1024L * 1024L * 1024L * gBperSplit;

		long sourceSize = fileSystem.getContentSummary(new Path(inputPath)).getSpaceConsumed();

		if (sourceSize < bytesPerSplit) {
			write(fileSystem, inputPath, outputPath + ".tar", dir_name, rename);
		} else {
			int partNum = 0;

			RemoteIterator<LocatedFileStatus> fileStatusListIterator = fileSystem
				.listFiles(
					new Path(inputPath), true);
			boolean next = fileStatusListIterator.hasNext();
			while (next) {
				try (TarArchiveOutputStream ar = getTar(fileSystem, outputPath + "_" + (partNum + 1) + ".tar")) {

					long currentSize = 0;
					while (next && currentSize < bytesPerSplit) {
						currentSize = writeCurrentFile(
							fileSystem, dir_name, fileStatusListIterator, ar, currentSize, rename);
						next = fileStatusListIterator.hasNext();

					}

					partNum += 1;
				}
			}
		}
	}

	private static long writeCurrentFile(FileSystem fileSystem, String dirName,
		RemoteIterator<LocatedFileStatus> fileStatusListIterator,
		TarArchiveOutputStream ar, long currentSize, boolean rename) throws IOException {
		LocatedFileStatus fileStatus = fileStatusListIterator.next();

		Path p = fileStatus.getPath();
		String pString = p.toString();
		if (!pString.endsWith("_SUCCESS")) {
			String name = pString.substring(pString.lastIndexOf("/") + 1);
			if (name.startsWith("part-") & name.length() > 10) {
				String tmp = name.substring(0, 10);
				if (name.contains(".")) {
					tmp += name.substring(name.indexOf("."));
				}
				name = tmp;
			}
			if (rename) {
				if (name.endsWith(".txt.gz"))
					name = name.replace(".txt.gz", ".json.gz");
			}

			TarArchiveEntry entry = new TarArchiveEntry(dirName + "/" + name);
			entry.setSize(fileStatus.getLen());
			currentSize += fileStatus.getLen();
			ar.putArchiveEntry(entry);

			InputStream is = fileSystem.open(fileStatus.getPath());

			BufferedInputStream bis = new BufferedInputStream(is);

			int count;
			byte[] data = new byte[1024];
			while ((count = bis.read(data, 0, data.length)) != -1) {
				ar.write(data, 0, count);
			}
			bis.close();
			ar.closeArchiveEntry();

		}
		return currentSize;
	}

}
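Note: a minimal usage sketch of the new archiver (illustrative only, not part of the commit; the namenode URI and paths are made up). Each child directory of the source path is tarred on its own, split into parts of at most gBperSplit gigabytes, and *.txt.gz entries are renamed to *.json.gz when rename is true:

	Configuration conf = new Configuration();
	conf.set("fs.defaultFS", "hdfs://namenode:8020"); // assumed namenode address
	FileSystem fs = FileSystem.get(conf);
	// produces one .tar (or _N.tar parts) per entity directory under /dump/source
	MakeTarArchive.makeTArArchive(fs, "/dump/source", "/dump/tar", 10, true);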
@@ -15,7 +15,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.MakeTarArchive;
import eu.dnetlib.dhp.oa.common.MakeTarArchive;

public class MakeTar implements Serializable {

@@ -8,8 +8,7 @@ import java.util.*;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jetbrains.annotations.NotNull;

import eu.dnetlib.dhp.oa.graph.dump.exceptions.CardinalityTooHighException;
import eu.dnetlib.dhp.oa.graph.dump.exceptions.NoAvailableEntityTypeException;

@@ -31,7 +30,6 @@ import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.*;

public class ResultMapper implements Serializable {
	private static final Logger log = LoggerFactory.getLogger(ResultMapper.class);

	public static <E extends eu.dnetlib.dhp.schema.oaf.OafEntity> Result map(
		E in, Map<String, String> communityMap, String dumpType)

@@ -49,217 +47,44 @@ public class ResultMapper implements Serializable {
		if (ort.isPresent()) {
			try {

				addTypeSpecificInformation(out, input, ort);

				Optional
					.ofNullable(input.getAuthor())
					.ifPresent(
						ats -> out.setAuthor(ats.stream().map(ResultMapper::getAuthor).collect(Collectors.toList())));

				// I do not map Access Right UNKNOWN or OTHER

				Optional<eu.dnetlib.dhp.schema.oaf.Qualifier> oar = Optional.ofNullable(input.getBestaccessright());
				if (oar.isPresent() && Constants.ACCESS_RIGHTS_COAR_MAP.containsKey(oar.get().getClassid())) {
					String code = Constants.ACCESS_RIGHTS_COAR_MAP.get(oar.get().getClassid());
					out
						.setBestaccessright(

							BestAccessRight
								.newInstance(
									code,
									Constants.COAR_CODE_LABEL_MAP.get(code),
									Constants.COAR_ACCESS_RIGHT_SCHEMA));
				addTypeSpecificInformation(out, input, ort.get());
				mapAuthor(out, input);
				mapAccessRight(out, input);
				mapContributor(out, input);
				mapCountry(out, input);
				mapCoverage(out, input);
				out.setDateofcollection(input.getDateofcollection());
				mapDescription(out, input);
				mapEmbargo(out, input);
				mapFormat(out, input);
				out.setId(input.getId().substring(3));
				mapOriginalId(out, input);
				mapInstance(dumpType, out, input);
				mapLanguage(out, input);
				mapLastUpdateTimestamp(out, input);
				mapTitle(out, input);
				mapPid(out, input);
				mapDateOfAcceptance(out, input);
				mapPublisher(out, input);
				mapSource(out, input);
				mapSubject(out, input);
				out.setType(input.getResulttype().getClassid());
				mapMeasure(out, input);
				if (!Constants.DUMPTYPE.COMPLETE.getType().equals(dumpType)) {
					mapCollectedfrom((CommunityResult) out, input);
					mapContext(communityMap, (CommunityResult) out, input);
				}

				final List<String> contributorList = new ArrayList<>();
				Optional
					.ofNullable(input.getContributor())
					.ifPresent(value -> value.stream().forEach(c -> contributorList.add(c.getValue())));
				out.setContributor(contributorList);

				Optional
					.ofNullable(input.getCountry())
					.ifPresent(
						value -> out
							.setCountry(
								value
									.stream()
									.map(
										c -> {
											if (c.getClassid().equals((ModelConstants.UNKNOWN))) {
			} catch (ClassCastException cce) {
												return null;
											}
											ResultCountry country = new ResultCountry();
											country.setCode(c.getClassid());
											country.setLabel(c.getClassname());
											Optional
												.ofNullable(c.getDataInfo())
												.ifPresent(
													provenance -> country
														.setProvenance(
															Provenance
																.newInstance(
																	provenance
																		.getProvenanceaction()
																		.getClassname(),
																	c.getDataInfo().getTrust())));
											return country;
										})
									.filter(Objects::nonNull)
									.collect(Collectors.toList())));

				final List<String> coverageList = new ArrayList<>();
				Optional
					.ofNullable(input.getCoverage())
					.ifPresent(value -> value.stream().forEach(c -> coverageList.add(c.getValue())));
				out.setCoverage(coverageList);

				out.setDateofcollection(input.getDateofcollection());

				final List<String> descriptionList = new ArrayList<>();
				Optional
					.ofNullable(input.getDescription())
					.ifPresent(value -> value.forEach(d -> descriptionList.add(d.getValue())));
				out.setDescription(descriptionList);
				Optional<Field<String>> oStr = Optional.ofNullable(input.getEmbargoenddate());
				if (oStr.isPresent()) {
					out.setEmbargoenddate(oStr.get().getValue());
				}

				final List<String> formatList = new ArrayList<>();
				Optional
					.ofNullable(input.getFormat())
					.ifPresent(value -> value.stream().forEach(f -> formatList.add(f.getValue())));
				out.setFormat(formatList);
				out.setId(input.getId());
				out.setOriginalId(new ArrayList<>());
				Optional
					.ofNullable(input.getOriginalId())
					.ifPresent(
						v -> out
							.setOriginalId(
								input
									.getOriginalId()
									.stream()
									.filter(s -> !s.startsWith("50|"))
									.collect(Collectors.toList())));

				Optional<List<eu.dnetlib.dhp.schema.oaf.Instance>> oInst = Optional
					.ofNullable(input.getInstance());

				if (oInst.isPresent()) {
					if (Constants.DUMPTYPE.COMPLETE.getType().equals(dumpType)) {
						((GraphResult) out)
							.setInstance(
								oInst.get().stream().map(ResultMapper::getGraphInstance).collect(Collectors.toList()));
					} else {
						((CommunityResult) out)
							.setInstance(
								oInst
									.get()
									.stream()
									.map(ResultMapper::getCommunityInstance)
									.collect(Collectors.toList()));
					}
				}

				Optional<eu.dnetlib.dhp.schema.oaf.Qualifier> oL = Optional.ofNullable(input.getLanguage());
				if (oL.isPresent()) {
					eu.dnetlib.dhp.schema.oaf.Qualifier language = oL.get();
					out.setLanguage(Language.newInstance(language.getClassid(), language.getClassname()));
				}
				Optional<Long> oLong = Optional.ofNullable(input.getLastupdatetimestamp());
				if (oLong.isPresent()) {
					out.setLastupdatetimestamp(oLong.get());
				}
				Optional<List<StructuredProperty>> otitle = Optional.ofNullable(input.getTitle());
				if (otitle.isPresent()) {
					List<StructuredProperty> iTitle = otitle
						.get()
						.stream()
						.filter(t -> t.getQualifier().getClassid().equalsIgnoreCase("main title"))
						.collect(Collectors.toList());
					if (!iTitle.isEmpty()) {
						out.setMaintitle(iTitle.get(0).getValue());
					}

					iTitle = otitle
						.get()
						.stream()
						.filter(t -> t.getQualifier().getClassid().equalsIgnoreCase("subtitle"))
						.collect(Collectors.toList());
					if (!iTitle.isEmpty()) {
						out.setSubtitle(iTitle.get(0).getValue());
					}
				return out;

				}

				Optional
					.ofNullable(input.getPid())
					.ifPresent(
						value -> out
							.setPid(
								value
									.stream()
									.map(
										p -> ResultPid
											.newInstance(p.getQualifier().getClassid(), p.getValue()))
									.collect(Collectors.toList())));

				oStr = Optional.ofNullable(input.getDateofacceptance());
				if (oStr.isPresent()) {
					out.setPublicationdate(oStr.get().getValue());
				}
				oStr = Optional.ofNullable(input.getPublisher());
				if (oStr.isPresent()) {
					out.setPublisher(oStr.get().getValue());
				}

				Optional
					.ofNullable(input.getSource())
					.ifPresent(
						value -> out.setSource(value.stream().map(Field::getValue).collect(Collectors.toList())));

				List<Subject> subjectList = new ArrayList<>();
				Optional
					.ofNullable(input.getSubject())
					.ifPresent(
						value -> value
							.stream()
							.filter(
								s -> !((s.getQualifier().getClassid().equalsIgnoreCase("fos") &&
									Optional.ofNullable(s.getDataInfo()).isPresent()
									&& Optional.ofNullable(s.getDataInfo().getProvenanceaction()).isPresent() &&
									s.getDataInfo().getProvenanceaction().getClassid().equalsIgnoreCase("subject:fos"))
									||
									(s.getQualifier().getClassid().equalsIgnoreCase("sdg") &&
										Optional.ofNullable(s.getDataInfo()).isPresent()
										&& Optional.ofNullable(s.getDataInfo().getProvenanceaction()).isPresent() &&
										s
											.getDataInfo()
											.getProvenanceaction()
											.getClassid()
											.equalsIgnoreCase("subject:sdg"))))
							.forEach(s -> subjectList.add(getSubject(s))));

				out.setSubjects(subjectList);

				out.setType(input.getResulttype().getClassid());

				if (Optional.ofNullable(input.getMeasures()).isPresent() && input.getMeasures().size() > 0) {

					out.setIndicators(getIndicator(input.getMeasures()));
				}

				if (!Constants.DUMPTYPE.COMPLETE.getType().equals(dumpType)) {
					((CommunityResult) out)
						.setCollectedfrom(
							input
								.getCollectedfrom()
								.stream()
								.map(cf -> CfHbKeyValue.newInstance(cf.getKey(), cf.getValue()))
								.collect(Collectors.toList()));

	private static void mapContext(Map<String, String> communityMap, CommunityResult out,
		eu.dnetlib.dhp.schema.oaf.Result input) {
		Set<String> communities = communityMap.keySet();
		List<Context> contextList = Optional
			.ofNullable(
@@ -320,94 +145,263 @@ public class ResultMapper implements Serializable {
					hashValue.add(c.hashCode());
				}
			});
			((CommunityResult) out).setContext(remainigContext);
			out.setContext(remainigContext);
		}
	}
		} catch (ClassCastException cce) {

	private static void mapCollectedfrom(CommunityResult out, eu.dnetlib.dhp.schema.oaf.Result input) {
		out
			.setCollectedfrom(
				input
					.getCollectedfrom()
					.stream()
					.map(cf -> CfHbKeyValue.newInstance(cf.getKey().substring(3), cf.getValue()))
					.collect(Collectors.toList()));
	}

	private static void mapMeasure(Result out, eu.dnetlib.dhp.schema.oaf.Result input) {
		if (Optional.ofNullable(input.getMeasures()).isPresent() && input.getMeasures().size() > 0) {

			out.setIndicators(Utils.getIndicator(input.getMeasures()));
		}
	}

	private static void mapSubject(Result out, eu.dnetlib.dhp.schema.oaf.Result input) {
		List<Subject> subjectList = new ArrayList<>();
		Optional
			.ofNullable(input.getSubject())
			.ifPresent(
				value -> value
					.stream()
//					.filter(
//						s -> !((s.getQualifier().getClassid().equalsIgnoreCase("fos") &&
//							Optional.ofNullable(s.getDataInfo()).isPresent()
//							&& Optional.ofNullable(s.getDataInfo().getProvenanceaction()).isPresent() &&
//							s.getDataInfo().getProvenanceaction().getClassid().equalsIgnoreCase("subject:fos"))
//							||
//							(s.getQualifier().getClassid().equalsIgnoreCase("sdg") &&
//								Optional.ofNullable(s.getDataInfo()).isPresent()
//								&& Optional.ofNullable(s.getDataInfo().getProvenanceaction()).isPresent() &&
//								s
//									.getDataInfo()
//									.getProvenanceaction()
//									.getClassid()
//									.equalsIgnoreCase("subject:sdg"))))
					.forEach(s -> subjectList.add(getSubject(s))));

		out.setSubjects(subjectList);
	}

	private static void mapSource(Result out, eu.dnetlib.dhp.schema.oaf.Result input) {
		Optional
			.ofNullable(input.getSource())
			.ifPresent(
				value -> out.setSource(value.stream().map(Field::getValue).collect(Collectors.toList())));
	}

	private static void mapPublisher(Result out, eu.dnetlib.dhp.schema.oaf.Result input) {
		Optional<Field<String>> oStr;
		oStr = Optional.ofNullable(input.getPublisher());
		if (oStr.isPresent()) {
			out.setPublisher(oStr.get().getValue());
		}
	}

	private static void mapDateOfAcceptance(Result out, eu.dnetlib.dhp.schema.oaf.Result input) {
		Optional<Field<String>> oStr;
		oStr = Optional.ofNullable(input.getDateofacceptance());
		if (oStr.isPresent()) {
			out.setPublicationdate(oStr.get().getValue());
		}
	}

	private static void mapPid(Result out, eu.dnetlib.dhp.schema.oaf.Result input) {
		Optional
			.ofNullable(input.getPid())
			.ifPresent(
				value -> out
					.setPid(
						value
							.stream()
							.map(
								p -> ResultPid
									.newInstance(p.getQualifier().getClassid(), p.getValue()))
							.collect(Collectors.toList())));
	}

	private static void mapTitle(Result out, eu.dnetlib.dhp.schema.oaf.Result input) {
		Optional<List<StructuredProperty>> otitle = Optional.ofNullable(input.getTitle());
		if (otitle.isPresent()) {
			List<StructuredProperty> iTitle = otitle
				.get()
				.stream()
				.filter(t -> t.getQualifier().getClassid().equalsIgnoreCase("main title"))
				.collect(Collectors.toList());
			if (!iTitle.isEmpty()) {
				out.setMaintitle(iTitle.get(0).getValue());
			}

			iTitle = otitle
				.get()
				.stream()
				.filter(t -> t.getQualifier().getClassid().equalsIgnoreCase("subtitle"))
				.collect(Collectors.toList());
			if (!iTitle.isEmpty()) {
				out.setSubtitle(iTitle.get(0).getValue());
			}

		}
	}

	private static void mapLastUpdateTimestamp(Result out, eu.dnetlib.dhp.schema.oaf.Result input) {
		Optional<Long> oLong = Optional.ofNullable(input.getLastupdatetimestamp());
		if (oLong.isPresent()) {
			out.setLastupdatetimestamp(oLong.get());
		}
	}

	private static void mapLanguage(Result out, eu.dnetlib.dhp.schema.oaf.Result input) {
		Optional<Qualifier> oL = Optional.ofNullable(input.getLanguage());
		if (oL.isPresent()) {
			Qualifier language = oL.get();
			out.setLanguage(Language.newInstance(language.getClassid(), language.getClassname()));
		}
	}

	private static void mapInstance(String dumpType, Result out, eu.dnetlib.dhp.schema.oaf.Result input) {
		Optional<List<eu.dnetlib.dhp.schema.oaf.Instance>> oInst = Optional
			.ofNullable(input.getInstance());

		if (oInst.isPresent()) {
			if (DUMPTYPE.COMPLETE.getType().equals(dumpType)) {
				((GraphResult) out)
					.setInstance(
						oInst.get().stream().map(ResultMapper::getGraphInstance).collect(Collectors.toList()));
			} else {
				((CommunityResult) out)
					.setInstance(
						oInst
							.get()
							.stream()
							.map(ResultMapper::getCommunityInstance)
							.collect(Collectors.toList()));
			}
		}
	}

	private static void mapOriginalId(Result out, eu.dnetlib.dhp.schema.oaf.Result input) {
		out.setOriginalId(new ArrayList<>());
		Optional
			.ofNullable(input.getOriginalId())
			.ifPresent(
				v -> out
					.setOriginalId(
						input
							.getOriginalId()
							.stream()
							.filter(s -> !s.startsWith("50|"))
							.collect(Collectors.toList())));
	}

	private static void mapFormat(Result out, eu.dnetlib.dhp.schema.oaf.Result input) {
		final List<String> formatList = new ArrayList<>();
		Optional
			.ofNullable(input.getFormat())
			.ifPresent(value -> value.stream().forEach(f -> formatList.add(f.getValue())));
		out.setFormat(formatList);
	}

	private static void mapEmbargo(Result out, eu.dnetlib.dhp.schema.oaf.Result input) {
		Optional<Field<String>> oStr = Optional.ofNullable(input.getEmbargoenddate());
		if (oStr.isPresent()) {
			out.setEmbargoenddate(oStr.get().getValue());
		}
	}

	private static void mapDescription(Result out, eu.dnetlib.dhp.schema.oaf.Result input) {
		final List<String> descriptionList = new ArrayList<>();
		Optional
			.ofNullable(input.getDescription())
			.ifPresent(value -> value.forEach(d -> descriptionList.add(d.getValue())));
		out.setDescription(descriptionList);
	}

	private static void mapCoverage(Result out, eu.dnetlib.dhp.schema.oaf.Result input) {
		final List<String> coverageList = new ArrayList<>();
		Optional
			.ofNullable(input.getCoverage())
			.ifPresent(value -> value.stream().forEach(c -> coverageList.add(c.getValue())));
		out.setCoverage(coverageList);
	}

	private static void mapCountry(Result out, eu.dnetlib.dhp.schema.oaf.Result input) {
		Optional
			.ofNullable(input.getCountry())
			.ifPresent(
				value -> out
					.setCountry(
						value
							.stream()
							.map(
								c -> {
									if (c.getClassid().equals((ModelConstants.UNKNOWN))) {
										return null;
									}
									ResultCountry country = new ResultCountry();
									country.setCode(c.getClassid());
									country.setLabel(c.getClassname());
									Optional
										.ofNullable(c.getDataInfo())
										.ifPresent(
											provenance -> country
												.setProvenance(
													Provenance
														.newInstance(
															provenance
																.getProvenanceaction()
																.getClassname(),
															c.getDataInfo().getTrust())));
									return country;
								})
							.filter(Objects::nonNull)
							.collect(Collectors.toList())));
	}

		return out;

	private static void mapContributor(Result out, eu.dnetlib.dhp.schema.oaf.Result input) {
		final List<String> contributorList = new ArrayList<>();
		Optional
			.ofNullable(input.getContributor())
			.ifPresent(value -> value.stream().forEach(c -> contributorList.add(c.getValue())));
		out.setContributor(contributorList);
	}
	private static Indicator getIndicator(List<eu.dnetlib.dhp.schema.oaf.Measure> measures) {
		UsageCounts uc = null;
		ImpactMeasures im = null;
		Indicator i = new Indicator();
		for (eu.dnetlib.dhp.schema.oaf.Measure m : measures) {
			switch (m.getId()) {
				case USAGE_COUNT_DOWNLOADS:
					if (uc == null) {
						uc = new UsageCounts();
						i.setUsageCounts(uc);
					}
					uc.setDownloads(m.getUnit().get(0).getValue());
					break;
				case USAGE_COUNT_VIEWS:
					if (uc == null) {
						uc = new UsageCounts();
						i.setUsageCounts(uc);
					}
					uc.setViews(m.getUnit().get(0).getValue());
					break;
				case IMPACT_POPULARITY:
					if (im == null) {
						im = new ImpactMeasures();
						i.setImpactMeasures(im);
					}
					im.setPopularity(getScore(m.getUnit()));
					break;
				case IMPACT_POPULARITY_ALT:
					if (im == null) {
						im = new ImpactMeasures();
						i.setImpactMeasures(im);
					}
					im.setPopularity_alt(getScore(m.getUnit()));
					break;
				case IMPACT_IMPULSE:
					if (im == null) {
						im = new ImpactMeasures();
						i.setImpactMeasures(im);
					}
					im.setImpulse(getScore(m.getUnit()));
					break;
				case IMPACT_INFLUENCE:
					if (im == null) {
						im = new ImpactMeasures();
						i.setImpactMeasures(im);
					}
					im.setInfluence(getScore(m.getUnit()));
					break;
				case IMPACT_INFLUENCE_ALT:
					if (im == null) {
						im = new ImpactMeasures();
						i.setImpactMeasures(im);
					}
					im.setInfluence_alt(getScore(m.getUnit()));
					break;
	private static void mapAccessRight(Result out, eu.dnetlib.dhp.schema.oaf.Result input) {
		// I do not map Access Right UNKNOWN or OTHER

		Optional<Qualifier> oar = Optional.ofNullable(input.getBestaccessright());
		if (oar.isPresent() && Constants.ACCESS_RIGHTS_COAR_MAP.containsKey(oar.get().getClassid())) {
			String code = Constants.ACCESS_RIGHTS_COAR_MAP.get(oar.get().getClassid());
			out
				.setBestaccessright(

					BestAccessRight
						.newInstance(
							code,
							Constants.COAR_CODE_LABEL_MAP.get(code),
							Constants.COAR_ACCESS_RIGHT_SCHEMA));
		}
	}

		return i;
	}

	private static Score getScore(List<KeyValue> unit) {
		Score s = new Score();
		for (KeyValue u : unit) {
			if (u.getKey().equals("score")) {
				s.setScore(u.getValue());
			} else {
				s.setClazz(u.getValue());
			}
		}
		return s;
	private static void mapAuthor(Result out, eu.dnetlib.dhp.schema.oaf.Result input) {
		Optional
			.ofNullable(input.getAuthor())
			.ifPresent(
				ats -> out.setAuthor(ats.stream().map(ResultMapper::getAuthor).collect(Collectors.toList())));
	}

	private static void addTypeSpecificInformation(Result out, eu.dnetlib.dhp.schema.oaf.Result input,
		Optional<eu.dnetlib.dhp.schema.oaf.Qualifier> ort) throws NoAvailableEntityTypeException {
		switch (ort.get().getClassid()) {
		eu.dnetlib.dhp.schema.oaf.Qualifier ort) throws NoAvailableEntityTypeException {
		switch (ort.getClassid()) {
			case "publication":
				Optional<Journal> journal = Optional
					.ofNullable(((Publication) input).getJournal());
@@ -521,14 +515,20 @@ public class ResultMapper implements Serializable {

		setCommonValue(i, instance);

		if (Optional.ofNullable(i.getCollectedfrom()).isPresent() &&
			Optional.ofNullable(i.getCollectedfrom().getKey()).isPresent() &&
			StringUtils.isNotBlank(i.getCollectedfrom().getKey()))
			instance
				.setCollectedfrom(
					CfHbKeyValue
						.newInstance(i.getCollectedfrom().getKey(), i.getCollectedfrom().getValue()));
						.newInstance(i.getCollectedfrom().getKey().substring(3), i.getCollectedfrom().getValue()));

		if (Optional.ofNullable(i.getHostedby()).isPresent() &&
			Optional.ofNullable(i.getHostedby().getKey()).isPresent() &&
			StringUtils.isNotBlank(i.getHostedby().getKey()))
			instance
				.setHostedby(
					CfHbKeyValue.newInstance(i.getHostedby().getKey(), i.getHostedby().getValue()));
					CfHbKeyValue.newInstance(i.getHostedby().getKey().substring(3), i.getHostedby().getValue()));

		return instance;

@@ -548,8 +548,6 @@ public class ResultMapper implements Serializable {
					Constants.COAR_CODE_LABEL_MAP.get(code),
					Constants.COAR_ACCESS_RIGHT_SCHEMA));

		Optional<List<eu.dnetlib.dhp.schema.oaf.Measure>> mes = Optional.ofNullable(i.getMeasures());

		if (opAr.get().getOpenAccessRoute() != null) {
			switch (opAr.get().getOpenAccessRoute()) {
				case hybrid:

@@ -1,17 +1,21 @@

package eu.dnetlib.dhp.oa.graph.dump;

import static eu.dnetlib.dhp.oa.graph.dump.Constants.*;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.jetbrains.annotations.NotNull;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;

@@ -19,9 +23,14 @@ import com.google.gson.Gson;
import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
import eu.dnetlib.dhp.oa.graph.dump.complete.Constants;
import eu.dnetlib.dhp.oa.model.Indicator;
import eu.dnetlib.dhp.oa.model.Score;
import eu.dnetlib.dhp.oa.model.UsageCounts;
import eu.dnetlib.dhp.oa.model.graph.GraphResult;
import eu.dnetlib.dhp.oa.model.graph.Relation;
import eu.dnetlib.dhp.oa.model.graph.ResearchCommunity;
import eu.dnetlib.dhp.schema.oaf.KeyValue;
import eu.dnetlib.dhp.schema.oaf.Measure;
import eu.dnetlib.dhp.utils.DHPUtils;
import eu.dnetlib.dhp.utils.ISLookupClientFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;

@@ -119,18 +128,18 @@ public class Utils {
		return dumpedIds;
	}

	public static Dataset<Relation> getValidRelations(SparkSession spark, Dataset<Relation> relations,
	public static Dataset<Relation> getValidRelations(Dataset<Relation> relations,
		Dataset<String> entitiesIds) {
		Dataset<Tuple2<String, Relation>> relationSource = relations
			.map(
				(MapFunction<Relation, Tuple2<String, Relation>>) r -> new Tuple2<>(r.getSource().getId(), r),
				(MapFunction<Relation, Tuple2<String, Relation>>) r -> new Tuple2<>(r.getSource(), r),
				Encoders.tuple(Encoders.STRING(), Encoders.bean(Relation.class)));

		Dataset<Tuple2<String, Relation>> relJoinSource = relationSource
			.joinWith(entitiesIds, relationSource.col("_1").equalTo(entitiesIds.col("value")))
			.map(
				(MapFunction<Tuple2<Tuple2<String, Relation>, String>, Tuple2<String, Relation>>) t2 -> new Tuple2<>(
					t2._1()._2().getTarget().getId(), t2._1()._2()),
					t2._1()._2().getTarget(), t2._1()._2()),
				Encoders.tuple(Encoders.STRING(), Encoders.bean(Relation.class)));

		return relJoinSource

@@ -140,4 +149,68 @@ public class Utils {
			Encoders.bean(Relation.class));
	}

	public static Indicator getIndicator(List<Measure> measures) {
		Indicator i = new Indicator();
		for (eu.dnetlib.dhp.schema.oaf.Measure m : measures) {
			switch (m.getId()) {
				case USAGE_COUNT_DOWNLOADS:
					getUsageCounts(i).setDownloads(m.getUnit().get(0).getValue());
					break;
				case USAGE_COUNT_VIEWS:
					getUsageCounts(i).setViews(m.getUnit().get(0).getValue());
					break;
				default:
					getImpactMeasure(i).add(getScore(m.getId(), m.getUnit()));
					break;
//				case IMPACT_POPULARITY:
//					getImpactMeasure(i).setPopularity(getScore(m.getUnit()));
//					break;
//				case IMPACT_POPULARITY_ALT:
//					getImpactMeasure(i).setPopularity_alt(getScore(m.getUnit()));
//					break;
//				case IMPACT_IMPULSE:
//					getImpactMeasure(i).setImpulse(getScore(m.getUnit()));
//					break;
//				case IMPACT_INFLUENCE:
//					getImpactMeasure(i).setInfluence(getScore(m.getUnit()));
//					break;
//				case IMPACT_INFLUENCE_ALT:
//					getImpactMeasure(i).setInfluence_alt(getScore(m.getUnit()));
//					break;
//				default:
//					break;
			}
		}

		return i;
	}

	@NotNull
	private static UsageCounts getUsageCounts(Indicator i) {
		if (i.getUsageCounts() == null) {
			i.setUsageCounts(new UsageCounts());
		}
		return i.getUsageCounts();
	}

	@NotNull
	private static List<Score> getImpactMeasure(Indicator i) {
		if (i.getBipIndicators() == null) {
			i.setBipIndicators(new ArrayList<>());
		}
		return i.getBipIndicators();
	}

	private static Score getScore(String indicator, List<KeyValue> unit) {
		Score s = new Score();
		s.setIndicator(indicator);
		for (KeyValue u : unit) {
			if (u.getKey().equals("score")) {
				s.setScore(u.getValue());
			} else {
				s.setClazz(u.getValue());
			}
		}
		return s;
	}
}
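Note: the grouping logic above can be read as: usage-count measures fill the single UsageCounts bean, while every other measure id becomes one Score entry in bipIndicators. A sketch (illustrative only; it assumes USAGE_COUNT_VIEWS resolves to the string "views" and that the oaf Measure/KeyValue beans expose plain setters):

	Measure views = new Measure();
	views.setId("views"); // assumed to match USAGE_COUNT_VIEWS
	KeyValue count = new KeyValue();
	count.setValue("42");
	views.setUnit(java.util.Arrays.asList(count));

	Measure influence = new Measure();
	influence.setId("influence");
	KeyValue score = new KeyValue();
	score.setKey("score");
	score.setValue("1.4E-9");
	KeyValue clazz = new KeyValue();
	clazz.setKey("class"); // any key other than "score" lands in Score.clazz
	clazz.setValue("C5");
	influence.setUnit(java.util.Arrays.asList(score, clazz));

	Indicator i = Utils.getIndicator(java.util.Arrays.asList(views, influence));
	// i.getUsageCounts().getViews()              -> "42"
	// i.getBipIndicators().get(0).getIndicator() -> "influence"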
@@ -55,34 +55,21 @@ public class CommunitySplit implements Serializable {
		communities
			.keySet()
			.stream()
			.forEach(c -> printResult(c, result, outputPath + "/" + communities.get(c).replace(" ", "_")));

	}

	private static void printResult(String c, Dataset<CommunityResult> result, String outputPath) {
		Dataset<CommunityResult> communityProducts = result
			.filter((FilterFunction<CommunityResult>) r -> containsCommunity(r, c));

		communityProducts
			.parallel()
			.forEach(c -> {
				result
					.filter(
						(FilterFunction<CommunityResult>) r -> Optional.ofNullable(r.getContext()).isPresent() &&
							r.getContext().stream().anyMatch(con -> con.getCode().equals(c)))
					.map(
						(MapFunction<CommunityResult, String>) cr -> new ObjectMapper().writeValueAsString(cr),
						Encoders.STRING())
					.write()
					.option("compression", "gzip")
					.mode(SaveMode.Overwrite)
					.text(outputPath);
					.text(outputPath + "/" + communities.get(c).replace(" ", "_"));
			});

	}

	private static boolean containsCommunity(CommunityResult r, String c) {
		if (Optional.ofNullable(r.getContext()).isPresent()) {
			return r
				.getContext()
				.stream()
				.map(Context::getCode)
				.collect(Collectors.toList())
				.contains(c);
		}
		return false;
	}
}
@@ -61,6 +61,13 @@ public class SparkPrepareResultProject implements Serializable {
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		Boolean substring = Optional
			.ofNullable(parser.get("substring"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);

		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

@@ -74,11 +81,12 @@ public class SparkPrepareResultProject implements Serializable {
			isSparkSessionManaged,
			spark -> {
				Utils.removeOutputDir(spark, outputPath);
				prepareResultProjectList(spark, inputPath, outputPath);
				prepareResultProjectList(spark, inputPath, outputPath, substring);
			});
	}

	private static void prepareResultProjectList(SparkSession spark, String inputPath, String outputPath) {
	private static void prepareResultProjectList(SparkSession spark, String inputPath, String outputPath,
		Boolean substring) {
		Dataset<Relation> relation = Utils
			.readPath(spark, inputPath + "/relation", Relation.class)
			.filter(

@@ -101,6 +109,9 @@ public class SparkPrepareResultProject implements Serializable {
					Set<String> projectSet = new HashSet<>();
					Tuple2<eu.dnetlib.dhp.schema.oaf.Project, Relation> first = it.next();
					ResultProject rp = new ResultProject();
					if (substring)
						rp.setResultId(s.substring(3));
					else
						rp.setResultId(s);
					eu.dnetlib.dhp.schema.oaf.Project p = first._1();
					projectSet.add(p.getId());
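Note: the new substring flag only controls whether the 3-character entity prefix is stripped from the dumped result id; a sketch with a made-up id (illustrative only):

	// given s = "50|doi_________::abc"
	// substring == true  -> rp.setResultId("doi_________::abc")
	// substring == false -> rp.setResultId("50|doi_________::abc")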
@@ -131,7 +142,7 @@ public class SparkPrepareResultProject implements Serializable {
	private static Project getProject(eu.dnetlib.dhp.schema.oaf.Project op, Relation relation) {
		Project p = Project
			.newInstance(
				op.getId(),
				op.getId().substring(3),
				op.getCode().getValue(),
				Optional
					.ofNullable(op.getAcronym())

@@ -1,11 +1,9 @@

package eu.dnetlib.dhp.oa.graph.dump.complete;

import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Serializable;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;

@@ -14,15 +12,20 @@ import java.util.function.Function;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.oa.graph.dump.exceptions.MyRuntimeException;
import eu.dnetlib.dhp.oa.graph.dump.subset.MasterDuplicate;
import eu.dnetlib.dhp.oa.graph.dump.subset.ReadMasterDuplicateFromDB;
import eu.dnetlib.dhp.oa.model.graph.*;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.oaf.Datasource;

@@ -63,15 +66,23 @@ public class CreateContextRelation implements Serializable {
		log.info("hdfsPath: {}", hdfsPath);

		final String hdfsNameNode = parser.get("nameNode");
		log.info("nameNode: {}", hdfsNameNode);
		log.info("hdfsNameNode: {}", hdfsNameNode);

		final String isLookUpUrl = parser.get("isLookUpUrl");
		log.info("isLookUpUrl: {}", isLookUpUrl);

		final String masterDuplicatePath = parser.get("masterDuplicate");
		log.info("masterDuplicatePath: {}", masterDuplicatePath);

		final CreateContextRelation cce = new CreateContextRelation(hdfsPath, hdfsNameNode, isLookUpUrl);

		final List<MasterDuplicate> masterDuplicateList = cce.readMasterDuplicate(masterDuplicatePath);

		log.info("Creating relation for datasource...");
		cce.execute(Process::getRelation, CONTEX_RELATION_DATASOURCE, ModelSupport.getIdPrefix(Datasource.class));
		cce
			.execute(
				Process::getRelation, CONTEX_RELATION_DATASOURCE, ModelSupport.getIdPrefix(Datasource.class),
				masterDuplicateList);

		log.info("Creating relations for projects... ");
		cce

@@ -83,6 +94,20 @@ public class CreateContextRelation implements Serializable {

	}

	private List<MasterDuplicate> readMasterDuplicate(String masterDuplicatePath) throws IOException {
		FileSystem fileSystem = FileSystem.get(conf);
		Path hdfsReadPath = new Path(masterDuplicatePath);
		BufferedReader reader = new BufferedReader(new InputStreamReader(fileSystem.open(hdfsReadPath)));
		List<MasterDuplicate> mdlist = new ArrayList<>();
		ObjectMapper mapper = new ObjectMapper();
		String line;

		while ((line = reader.readLine()) != null) {
			mdlist.add(mapper.readValue(line, MasterDuplicate.class));
		}
		return mdlist;
	}

	private void close() throws IOException {
		writer.close();
	}

@@ -111,9 +136,15 @@ public class CreateContextRelation implements Serializable {

	public void execute(final Function<ContextInfo, List<Relation>> producer, String category, String prefix) {

		execute(producer, category, prefix, null);
	}

	public void execute(final Function<ContextInfo, List<Relation>> producer, String category, String prefix,
		List<MasterDuplicate> masterDuplicateList) {

		final Consumer<ContextInfo> consumer = ci -> producer.apply(ci).forEach(this::writeEntity);

		queryInformationSystem.getContextRelation(consumer, category, prefix);
		queryInformationSystem.getContextRelation(consumer, category, prefix, masterDuplicateList);
	}

	protected void writeEntity(final Relation r) {

@@ -15,7 +15,6 @@ import org.apache.spark.sql.SparkSession;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
import eu.dnetlib.dhp.oa.model.Provenance;
import eu.dnetlib.dhp.oa.model.graph.Node;
import eu.dnetlib.dhp.oa.model.graph.RelType;
import eu.dnetlib.dhp.oa.model.graph.Relation;
import eu.dnetlib.dhp.schema.common.ModelConstants;

@@ -85,7 +84,7 @@ public class Extractor implements Serializable {
						.orElse(null))
				.orElse(null);
			Relation r = getRelation(
				value.getId(), contextId,
				value.getId().substring(3), contextId,
				Constants.RESULT_ENTITY,
				Constants.CONTEXT_ENTITY,
				ModelConstants.IS_RELATED_TO, ModelConstants.RELATIONSHIP, provenance);

@@ -95,7 +94,7 @@ public class Extractor implements Serializable {
				hashCodes.add(r.hashCode());
			}
			r = getRelation(
				contextId, value.getId(),
				contextId, value.getId().substring(3),
				Constants.CONTEXT_ENTITY,
				Constants.RESULT_ENTITY,
				ModelConstants.IS_RELATED_TO,

@@ -164,8 +163,8 @@ public class Extractor implements Serializable {
						eu.dnetlib.dhp.oa.graph.dump.Constants.HARVESTED,
						eu.dnetlib.dhp.oa.graph.dump.Constants.DEFAULT_TRUST));
				Relation r = getRelation(
					value.getId(),
					cf.getKey(), Constants.RESULT_ENTITY, Constants.DATASOURCE_ENTITY,
					value.getId().substring(3),
					cf.getKey().substring(3), Constants.RESULT_ENTITY, Constants.DATASOURCE_ENTITY,
					resultDatasource, ModelConstants.PROVISION,
					provenance);
				if (!hashCodes.contains(r.hashCode())) {

@@ -175,7 +174,7 @@ public class Extractor implements Serializable {
				}

				r = getRelation(
					cf.getKey(), value.getId(),
					cf.getKey().substring(3), value.getId().substring(3),
					Constants.DATASOURCE_ENTITY, Constants.RESULT_ENTITY,
					datasourceResult, ModelConstants.PROVISION,
					provenance);

@@ -191,8 +190,10 @@ public class Extractor implements Serializable {
	private static Relation getRelation(String source, String target, String sourceType, String targetType,
		String relName, String relType, Provenance provenance) {
		Relation r = new Relation();
		r.setSource(Node.newInstance(source, sourceType));
		r.setTarget(Node.newInstance(target, targetType));
		r.setSource(source);
		r.setSourceType(sourceType);
		r.setTarget(target);
		r.setTargetType(targetType);
		r.setReltype(RelType.newInstance(relName, relType));
		r.setProvenance(provenance);
		return r;
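Note: the dump Relation is flattened here: source and target go from Node objects to plain id strings plus explicit sourceType/targetType fields, which is why Utils.getValidRelations above now joins on r.getSource() rather than r.getSource().getId(). A before/after sketch with made-up identifiers (illustrative only):

	// before: r.setSource(Node.newInstance(source, sourceType));
	Relation r = new Relation();
	r.setSource("00|context_____::abc"); // made-up identifier
	r.setSourceType(Constants.CONTEXT_ENTITY);
	r.setTarget("doi_________::123"); // result ids are now dumped without the "50|" prefix
	r.setTargetType(Constants.RESULT_ENTITY);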
@@ -55,18 +55,18 @@ public class Process implements Serializable {
			ci
				.getDatasourceList()
				.forEach(ds -> {

					String nodeType = ModelSupport.idPrefixEntity.get(ds.substring(0, 2));
					String datasourceId = ds;
					if (ds.startsWith("10|") || ds.startsWith("40|"))
						datasourceId = ds.substring(3);

					String contextId = Utils.getContextId(ci.getId());
					relationList
						.add(
							Relation
								.newInstance(
									Node
										.newInstance(
											contextId, eu.dnetlib.dhp.oa.model.graph.Constants.CONTEXT_ENTITY),
									Node.newInstance(ds, nodeType),
									contextId, eu.dnetlib.dhp.oa.model.graph.Constants.CONTEXT_ENTITY,
									datasourceId, nodeType,
									RelType.newInstance(ModelConstants.IS_RELATED_TO, ModelConstants.RELATIONSHIP),
									Provenance
										.newInstance(

@@ -77,10 +77,8 @@ public class Process implements Serializable {
						.add(
							Relation
								.newInstance(
									Node.newInstance(ds, nodeType),
									Node
										.newInstance(
											contextId, eu.dnetlib.dhp.oa.model.graph.Constants.CONTEXT_ENTITY),
									datasourceId, nodeType,
									contextId, eu.dnetlib.dhp.oa.model.graph.Constants.CONTEXT_ENTITY,
									RelType.newInstance(ModelConstants.IS_RELATED_TO, ModelConstants.RELATIONSHIP),
									Provenance
										.newInstance(

@@ -18,6 +18,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;

import eu.dnetlib.dhp.oa.graph.dump.subset.MasterDuplicate;
import eu.dnetlib.dhp.oa.graph.dump.subset.SparkDumpResult;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.utils.DHPUtils;

@@ -110,6 +111,11 @@ public class QueryInformationSystem {
	}

	public void getContextRelation(final Consumer<ContextInfo> consumer, String category, String prefix) {
		getContextRelation(consumer, category, prefix, null);
	}

	public void getContextRelation(final Consumer<ContextInfo> consumer, String category, String prefix,
		List<MasterDuplicate> masterDuplicateList) {

		contextRelationResult.forEach(xml -> {
			ContextInfo cinfo = new ContextInfo();

@@ -129,7 +135,7 @@ public class QueryInformationSystem {
					String categoryId = el.attributeValue("id");
					categoryId = categoryId.substring(categoryId.lastIndexOf("::") + 2);
					if (categoryId.equals(category)) {
						cinfo.setDatasourceList(getCategoryList(el, prefix));
						cinfo.setDatasourceList(getCategoryList(el, prefix, masterDuplicateList));
					}
				}

@@ -144,22 +150,36 @@ public class QueryInformationSystem {
	}

	@NotNull
	private List<String> getCategoryList(Element el, String prefix) {
	private List<String> getCategoryList(Element el, String prefix, List<MasterDuplicate> masterDuplicateList) {
		List<String> datasourceList = new ArrayList<>();
		for (Object node : el.selectNodes(".//concept")) {
			String oid = getOpenaireId((Node) node, prefix);
			if (oid != null)
				if (masterDuplicateList == null)
					datasourceList.add(oid);
				else
					datasourceList.add(getMaster(oid, masterDuplicateList));
		}

		return datasourceList;
	}

	private String getMaster(String oid, List<MasterDuplicate> masterDuplicateList) {
		for (MasterDuplicate md : masterDuplicateList) {
			if (md.getDuplicate().equals(oid))
				return md.getMaster();
		}
		return oid;
	}

	private String getOpenaireId(Node el, String prefix) {
		for (Object node : el.selectNodes(".//param")) {
			Node n = (Node) node;
			if (n.valueOf("./@name").equals("openaireId")) {
				return prefix + "|" + n.getText();
				String id = n.getText();
				if (id.startsWith(prefix + "|"))
					return id;
				return prefix + "|" + id;
			}
		}
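Note: when a master/duplicate mapping is supplied, each datasource id collected from the context profile is resolved to its master id; ids without a mapping pass through unchanged. Illustrative only, with made-up ids and assumed bean setters on MasterDuplicate:

	MasterDuplicate md = new MasterDuplicate();
	md.setDuplicate("10|re3data_____::dup"); // assumed setter
	md.setMaster("10|openaire____::master"); // assumed setter
	// getMaster("10|re3data_____::dup", Arrays.asList(md)) -> "10|openaire____::master"
	// getMaster("10|unrelated___::x", Arrays.asList(md))   -> "10|unrelated___::x"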
@@ -199,6 +219,8 @@ public class QueryInformationSystem {
		}
		if (funding.toLowerCase().contains("h2020")) {
			nsp = "corda__h2020::";
		} else if (funding.toLowerCase().contains("he")) {
			nsp = "corda_____he::";
		} else {
			nsp = "corda_______::";
		}

@@ -107,30 +107,137 @@ public class SparkCollectAndSave implements Serializable {

// Dataset<String> dumpedIds = Utils.getEntitiesId(spark, outputPath);

Dataset<Relation> relations = Utils
    .readPath(spark, inputPath + "/relation/publication", Relation.class)
    .union(Utils.readPath(spark, inputPath + "/relation/dataset", Relation.class))
    .union(Utils.readPath(spark, inputPath + "/relation/orp", Relation.class))
    .union(Utils.readPath(spark, inputPath + "/relation/software", Relation.class))
    .union(Utils.readPath(spark, inputPath + "/relation/contextOrg", Relation.class))
    .union(Utils.readPath(spark, inputPath + "/relation/context", Relation.class))
    .union(Utils.readPath(spark, inputPath + "/relation/relation", Relation.class));
// Dataset<Relation> relations = Utils
// .readPath(spark, inputPath + "/relation/publication", Relation.class)
// .union(Utils.readPath(spark, inputPath + "/relation/dataset", Relation.class))
// .union(Utils.readPath(spark, inputPath + "/relation/orp", Relation.class))
// .union(Utils.readPath(spark, inputPath + "/relation/software", Relation.class))
// .union(Utils.readPath(spark, inputPath + "/relation/contextOrg", Relation.class))
// .union(Utils.readPath(spark, inputPath + "/relation/context", Relation.class))
// .union(Utils.readPath(spark, inputPath + "/relation/relation", Relation.class));

Utils.getValidRelations(spark, relations, Utils.getEntitiesId(spark, outputPath))
// Dataset<Relation> relJoinSource = relations
// .joinWith(dumpedIds, relations.col("source.id").equalTo(dumpedIds.col("value")))
// .map((MapFunction<Tuple2<Relation, String>, Relation>) t2 -> t2._1(),
// Encoders.bean(Relation.class));
//
// relJoinSource
// .joinWith(dumpedIds, relJoinSource.col("target.id").equalTo(dumpedIds.col("value")))
// .map((MapFunction<Tuple2<Relation, String>, Relation>) t2 -> t2._1(),
// Encoders.bean(Relation.class))
// Utils.getValidRelations(relations, Utils.getEntitiesId(spark, outputPath))
Utils
    .readPath(spark, inputPath + "/relation/publication", Relation.class)
    .write()
    .mode(SaveMode.Overwrite)
    .option("compression", "gzip")
    .json(outputPath + "/relation");
Utils
    .readPath(spark, inputPath + "/relation/dataset", Relation.class)
    .write()
    .mode(SaveMode.Append)
    .option("compression", "gzip")
    .json(outputPath + "/relation");
Utils
    .readPath(spark, inputPath + "/relation/orp", Relation.class)
    .write()
    .mode(SaveMode.Append)
    .option("compression", "gzip")
    .json(outputPath + "/relation");

Utils
    .readPath(spark, inputPath + "/relation/software", Relation.class)
    .write()
    .mode(SaveMode.Append)
    .option("compression", "gzip")
    .json(outputPath + "/relation");
Utils
    .readPath(spark, inputPath + "/relation/contextOrg", Relation.class)
    .write()
    .mode(SaveMode.Append)
    .option("compression", "gzip")
    .json(outputPath + "/relation");
Utils
    .readPath(spark, inputPath + "/relation/context", Relation.class)
    .write()
    .mode(SaveMode.Append)
    .option("compression", "gzip")
    .json(outputPath + "/relation");
Utils
    .readPath(spark, inputPath + "/relation/relation", Relation.class)
    .write()
    .mode(SaveMode.Append)
    .option("compression", "gzip")
    .json(outputPath + "/relation");
// relSource(
// inputPath, dumpedIds, Utils
// .readPath(spark, inputPath + "/relation/publication", Relation.class),
// inputPath + "/relSource/publication");
// relSource(
// inputPath, dumpedIds, Utils
// .readPath(spark, inputPath + "/relation/dataset", Relation.class),
// inputPath + "/relSource/dataset");
// relSource(
// inputPath, dumpedIds, Utils
// .readPath(spark, inputPath + "/relation/orp", Relation.class),
// inputPath + "/relSource/orp");
// relSource(
// inputPath, dumpedIds, Utils
// .readPath(spark, inputPath + "/relation/software", Relation.class),
// inputPath + "/relSource/software");
// relSource(
// inputPath, dumpedIds, Utils
// .readPath(spark, inputPath + "/relation/contextOrg", Relation.class),
// inputPath + "/relSource/contextOrg");
// relSource(
// inputPath, dumpedIds, Utils
// .readPath(spark, inputPath + "/relation/context", Relation.class),
// inputPath + "/relSource/context");
// relSource(
// inputPath, dumpedIds, Utils
// .readPath(spark, inputPath + "/relation/relation", Relation.class),
// inputPath + "/relSource/relation");

// relTarget(
// outputPath, dumpedIds, Utils.readPath(spark, inputPath + "/relSource/publication", Relation.class),
// SaveMode.Overwrite);
// relTarget(
// outputPath, dumpedIds, Utils.readPath(spark, inputPath + "/relSource/dataset", Relation.class),
// SaveMode.Append);
// relTarget(
// outputPath, dumpedIds, Utils.readPath(spark, inputPath + "/relSource/orp", Relation.class),
// SaveMode.Append);
// relTarget(
// outputPath, dumpedIds, Utils.readPath(spark, inputPath + "/relSource/software", Relation.class),
// SaveMode.Append);
// relTarget(
// outputPath, dumpedIds, Utils.readPath(spark, inputPath + "/relSource/contextOrg", Relation.class),
// SaveMode.Append);
// relTarget(
// outputPath, dumpedIds, Utils.readPath(spark, inputPath + "/relSource/context", Relation.class),
// SaveMode.Append);
// relTarget(
// outputPath, dumpedIds, Utils.readPath(spark, inputPath + "/relSource/relation", Relation.class),
// SaveMode.Append);

}

private static void relTarget(String outputPath, Dataset<String> dumpedIds, Dataset<Relation> relJoinSource,
    SaveMode saveMode) {
    relJoinSource
        .joinWith(dumpedIds, relJoinSource.col("target").equalTo(dumpedIds.col("value")))
        .map(
            (MapFunction<Tuple2<Relation, String>, Relation>) t2 -> t2._1(),
            Encoders.bean(Relation.class))
        .write()
        .mode(saveMode)
        .option("compression", "gzip")
        .json(outputPath + "/relation");
}

private static void relSource(String inputPath, Dataset<String> dumpedIds, Dataset<Relation> relations,
    String outputPath) {

    relations
        .joinWith(dumpedIds, relations.col("source").equalTo(dumpedIds.col("value")))
        .map(
            (MapFunction<Tuple2<Relation, String>, Relation>) t2 -> t2._1(),
            Encoders.bean(Relation.class))
        .write()
        .mode(SaveMode.Overwrite)
        .option("compression", "gzip")
        .json(outputPath);
}

private static void write(Dataset<GraphResult> dataSet, String outputPath) {

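The collect-and-save step above funnels every relation type into one dataset and keeps only relations whose endpoints were actually dumped. A minimal sketch of that endpoint check, assuming a Dataset<Relation> with flat string source/target columns and a Dataset<String> of dumped identifiers (getValidRelations is assumed to do something equivalent internally):

    // Keep only relations whose source AND target appear among the dumped ids.
    // leftsemi joins filter the left side without adding columns, so the result
    // still matches the Relation bean schema.
    static Dataset<Relation> keepValid(Dataset<Relation> rels, Dataset<String> validIds) {
        Dataset<Row> bySource = rels
            .join(validIds, rels.col("source").equalTo(validIds.col("value")), "leftsemi");
        return bySource
            .join(validIds, bySource.col("target").equalTo(validIds.col("value")), "leftsemi")
            .as(Encoders.bean(Relation.class));
    }
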
@@ -204,6 +204,7 @@ public class SparkDumpEntitiesJob implements Serializable {
    .map(
        (MapFunction<E, Project>) p -> mapProject((eu.dnetlib.dhp.schema.oaf.Project) p),
        Encoders.bean(Project.class))
    .filter((FilterFunction<Project>) p -> p != null)
    .write()
    .mode(SaveMode.Overwrite)
    .option(COMPRESSION, GZIP)

@@ -211,9 +212,11 @@ public class SparkDumpEntitiesJob implements Serializable {
}

private static Datasource mapDatasource(eu.dnetlib.dhp.schema.oaf.Datasource d) {
    if (Boolean.TRUE.equals(d.getDataInfo().getDeletedbyinference()))
        return null;
    Datasource datasource = new Datasource();

    datasource.setId(d.getId());
    datasource.setId(d.getId().substring(3));

    Optional
        .ofNullable(d.getOriginalId())

@@ -337,6 +340,10 @@ public class SparkDumpEntitiesJob implements Serializable {
    .ofNullable(d.getJournal())
    .ifPresent(j -> datasource.setJournal(getContainer(j)));

// Optional
// .ofNullable(d.getMeasures())
// .ifPresent(m -> datasource.setIndicators(Utils.getIndicator(d.getMeasures())));

return datasource;

}

@@ -392,11 +399,14 @@ public class SparkDumpEntitiesJob implements Serializable {
}

private static Project mapProject(eu.dnetlib.dhp.schema.oaf.Project p) throws DocumentException {
    if (Boolean.TRUE.equals(p.getDataInfo().getDeletedbyinference()))
        return null;

    Project project = new Project();

    Optional
        .ofNullable(p.getId())
        .ifPresent(id -> project.setId(id));
        .ifPresent(id -> project.setId(id.substring(3)));

    Optional
        .ofNullable(p.getWebsiteurl())

@@ -502,6 +512,9 @@ public class SparkDumpEntitiesJob implements Serializable {
    }
    project.setFunding(funList);

// if (Optional.ofNullable(p.getMeasures()).isPresent()) {
// project.setIndicators(Utils.getIndicator(p.getMeasures()));
// }
    return project;
}

@@ -564,6 +577,10 @@ public class SparkDumpEntitiesJob implements Serializable {
    eu.dnetlib.dhp.schema.oaf.Organization org) {
    if (Boolean.TRUE.equals(org.getDataInfo().getDeletedbyinference()))
        return null;
    if (!Optional.ofNullable(org.getLegalname()).isPresent()
        && !Optional.ofNullable(org.getLegalshortname()).isPresent())
        return null;

    Organization organization = new Organization();

    Optional

@@ -602,7 +619,7 @@ public class SparkDumpEntitiesJob implements Serializable {

    Optional
        .ofNullable(org.getId())
        .ifPresent(value -> organization.setId(value));
        .ifPresent(value -> organization.setId(value.substring(3)));

    Optional
        .ofNullable(org.getPid())

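A recurring change in this commit strips the three-character entity prefix from OpenAIRE internal identifiers before they are dumped. A one-line sketch of the convention (assumption: ids look like "10|openaire____::abc", where "10|", "20|", "40|", "50|" encode the entity type):

    // "10|openaire____::abc" -> "openaire____::abc"; the 2-digit prefix still
    // drives the entity-type lookup via ModelSupport.idPrefixEntity before it is dropped.
    static String dropEntityPrefix(String id) {
        return id.substring(3);
    }
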
@@ -23,7 +23,6 @@ import org.slf4j.LoggerFactory;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.oa.model.Provenance;
import eu.dnetlib.dhp.oa.model.graph.Node;
import eu.dnetlib.dhp.oa.model.graph.RelType;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.oaf.DataInfo;

@@ -80,22 +79,18 @@ public class SparkDumpRelationJob implements Serializable {
private static void dumpRelation(SparkSession spark, String inputPath, String outputPath, Set<String> removeSet) {
    Dataset<Relation> relations = Utils.readPath(spark, inputPath, Relation.class);
    relations
        .filter((FilterFunction<Relation>) r -> !removeSet.contains(r.getRelClass()))
        .filter(
            (FilterFunction<Relation>) r -> !removeSet.contains(r.getRelClass())
                && !r.getSubRelType().equalsIgnoreCase("resultService"))
        .map((MapFunction<Relation, eu.dnetlib.dhp.oa.model.graph.Relation>) relation -> {
            eu.dnetlib.dhp.oa.model.graph.Relation relNew = new eu.dnetlib.dhp.oa.model.graph.Relation();
            relNew
                .setSource(
                    Node
                        .newInstance(
                            relation.getSource(),
                            ModelSupport.idPrefixEntity.get(relation.getSource().substring(0, 2))));
                .setSource(relation.getSource().substring(3));
            relNew.setSourceType(ModelSupport.idPrefixEntity.get(relation.getSource().substring(0, 2)));

            relNew
                .setTarget(
                    Node
                        .newInstance(
                            relation.getTarget(),
                            ModelSupport.idPrefixEntity.get(relation.getTarget().substring(0, 2))));
                .setTarget(relation.getTarget().substring(3));
            relNew.setTargetType(ModelSupport.idPrefixEntity.get(relation.getTarget().substring(0, 2)));

            relNew
                .setReltype(

@@ -24,7 +24,6 @@ import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
import eu.dnetlib.dhp.oa.model.Provenance;
import eu.dnetlib.dhp.oa.model.graph.Node;
import eu.dnetlib.dhp.oa.model.graph.RelType;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.common.ModelSupport;

@@ -155,8 +154,9 @@ public class SparkOrganizationRelation implements Serializable {
    .add(
        eu.dnetlib.dhp.oa.model.graph.Relation
            .newInstance(
                Node.newInstance(id, Constants.CONTEXT_ENTITY),
                Node.newInstance(organization, ModelSupport.idPrefixEntity.get(organization.substring(0, 2))),
                id, Constants.CONTEXT_ENTITY,
                organization.substring(3),
                ModelSupport.idPrefixEntity.get(organization.substring(0, 2)),
                RelType.newInstance(ModelConstants.IS_RELATED_TO, ModelConstants.RELATIONSHIP),
                Provenance
                    .newInstance(

@@ -167,8 +167,8 @@ public class SparkOrganizationRelation implements Serializable {
    .add(
        eu.dnetlib.dhp.oa.model.graph.Relation
            .newInstance(
                Node.newInstance(organization, ModelSupport.idPrefixEntity.get(organization.substring(0, 2))),
                Node.newInstance(id, Constants.CONTEXT_ENTITY),
                organization.substring(3), ModelSupport.idPrefixEntity.get(organization.substring(0, 2)),
                id, Constants.CONTEXT_ENTITY,
                RelType.newInstance(ModelConstants.IS_RELATED_TO, ModelConstants.RELATIONSHIP),
                Provenance
                    .newInstance(

@@ -4,14 +4,15 @@ package eu.dnetlib.dhp.oa.graph.dump.complete;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.*;
import org.apache.spark.sql.types.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -59,78 +60,42 @@ public class SparkSelectValidRelationsJob implements Serializable {
    isSparkSessionManaged,
    spark -> {
        Utils.removeOutputDir(spark, outputPath);
        selectValidRelation(spark, inputPath, outputPath);
        selectValidRelation2(spark, inputPath, outputPath);

    });

}

private static void selectValidRelation(SparkSession spark, String inputPath, String outputPath) {
    Dataset<Relation> relation = Utils.readPath(spark, inputPath + "/relation", Relation.class);
    Dataset<Publication> publication = Utils.readPath(spark, inputPath + "/publication", Publication.class);
    Dataset<eu.dnetlib.dhp.schema.oaf.Dataset> dataset = Utils
        .readPath(spark, inputPath + "/dataset", eu.dnetlib.dhp.schema.oaf.Dataset.class);
    Dataset<Software> software = Utils.readPath(spark, inputPath + "/software", Software.class);
    Dataset<OtherResearchProduct> other = Utils
        .readPath(spark, inputPath + "/otherresearchproduct", OtherResearchProduct.class);
    Dataset<Organization> organization = Utils.readPath(spark, inputPath + "/organization", Organization.class);
    Dataset<Project> project = Utils.readPath(spark, inputPath + "/project", Project.class);
    Dataset<Datasource> datasource = Utils.readPath(spark, inputPath + "/datasource", Datasource.class);

    relation.createOrReplaceTempView("relation");
    publication.createOrReplaceTempView("publication");
    dataset.createOrReplaceTempView("dataset");
    other.createOrReplaceTempView("other");
    software.createOrReplaceTempView("software");
    organization.createOrReplaceTempView("organization");
    project.createOrReplaceTempView("project");
    datasource.createOrReplaceTempView("datasource");
private static void selectValidRelation2(SparkSession spark, String inputPath, String outputPath) {
    final StructType structureSchema = new StructType()
        .fromDDL("`id` STRING, `dataInfo` STRUCT<`deletedbyinference`:BOOLEAN,`invisible`:BOOLEAN>");

    org.apache.spark.sql.Dataset<Row> df = spark.createDataFrame(new ArrayList<Row>(), structureSchema);
    List<String> entities = Arrays
        .asList(
            "publication", "dataset", "otherresearchproduct", "software", "organization", "project", "datasource");
    for (String e : entities)
        df = df
            .union(
                spark
                    .sql(
                        "SELECT id " +
                            "FROM publication " +
                            "WHERE datainfo.deletedbyinference = false AND datainfo.invisible = false " +
                            "UNION ALL " +
                            "SELECT id " +
                            "FROM dataset " +
                            "WHERE datainfo.deletedbyinference = false AND datainfo.invisible = false " +
                            "UNION ALL " +
                            "SELECT id " +
                            "FROM other " +
                            "WHERE datainfo.deletedbyinference = false AND datainfo.invisible = false " +
                            "UNION ALL " +
                            "SELECT id " +
                            "FROM software " +
                            "WHERE datainfo.deletedbyinference = false AND datainfo.invisible = false " +
                            "UNION ALL " +
                            "SELECT id " +
                            "FROM organization " +
                            "WHERE datainfo.deletedbyinference = false AND datainfo.invisible = false " +
                            "UNION ALL " +
                            "SELECT id " +
                            "FROM project " +
                            "WHERE datainfo.deletedbyinference = false AND datainfo.invisible = false " +
                            "UNION ALL " +
                            "SELECT id " +
                            "FROM datasource " +
                            "WHERE datainfo.deletedbyinference = false AND datainfo.invisible = false ")
                    .createOrReplaceTempView("identifiers");
                    .read()
                    .schema(structureSchema)
                    .json(inputPath + "/" + e)
                    .filter("dataInfo.deletedbyinference != true and dataInfo.invisible != true"));

    spark
        .sql(
            "SELECT relation.* " +
                "FROM relation " +
                "JOIN identifiers i1 " +
                "ON source = i1.id " +
                "JOIN identifiers i2 " +
                "ON target = i2.id " +
                "WHERE datainfo.deletedbyinference = false")
        .as(Encoders.bean(Relation.class))
    org.apache.spark.sql.Dataset<Row> relations = spark
        .read()
        .schema(Encoders.bean(Relation.class).schema())
        .json(inputPath + "/relation")
        .filter("dataInfo.deletedbyinference == false");

    relations
        .join(df, relations.col("source").equalTo(df.col("id")), "leftsemi")
        .join(df, relations.col("target").equalTo(df.col("id")), "leftsemi")
        .write()
        .option("compression", "gzip")
        .mode(SaveMode.Overwrite)
        .option("compression", "gzip")
        .json(outputPath);
}

}
}

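selectValidRelation2 replaces the SQL-over-temp-views approach with schema-pruned JSON reads: only id and the two dataInfo flags are deserialized per entity, so full records are never materialized. A hedged sketch of the reading pattern, using the paths and field names visible in the hunk above:

    // Read just the columns needed to decide validity; everything else is
    // skipped by the JSON reader thanks to the explicit schema.
    static Dataset<Row> validIds(SparkSession spark, String path) {
        StructType schema = StructType
            .fromDDL("`id` STRING, `dataInfo` STRUCT<`deletedbyinference`:BOOLEAN,`invisible`:BOOLEAN>");
        return spark
            .read()
            .schema(schema)
            .json(path)
            .filter("dataInfo.deletedbyinference != true and dataInfo.invisible != true")
            .select("id");
    }
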
@@ -110,7 +110,9 @@ public class SparkFindResultsRelatedToCountry implements Serializable {

Dataset<Organization> organizationsInCountry = Utils
    .readPath(spark, inputPath + "/organization", Organization.class)
    .filter((FilterFunction<Organization>) o -> o.getCountry().getClassid().equals(country));
    .filter(
        (FilterFunction<Organization>) o -> !o.getDataInfo().getDeletedbyinference()
            && o.getCountry().getClassid().equals(country));

Dataset<Relation> relsOrganizationResults = Utils
    .readPath(spark, inputPath + "/relation", Relation.class)

@@ -11,6 +11,7 @@ import java.util.stream.Collectors;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.*;

@@ -18,6 +19,8 @@ import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.oa.model.community.CommunityResult;

@@ -30,6 +33,7 @@ import eu.dnetlib.dhp.oa.model.community.Project;
*/
public class SparkDumpFunderResults implements Serializable {
    private static final Logger log = LoggerFactory.getLogger(SparkDumpFunderResults.class);
    private static final ObjectMapper MAPPER = new ObjectMapper();

    public static void main(String[] args) throws Exception {
        String jsonConfiguration = IOUtils

@@ -65,14 +69,24 @@ public class SparkDumpFunderResults implements Serializable {
    .union(Utils.readPath(spark, inputPath + "/otherresearchproduct", CommunityResult.class))
    .union(Utils.readPath(spark, inputPath + "/software", CommunityResult.class));
log.info("Number of result {}", result.count());

Dataset<String> tmp = result
    .flatMap((FlatMapFunction<CommunityResult, String>) cr -> cr.getProjects().stream().map(p -> {
        return getFunderName(p);
    }).collect(Collectors.toList()).iterator(), Encoders.STRING())
    .distinct();
List<String> funderList = tmp.collectAsList();
funderList.forEach(funder -> {
    dumpResults(funder, result, outputPath);
funderList.stream().parallel().forEach(funder -> {
    result
        .filter(
            (FilterFunction<CommunityResult>) r -> Optional.ofNullable(r.getProjects()).isPresent() &&
                r.getProjects().stream().anyMatch(p -> getFunderName(p).equals(funder)))
        .map((MapFunction<CommunityResult, String>) r -> MAPPER.writeValueAsString(r), Encoders.STRING())
        .write()
        .mode(SaveMode.Overwrite)
        .option("compression", "gzip")
        .text(outputPath + "/" + funder);

});
}

@@ -86,7 +100,7 @@ public class SparkDumpFunderResults implements Serializable {
    }
    return fName;
} else {
    String fName = p.getId().substring(3, p.getId().indexOf("_")).toUpperCase();
    String fName = p.getId().substring(0, p.getId().indexOf("_")).toUpperCase();
    if (fName.equalsIgnoreCase("ec")) {
        if (p.getId().contains("he")) {
            fName += "_HE";

@@ -108,23 +122,4 @@ public class SparkDumpFunderResults implements Serializable {
    }
}

private static void dumpResults(String funder, Dataset<CommunityResult> results, String outputPath) {
    results.map((MapFunction<CommunityResult, CommunityResult>) r -> {
        if (!Optional.ofNullable(r.getProjects()).isPresent()) {
            return null;
        }
        for (Project p : r.getProjects()) {
            String fName = getFunderName(p);
            if (fName.equalsIgnoreCase(funder)) {
                return r;
            }
        }
        return null;
    }, Encoders.bean(CommunityResult.class))
        .filter(Objects::nonNull)
        .write()
        .mode(SaveMode.Overwrite)
        .option("compression", "gzip")
        .json(outputPath + "/" + funder);
}
}

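The rewritten dump loop writes one output per funder directly from the shared result dataset instead of going through dumpResults. Submitting the writes from a parallel stream lets independent Spark jobs be scheduled concurrently; a condensed sketch of the idea (assumption: result is reused across iterations, so caching it upstream would be a sensible addition):

    // One write job per funder; parallel() only parallelizes job submission on
    // the driver, the actual work is still distributed by Spark.
    funderList.stream().parallel().forEach(funder -> result
        .filter((FilterFunction<CommunityResult>) r -> r.getProjects() != null
            && r.getProjects().stream().anyMatch(p -> getFunderName(p).equals(funder)))
        .map((MapFunction<CommunityResult, String>) MAPPER::writeValueAsString, Encoders.STRING())
        .write()
        .mode(SaveMode.Overwrite)
        .option("compression", "gzip")
        .text(outputPath + "/" + funder));
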
@@ -58,7 +58,12 @@ public class ProjectsSubsetSparkJob implements Serializable {
    String projectListPath) {
    Dataset<String> projectList = spark.read().textFile(projectListPath);
    Dataset<Project> projects;
    projects = Utils.readPath(spark, inputPath, Project.class);
    projects = Utils
        .readPath(spark, inputPath, Project.class)
        .map((MapFunction<Project, Project>) p -> {
            p.setId("40|" + p.getId());
            return p;
        }, Encoders.bean(Project.class));
    projects
        .joinWith(projectList, projects.col("id").equalTo(projectList.col("value")), "left")
        .map((MapFunction<Tuple2<Project, String>, Project>) t2 -> {

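The subset job now re-adds the "40|" prefix before joining: the dumped project ids no longer carry it, while the ids in the projectListPath file apparently still do (an assumption inferred from this change). In short:

    // Dumped id: "corda__h2020::abc"; list file entry: "40|corda__h2020::abc".
    // Restoring the prefix makes the equality join key match again.
    p.setId("40|" + p.getId());
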
@@ -73,7 +73,7 @@ public class SparkSelectValidRelation implements Serializable {
// read the results

getValidRelations(
    spark, Utils
    Utils
        .readPath(spark, relationPath, Relation.class),
    getEntitiesId(spark, inputPath))

@@ -108,7 +108,7 @@ public class SparkSelectValidRelationContext implements Serializable {
    .readPath(spark, contextRelationPath + "/context", Relation.class)
    .union(Utils.readPath(spark, contextRelationPath + "/contextOrg", Relation.class))
    .map(
        (MapFunction<Relation, Tuple2<String, Relation>>) r -> new Tuple2<>(r.getSource().getId(), r),
        (MapFunction<Relation, Tuple2<String, Relation>>) r -> new Tuple2<>(r.getSource(), r),
        Encoders.tuple(Encoders.STRING(), Encoders.bean(Relation.class)));

Dataset<ResearchCommunity> allowedContext = Utils

@@ -118,7 +118,7 @@ public class SparkSelectValidRelationContext implements Serializable {
    .joinWith(dumpedIds, relationSource.col("_1").equalTo(dumpedIds.col("value")))
    .map(
        (MapFunction<Tuple2<Tuple2<String, Relation>, String>, Tuple2<String, Relation>>) t2 -> new Tuple2<>(
            t2._1()._2().getTarget().getId(), t2._1()._2()),
            t2._1()._2().getTarget(), t2._1()._2()),
        Encoders.tuple(Encoders.STRING(), Encoders.bean(Relation.class)));

relJoinSource

@@ -135,7 +135,7 @@ public class SparkSelectValidRelationContext implements Serializable {
    .joinWith(allowedContext, relationSource.col("_1").equalTo(allowedContext.col("id")))
    .map(
        (MapFunction<Tuple2<Tuple2<String, Relation>, ResearchCommunity>, Tuple2<String, Relation>>) t2 -> new Tuple2<>(
            t2._1()._2().getTarget().getId(), t2._1()._2()),
            t2._1()._2().getTarget(), t2._1()._2()),
        Encoders.tuple(Encoders.STRING(), Encoders.bean(Relation.class)));

relJoinSource

@@ -122,8 +122,8 @@ public class ZenodoAPIClient implements Serializable {
// Get response body
json = response.body().string();

eu.dnetlib.dhp.common.api.zenodo.ZenodoModel newSubmission = new Gson()
    .fromJson(json, eu.dnetlib.dhp.common.api.zenodo.ZenodoModel.class);
ZenodoModel newSubmission = new Gson()
    .fromJson(json, ZenodoModel.class);
this.bucket = newSubmission.getLinks().getBucket();
this.deposition_id = newSubmission.getId();

@@ -374,31 +374,24 @@ public class ZenodoAPIClient implements Serializable {

this.deposition_id = deposition_id;

String json = "{}";
OkHttpClient httpClient = new OkHttpClient.Builder().connectTimeout(600, TimeUnit.SECONDS).build();

URL url = new URL(urlString + "/" + deposition_id);
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
Request request = new Request.Builder()
    .url(urlString + "/" + deposition_id)
    .addHeader("Authorization", "Bearer " + access_token)
    .build();

conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
conn.setRequestMethod("POST");
conn.setDoOutput(true);
try (OutputStream os = conn.getOutputStream()) {
    byte[] input = json.getBytes("utf-8");
    os.write(input, 0, input.length);
}
try (Response response = httpClient.newCall(request).execute()) {

    String body = getBody(conn);
    if (!response.isSuccessful())
        throw new IOException("Unexpected code " + response + response.body().string());

    int responseCode = conn.getResponseCode();
    conn.disconnect();

    if (!checkOKStatus(responseCode))
        throw new IOException("Unexpected code " + responseCode + body);

    ZenodoModel zenodoModel = new Gson().fromJson(body, ZenodoModel.class);
    ZenodoModel zenodoModel = new Gson()
        .fromJson(response.body().string(), ZenodoModel.class);
    bucket = zenodoModel.getLinks().getBucket();
    return response.code();

    return responseCode;
}

}

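The new-version call now goes through a bare HttpURLConnection with an explicit empty JSON body instead of OkHttp. The getBody helper it relies on is not shown in this hunk; a plausible shape, assuming it simply drains the response stream as UTF-8 (a hypothetical reconstruction, not the actual implementation):

    // Read the whole response body of an HttpURLConnection as text.
    private static String getBody(HttpURLConnection conn) throws IOException {
        try (BufferedReader br = new BufferedReader(
            new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            return br.lines().collect(Collectors.joining("\n"));
        }
    }
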
@@ -80,8 +80,8 @@
    </configuration>
</global>

<!-- <start to="save_community_map" />-->
<start to="make_archive" />
<start to="save_community_map" />
<!-- <start to="make_archive" />-->

<action name="save_community_map">
    <java>

@@ -239,34 +239,6 @@

<join name="join_dump" to="fork_dump_community"/>

<action name="select_subset">
    <spark xmlns="uri:oozie:spark-action:0.2">
        <master>yarn</master>
        <mode>cluster</mode>
        <name>Select valid table relation</name>
        <class>eu.dnetlib.dhp.oa.graph.dump.subset.SparkSelectSubset</class>
        <jar>dump-${projectVersion}.jar</jar>
        <spark-opts>
            --executor-memory=${sparkExecutorMemory}
            --executor-cores=${sparkExecutorCores}
            --driver-memory=${sparkDriverMemory}
            --conf spark.extraListeners=${spark2ExtraListeners}
            --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
            --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
            --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            --conf spark.sql.shuffle.partitions=3840
        </spark-opts>
        <arg>--sourcePath</arg><arg>${sourcePath}</arg>
        <arg>--outputPath</arg><arg>${outputPath}</arg>
        <arg>--removeSet</arg><arg>${removeSet}</arg>

    </spark>
    <ok to="fork_dump_community"/>
    <error to="Kill"/>
</action>


<fork name="fork_dump_community">
    <path start="dump_publication"/>
    <path start="dump_dataset"/>

@@ -17,6 +17,11 @@
    "paramLongName": "nameNode",
    "paramDescription": "the name node",
    "paramRequired": true
}, {
    "paramName": "md",
    "paramLongName": "masterDuplicate",
    "paramDescription": "the master duplicate path for datasource deduplication",
    "paramRequired": false
}
]

@@ -16,5 +16,11 @@
    "paramLongName": "isSparkSessionManaged",
    "paramDescription": "true if the spark session is managed, false otherwise",
    "paramRequired": false
},
{
"paramName": "sb",
|
||||
"paramLongName": "substring",
|
||||
"paramDescription": "true if the spark session is managed, false otherwise",
|
||||
"paramRequired": false
|
||||
}
|
||||
]
|
|
@@ -256,6 +256,18 @@
            <name>resultAggregation</name>
            <value>${resultAggregation}</value>
        </property>
        <property>
            <name>postgresURL</name>
            <value>${postgresURL}</value>
        </property>
        <property>
            <name>postgresUser</name>
            <value>${postgresUser}</value>
        </property>
        <property>
            <name>postgresPassword</name>
            <value>${postgresPassword}</value>
        </property>
    </configuration>
</sub-workflow>
<ok to="make_archive" />

@@ -85,9 +85,20 @@
    </configuration>
</global>

<start to="fork_dump" />

<start to="get_master_duplicate" />

<action name="get_master_duplicate">
    <java>
        <main-class>eu.dnetlib.dhp.oa.graph.dump.subset.ReadMasterDuplicateFromDB</main-class>
        <arg>--hdfsPath</arg><arg>${workingDir}/masterduplicate</arg>
        <arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
        <arg>--postgresUrl</arg><arg>${postgresURL}</arg>
        <arg>--postgresUser</arg><arg>${postgresUser}</arg>
        <arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
    </java>
    <ok to="fork_dump"/>
    <error to="Kill"/>
</action>

<fork name="fork_dump">
    <path start="dump_publication"/>

@@ -350,6 +361,7 @@
    <arg>--hdfsPath</arg><arg>${workingDir}/relation/context</arg>
    <arg>--nameNode</arg><arg>${nameNode}</arg>
    <arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
    <arg>--masterDuplicate</arg><arg>${workingDir}/masterduplicate</arg>
</java>
<ok to="join_context"/>
<error to="Kill"/>

@@ -102,6 +102,7 @@
    </spark-opts>
    <arg>--sourcePath</arg><arg>${sourcePath}</arg>
    <arg>--outputPath</arg><arg>${workingDir}/preparedInfo</arg>
    <arg>--substring</arg><arg>false</arg>
</spark>
<ok to="fork_result_linked_to_projects"/>
<error to="Kill"/>

@@ -123,7 +124,7 @@
    <class>eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkResultLinkedToProject</class>
    <jar>dump-${projectVersion}.jar</jar>
    <spark-opts>
        --executor-memory=${sparkExecutorMemory}
        --executor-memory=9G
        --executor-cores=${sparkExecutorCores}
        --driver-memory=${sparkDriverMemory}
        --conf spark.extraListeners=${spark2ExtraListeners}

@@ -131,6 +132,7 @@
        --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
        --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
        --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
        --conf spark.sql.shuffle.partitions=3840
    </spark-opts>
    <arg>--sourcePath</arg><arg>${sourcePath}/publication</arg>
    <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>

@@ -649,6 +649,7 @@
    <arg>--hdfsPath</arg><arg>${workingDir}/dump/relation/context</arg>
    <arg>--nameNode</arg><arg>${nameNode}</arg>
    <arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
    <arg>--masterDuplicate</arg><arg>${workingDir}/masterduplicate</arg>
</java>
<ok to="join_context"/>
<error to="Kill"/>

@@ -4,6 +4,7 @@ package eu.dnetlib.dhp.oa.graph.dump;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Optional;

import org.apache.commons.io.FileUtils;

@@ -20,26 +21,16 @@ import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;

import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
import eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts;
import eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpEntitiesJob;
import eu.dnetlib.dhp.oa.graph.dump.subset.SparkDumpResult;
import eu.dnetlib.dhp.oa.graph.dump.subset.criteria.VerbResolver;
import eu.dnetlib.dhp.oa.graph.dump.subset.criteria.VerbResolverFactory;
import eu.dnetlib.dhp.oa.graph.dump.subset.selectionconstraints.ProtoMap;
import eu.dnetlib.dhp.oa.graph.dump.subset.selectionconstraints.SelectionConstraints;
import eu.dnetlib.dhp.oa.model.*;
import eu.dnetlib.dhp.oa.model.community.CommunityResult;
import eu.dnetlib.dhp.oa.model.graph.GraphResult;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.Dataset;
import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct;
import eu.dnetlib.dhp.schema.oaf.Publication;
import eu.dnetlib.dhp.schema.oaf.Software;

//@Disabled
public class DumpJobTest {

@@ -149,7 +140,7 @@ public class DumpJobTest {
GraphResult gr = verificationDataset.first();

Assertions.assertTrue(Optional.ofNullable(gr.getIndicators().getUsageCounts()).isPresent());
Assertions.assertFalse(Optional.ofNullable(gr.getIndicators().getImpactMeasures()).isPresent());
Assertions.assertFalse(Optional.ofNullable(gr.getIndicators().getBipIndicators()).isPresent());

}

@@ -356,7 +347,7 @@ public class DumpJobTest {

Assertions.assertTrue(null == gr.getGeolocation() || gr.getGeolocation().size() == 0);

Assertions.assertEquals("50|pensoft_____::00ea4a1cd53806a97d62ea6bf268f2a2", gr.getId());
Assertions.assertEquals("pensoft_____::00ea4a1cd53806a97d62ea6bf268f2a2", gr.getId());

Assertions.assertEquals(1, gr.getOriginalId().size());
Assertions

@@ -372,19 +363,11 @@ public class DumpJobTest {
Assertions.assertEquals("2020-03-23T00:20:51.392Z", gr.getDateofcollection());

Assertions.assertTrue(Optional.ofNullable(gr.getIndicators().getUsageCounts()).isPresent());
Assertions.assertTrue(Optional.ofNullable(gr.getIndicators().getImpactMeasures()).isPresent());
Assertions.assertTrue(Optional.ofNullable(gr.getIndicators().getBipIndicators()).isPresent());

Assertions
    .assertTrue(gr.getIndicators().getImpactMeasures().getInfluence() != null);
Assertions
    .assertTrue(gr.getIndicators().getImpactMeasures().getPopularity_alt() != null);
    .assertEquals(5, gr.getIndicators().getBipIndicators().size());

Assertions
    .assertTrue(gr.getIndicators().getImpactMeasures().getPopularity() != null);
Assertions
    .assertTrue(gr.getIndicators().getImpactMeasures().getInfluence_alt() != null);
Assertions
    .assertTrue(gr.getIndicators().getImpactMeasures().getImpulse() != null);
Assertions
    .assertTrue(gr.getIndicators().getUsageCounts() != null);
Assertions

@@ -392,20 +375,31 @@ public class DumpJobTest {
Assertions
    .assertTrue(Integer.valueOf(gr.getIndicators().getUsageCounts().getViews()) >= 0);

Assertions.assertEquals("6.01504990349e-09", gr.getIndicators().getImpactMeasures().getInfluence().getScore());
Assertions.assertEquals("C", gr.getIndicators().getImpactMeasures().getInfluence().getClazz());
List<Score> bip = gr.getIndicators().getBipIndicators();
for (Score in : bip) {
    switch (in.getIndicator()) {
        case "influence":
            Assertions.assertEquals("6.01504990349e-09", in.getScore());
            Assertions.assertEquals("C", in.getClazz());
            break;
        case "popularity_alt":
            Assertions.assertEquals("2.304", in.getScore());
            Assertions.assertEquals("C", in.getClazz());
            break;
        case "popularity":
            Assertions.assertEquals("1.81666032463e-08", in.getScore());
            Assertions.assertEquals("C", in.getClazz());
            break;
        case "influence_alt":
            Assertions.assertEquals("8.0", in.getScore());
            Assertions.assertEquals("C", in.getClazz());
            break;
        case "impulse":
            Assertions.assertEquals("8.0", in.getScore());
            Assertions.assertEquals("C", in.getClazz());
    }

Assertions.assertEquals("2.304", gr.getIndicators().getImpactMeasures().getPopularity_alt().getScore());
Assertions.assertEquals("C", gr.getIndicators().getImpactMeasures().getPopularity_alt().getClazz());

Assertions.assertEquals("1.81666032463e-08", gr.getIndicators().getImpactMeasures().getPopularity().getScore());
Assertions.assertEquals("C", gr.getIndicators().getImpactMeasures().getPopularity().getClazz());

Assertions.assertEquals("8.0", gr.getIndicators().getImpactMeasures().getInfluence_alt().getScore());
Assertions.assertEquals("C", gr.getIndicators().getImpactMeasures().getInfluence_alt().getClazz());

Assertions.assertEquals("8.0", gr.getIndicators().getImpactMeasures().getImpulse().getScore());
Assertions.assertEquals("C", gr.getIndicators().getImpactMeasures().getImpulse().getClazz());
}

Assertions.assertEquals("0", gr.getIndicators().getUsageCounts().getDownloads());
Assertions.assertEquals("1", gr.getIndicators().getUsageCounts().getViews());

@@ -686,18 +680,18 @@ public class DumpJobTest {

Assertions.assertEquals(1, cr.getCollectedfrom().size());
Assertions
    .assertEquals("10|openaire____::fdc7e0400d8c1634cdaf8051dbae23db", cr.getCollectedfrom().get(0).getKey());
    .assertEquals("openaire____::fdc7e0400d8c1634cdaf8051dbae23db", cr.getCollectedfrom().get(0).getKey());
Assertions.assertEquals("Pensoft", cr.getCollectedfrom().get(0).getValue());

Assertions.assertEquals(1, cr.getInstance().size());
Assertions
    .assertEquals(
        "10|openaire____::fdc7e0400d8c1634cdaf8051dbae23db",
        "openaire____::fdc7e0400d8c1634cdaf8051dbae23db",
        cr.getInstance().get(0).getCollectedfrom().getKey());
Assertions.assertEquals("Pensoft", cr.getInstance().get(0).getCollectedfrom().getValue());
Assertions
    .assertEquals(
        "10|openaire____::e707e544b9a5bd23fc27fbfa65eb60dd", cr.getInstance().get(0).getHostedby().getKey());
        "openaire____::e707e544b9a5bd23fc27fbfa65eb60dd", cr.getInstance().get(0).getHostedby().getKey());
Assertions.assertEquals("One Ecosystem", cr.getInstance().get(0).getHostedby().getValue());

}

@@ -1028,9 +1022,9 @@ public class DumpJobTest {

Assertions.assertTrue(temp.count() == 2);

Assertions.assertTrue(temp.filter("id = '50|datacite____::05c611fdfc93d7a2a703d1324e28104a'").count() == 1);
Assertions.assertTrue(temp.filter("id = 'datacite____::05c611fdfc93d7a2a703d1324e28104a'").count() == 1);

Assertions.assertTrue(temp.filter("id = '50|dedup_wf_001::01e6a28565ca01376b7548e530c6f6e8'").count() == 1);
Assertions.assertTrue(temp.filter("id = 'dedup_wf_001::01e6a28565ca01376b7548e530c6f6e8'").count() == 1);

temp = spark
    .sql(

@@ -1043,7 +1037,7 @@ public class DumpJobTest {
.assertEquals(
    "3131.64",
    temp
        .filter("id = '50|datacite____::05c611fdfc93d7a2a703d1324e28104a'")
        .filter("id = 'datacite____::05c611fdfc93d7a2a703d1324e28104a'")
        .collectAsList()
        .get(0)
        .getString(1));

@@ -1051,7 +1045,7 @@ public class DumpJobTest {
.assertEquals(
    "EUR",
    temp
        .filter("id = '50|datacite____::05c611fdfc93d7a2a703d1324e28104a'")
        .filter("id = 'datacite____::05c611fdfc93d7a2a703d1324e28104a'")
        .collectAsList()
        .get(0)
        .getString(2));

@@ -1060,7 +1054,7 @@ public class DumpJobTest {
.assertEquals(
    "2578.35",
    temp
        .filter("id = '50|dedup_wf_001::01e6a28565ca01376b7548e530c6f6e8'")
        .filter("id = 'dedup_wf_001::01e6a28565ca01376b7548e530c6f6e8'")
        .collectAsList()
        .get(0)
        .getString(1));

@@ -1068,7 +1062,7 @@ public class DumpJobTest {
.assertEquals(
    "EUR",
    temp
        .filter("id = '50|dedup_wf_001::01e6a28565ca01376b7548e530c6f6e8'")
        .filter("id = 'dedup_wf_001::01e6a28565ca01376b7548e530c6f6e8'")
        .collectAsList()
        .get(0)
        .getString(2));

@@ -82,6 +82,7 @@ public class SplitForCommunityTest {
    .textFile(workingDir.toString() + "/split/Digital_Humanities_and_Cultural_Heritage")
    .map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));

System.out.println(tmp.count());
}

@Test

@@ -25,8 +25,8 @@ public class ZenodoUploadTest {

private static String workingDir;

private final String URL_STRING = "https://sandbox.zenodo.org/api/deposit/depositions";
private final String ACCESS_TOKEN = "OzzOsyucEIHxCEfhlpsMo3myEiwpCza3trCRL7ddfGTAK9xXkIP2MbXd6Vg4";
private final String URL_STRING = "https://zenodo.org/api/deposit/depositions";
private final String ACCESS_TOKEN = "GxqutB1JnEmdvBafQI2cCjtUvoOs0novDuie3hxCEQUJcErHVMhkJjawIqhb";

@BeforeAll
public static void beforeAll() throws IOException {

@@ -194,4 +194,25 @@ public class ZenodoUploadTest {
// Assertions.assertEquals(202, client.publish());
}

@Test
void testOnlyUpload() throws Exception, MissingConceptDoiException {

    ZenodoAPIClient client = new ZenodoAPIClient(URL_STRING,
        ACCESS_TOKEN);

    client.uploadOpenDeposition("8144316");

    File file = new File("/Users/miriam.baglioni/Desktop/EOSC_DUMP/publication.tar");
    // File file = new File(getClass()
    // .getResource("/eu/dnetlib/dhp/common/api/newVersion2")
    // .getPath());

    InputStream is = new FileInputStream(file);

    Assertions.assertEquals(200, client.uploadIS3(is, "newVersion_deposition", file.length()));

    // Assertions.assertEquals(202, client.publish());

}

}

@@ -13,6 +13,7 @@ import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;

import eu.dnetlib.dhp.oa.graph.dump.subset.MasterDuplicate;
import eu.dnetlib.dhp.oa.model.graph.Relation;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.oaf.Datasource;

@@ -562,12 +563,12 @@ class CreateRelationTest {
.assertTrue(
    rList
        .stream()
        .map(r -> r.getSource().getId())
        .map(r -> r.getSource())
        .collect(Collectors.toSet())
        .contains(
            String
                .format(
                    "%s|%s::%s", Constants.CONTEXT_ID,
                    "%s::%s",
                    Constants.CONTEXT_NS_PREFIX,
                    DHPUtils.md5("dh-ch"))));

@@ -579,11 +580,11 @@ class CreateRelationTest {
.filter(
    r -> r
        .getSource()
        .getId()

        .equals(
            String
                .format(
                    "%s|%s::%s", Constants.CONTEXT_ID,
                    "%s::%s",
                    Constants.CONTEXT_NS_PREFIX,
                    DHPUtils.md5("dh-ch"))))
.collect(Collectors.toList())

@@ -597,11 +598,11 @@ class CreateRelationTest {
.filter(
    r -> r
        .getTarget()
        .getId()

        .equals(
            String
                .format(
                    "%s|%s::%s", Constants.CONTEXT_ID,
                    "%s::%s",
                    Constants.CONTEXT_NS_PREFIX,
                    DHPUtils.md5("dh-ch"))))
.collect(Collectors.toList())

@@ -612,28 +613,28 @@ class CreateRelationTest {
.filter(
    r -> r
        .getSource()
        .getId()

        .equals(
            String
                .format(
                    "%s|%s::%s", Constants.CONTEXT_ID,
                    "%s::%s",
                    Constants.CONTEXT_NS_PREFIX,
                    DHPUtils.md5("dh-ch"))))
.map(r -> r.getTarget().getId())
.map(r -> r.getTarget())
.collect(Collectors.toSet());

Assertions
    .assertTrue(
        tmp.contains("10|re3data_____::9ebe127e5f3a0bf401875690f3bb6b81") &&
            tmp.contains("10|doajarticles::c6cd4b532e12868c1d760a8d7cda6815") &&
            tmp.contains("10|doajarticles::a6de4499bb87bf3c01add0a9e2c9ed0b") &&
            tmp.contains("10|doajarticles::6eb31d13b12bc06bbac06aef63cf33c9") &&
            tmp.contains("10|doajarticles::0da84e9dfdc8419576169e027baa8028") &&
            tmp.contains("10|re3data_____::84e123776089ce3c7a33db98d9cd15a8") &&
            tmp.contains("10|openaire____::c5502a43e76feab55dd00cf50f519125") &&
            tmp.contains("10|re3data_____::a48f09c562b247a9919acfe195549b47") &&
            tmp.contains("10|opendoar____::97275a23ca44226c9964043c8462be96") &&
            tmp.contains("10|doajarticles::2899208a99aa7d142646e0a80bfeef05"));
        tmp.contains("re3data_____::9ebe127e5f3a0bf401875690f3bb6b81") &&
            tmp.contains("doajarticles::c6cd4b532e12868c1d760a8d7cda6815") &&
            tmp.contains("doajarticles::a6de4499bb87bf3c01add0a9e2c9ed0b") &&
            tmp.contains("doajarticles::6eb31d13b12bc06bbac06aef63cf33c9") &&
            tmp.contains("doajarticles::0da84e9dfdc8419576169e027baa8028") &&
            tmp.contains("re3data_____::84e123776089ce3c7a33db98d9cd15a8") &&
            tmp.contains("openaire____::c5502a43e76feab55dd00cf50f519125") &&
            tmp.contains("re3data_____::a48f09c562b247a9919acfe195549b47") &&
            tmp.contains("opendoar____::97275a23ca44226c9964043c8462be96") &&
            tmp.contains("doajarticles::2899208a99aa7d142646e0a80bfeef05"));

}

@@ -657,12 +658,12 @@ class CreateRelationTest {
.assertFalse(
    rList
        .stream()
        .map(r -> r.getSource().getId())
        .map(r -> r.getSource())
        .collect(Collectors.toSet())
        .contains(
            String
                .format(
                    "%s|%s::%s", Constants.CONTEXT_ID,
                    "%s::%s",
                    Constants.CONTEXT_NS_PREFIX,
                    DHPUtils.md5("dh-ch"))));

@@ -674,11 +675,11 @@ class CreateRelationTest {
.filter(
    r -> r
        .getSource()
        .getId()

        .equals(
            String
                .format(
                    "%s|%s::%s", Constants.CONTEXT_ID,
                    "%s::%s",
                    Constants.CONTEXT_NS_PREFIX,
                    DHPUtils.md5("clarin"))))
.collect(Collectors.toList())

@@ -692,11 +693,11 @@ class CreateRelationTest {
.filter(
    r -> r
        .getTarget()
        .getId()

        .equals(
            String
                .format(
                    "%s|%s::%s", Constants.CONTEXT_ID,
                    "%s::%s",
                    Constants.CONTEXT_NS_PREFIX,
                    DHPUtils.md5("clarin"))))
.collect(Collectors.toList())

@@ -707,27 +708,135 @@ class CreateRelationTest {
.filter(
    r -> r
        .getSource()
        .getId()

        .equals(
            String
                .format(
                    "%s|%s::%s", Constants.CONTEXT_ID,
                    "%s::%s",
                    Constants.CONTEXT_NS_PREFIX,
                    DHPUtils.md5("clarin"))))
.map(r -> r.getTarget().getId())
.map(r -> r.getTarget())
.collect(Collectors.toSet());

Assertions
    .assertTrue(
        tmp.contains("40|corda__h2020::b5a4eb56bf84bef2ebc193306b4d423f") &&
            tmp.contains("40|corda_______::ef782b2d85676aa3e5a907427feb18c4"));
        tmp.contains("corda__h2020::b5a4eb56bf84bef2ebc193306b4d423f") &&
            tmp.contains("corda_______::ef782b2d85676aa3e5a907427feb18c4"));

rList.forEach(rel -> {
    if (rel.getSource().getId().startsWith("40|")) {
        String proj = rel.getSource().getId().substring(3);
    if (rel.getSourceType().equals("project")) {
        String proj = rel.getSource();
        Assertions.assertTrue(proj.substring(0, proj.indexOf("::")).length() == 12);
        Assertions.assertFalse(proj.startsWith("40|"));
    }
});

}

@Test
void test3() {
    List<ContextInfo> cInfoList = new ArrayList<>();
    final Consumer<ContextInfo> consumer = ci -> cInfoList.add(ci);

    MasterDuplicate md1 = new MasterDuplicate();
    md1.setMaster("10|fake________::9ebe127e5f3a0bf401875690f3bb6b81");
    md1.setDuplicate("10|re3data_____::9ebe127e5f3a0bf401875690f3bb6b81");
    queryInformationSystem
        .getContextRelation(
            consumer, "contentproviders", ModelSupport.getIdPrefix(Datasource.class), Arrays.asList(md1));

    cInfoList.forEach(c -> System.out.println(new Gson().toJson(c)));

    List<Relation> rList = new ArrayList<>();

    cInfoList.forEach(cInfo -> Process.getRelation(cInfo).forEach(rList::add));

    rList.forEach(r -> {
        try {
            System.out.println(new ObjectMapper().writeValueAsString(r));
        } catch (JsonProcessingException e) {
            e.printStackTrace();
        }
    });

    Assertions.assertEquals(34, rList.size());

    Assertions
        .assertTrue(
            rList
                .stream()
                .map(r -> r.getSource())
                .collect(Collectors.toSet())
                .contains(
                    String
                        .format(
                            "%s::%s",
                            Constants.CONTEXT_NS_PREFIX,
                            DHPUtils.md5("dh-ch"))));

    Assertions
        .assertEquals(
            10,
            rList
                .stream()
                .filter(
                    r -> r
                        .getSource()

                        .equals(
                            String
                                .format(
                                    "%s::%s",
                                    Constants.CONTEXT_NS_PREFIX,
                                    DHPUtils.md5("dh-ch"))))
                .collect(Collectors.toList())
                .size());

    Assertions
        .assertEquals(
            10,
            rList
                .stream()
                .filter(
                    r -> r
                        .getTarget()

                        .equals(
                            String
                                .format(
                                    "%s::%s",
                                    Constants.CONTEXT_NS_PREFIX,
                                    DHPUtils.md5("dh-ch"))))
                .collect(Collectors.toList())
                .size());

    Set<String> tmp = rList
        .stream()
        .filter(
            r -> r
                .getSource()

                .equals(
                    String
                        .format(
                            "%s::%s",
                            Constants.CONTEXT_NS_PREFIX,
                            DHPUtils.md5("dh-ch"))))
        .map(r -> r.getTarget())
        .collect(Collectors.toSet());

    Assertions
        .assertTrue(
            tmp.contains("fake________::9ebe127e5f3a0bf401875690f3bb6b81") &&
                tmp.contains("doajarticles::c6cd4b532e12868c1d760a8d7cda6815") &&
                tmp.contains("doajarticles::a6de4499bb87bf3c01add0a9e2c9ed0b") &&
                tmp.contains("doajarticles::6eb31d13b12bc06bbac06aef63cf33c9") &&
                tmp.contains("doajarticles::0da84e9dfdc8419576169e027baa8028") &&
                tmp.contains("re3data_____::84e123776089ce3c7a33db98d9cd15a8") &&
                tmp.contains("openaire____::c5502a43e76feab55dd00cf50f519125") &&
                tmp.contains("re3data_____::a48f09c562b247a9919acfe195549b47") &&
                tmp.contains("opendoar____::97275a23ca44226c9964043c8462be96") &&
                tmp.contains("doajarticles::2899208a99aa7d142646e0a80bfeef05"));

}
}

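test3 feeds a MasterDuplicate pair into getContextRelation and then expects the master id ("fake________::…") in place of the duplicate ("re3data_____::…") among the relation targets. The substitution being exercised boils down to a lookup of this shape (a sketch under that assumption, not the actual implementation):

    // If an id is registered as a duplicate, rewrite it to its master; otherwise keep it.
    static String resolveMaster(String id, Map<String, String> masterByDuplicate) {
        return masterByDuplicate.getOrDefault(id, id);
    }
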
@@ -12,6 +12,7 @@ import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.ForeachFunction;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;

@@ -23,6 +24,7 @@ import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.oa.graph.dump.exceptions.NoAvailableEntityTypeException;
import eu.dnetlib.dhp.oa.model.Indicator;
import eu.dnetlib.dhp.schema.oaf.Datasource;
import eu.dnetlib.dhp.schema.oaf.Organization;
import eu.dnetlib.dhp.schema.oaf.Project;

@@ -97,10 +99,11 @@ public class DumpOrganizationProjectDatasourceTest {

Assertions.assertEquals(15, verificationDataset.count());

verificationDataset
    .foreach(
        (ForeachFunction<eu.dnetlib.dhp.oa.model.graph.Organization>) o -> System.out
            .println(OBJECT_MAPPER.writeValueAsString(o)));
// TODO write significant assertions
// verificationDataset
// .foreach(
// (ForeachFunction<eu.dnetlib.dhp.oa.model.graph.Organization>) o -> System.out
// .println(OBJECT_MAPPER.writeValueAsString(o)));

}

@@ -132,10 +135,37 @@ public class DumpOrganizationProjectDatasourceTest {

Assertions.assertEquals(12, verificationDataset.count());

Assertions.assertEquals(10, verificationDataset.filter("indicators is NULL").count());
Assertions.assertEquals(2, verificationDataset.filter("indicators is not NULL").count());
Assertions
    .assertEquals(
        1,
        verificationDataset
            .foreach(
                (ForeachFunction<eu.dnetlib.dhp.oa.model.graph.Project>) o -> System.out
                    .println(OBJECT_MAPPER.writeValueAsString(o)));
            .filter("indicators is not NULL AND id == 'aka_________::01bb7b48e29d732a1c7bc5150b9195c4'")
            .count());
Assertions
    .assertEquals(
        1,
        verificationDataset
            .filter("indicators is not NULL AND id == 'aka_________::9d1af21dbd0f5bc719f71553d19a6b3a'")
            .count());

// eu.dnetlib.dhp.oa.model.graph.Project p = tmp
// .filter(pr -> pr.getId().equals("aka_________::01bb7b48e29d732a1c7bc5150b9195c4"))
// .first();
// Assertions.assertEquals("2019", p.getIndicators().getUsageCounts().getDownloads());
// Assertions.assertEquals("1804", p.getIndicators().getUsageCounts().getViews());
// Assertions.assertNull(p.getIndicators().getImpactMeasures());

// p = tmp.filter(pr -> pr.getId().equals("aka_________::9d1af21dbd0f5bc719f71553d19a6b3a")).first();
// Assertions.assertEquals("139", p.getIndicators().getUsageCounts().getDownloads());
// Assertions.assertEquals("53", p.getIndicators().getUsageCounts().getViews());
// Assertions.assertNull(p.getIndicators().getImpactMeasures());
// TODO write significant assertions
// verificationDataset
// .foreach(
// (ForeachFunction<eu.dnetlib.dhp.oa.model.graph.Project>) o -> System.out
// .println(OBJECT_MAPPER.writeValueAsString(o)));

}

@@ -166,10 +196,38 @@ public class DumpOrganizationProjectDatasourceTest {

Assertions.assertEquals(5, verificationDataset.count());

Assertions.assertEquals(3, verificationDataset.filter("indicators is NULL").count());
Assertions.assertEquals(2, verificationDataset.filter("indicators is not NULL").count());
Assertions
    .assertEquals(
        1,
        verificationDataset
            .foreach(
                (ForeachFunction<eu.dnetlib.dhp.oa.model.graph.Datasource>) o -> System.out
                    .println(OBJECT_MAPPER.writeValueAsString(o)));
            .filter("indicators is not NULL AND id == 'doajarticles::1fa6859d71faa77b32d82f278c6ed1df'")
            .count());
Assertions
    .assertEquals(
        1,
        verificationDataset
            .filter("indicators is not NULL AND id == 'doajarticles::9c4b678901e5276d9e3addee566816af'")
            .count());

// eu.dnetlib.dhp.oa.model.graph.Datasource p = tmp
// .filter(pr -> pr.getId().equals("doajarticles::1fa6859d71faa77b32d82f278c6ed1df"))
// .first();
// Assertions.assertEquals("47542", p.getIndicators().getUsageCounts().getDownloads());
// Assertions.assertEquals("36485", p.getIndicators().getUsageCounts().getViews());
// Assertions.assertNull(p.getIndicators().getImpactMeasures());
//
// p = tmp.filter(pr -> pr.getId().equals("doajarticles::9c4b678901e5276d9e3addee566816af")).first();
// Assertions.assertEquals("981357", p.getIndicators().getUsageCounts().getDownloads());
// Assertions.assertEquals("646539", p.getIndicators().getUsageCounts().getViews());
// Assertions.assertNull(p.getIndicators().getImpactMeasures());

// TODO write significant assertions
// verificationDataset
// .foreach(
// (ForeachFunction<eu.dnetlib.dhp.oa.model.graph.Datasource>) o -> System.out
// .println(OBJECT_MAPPER.writeValueAsString(o)));
}

@Test

@@ -199,10 +257,11 @@ public class DumpOrganizationProjectDatasourceTest {

Assertions.assertEquals(1, verificationDataset.count());

verificationDataset
    .foreach(
        (ForeachFunction<eu.dnetlib.dhp.oa.model.graph.Datasource>) o -> System.out
            .println(OBJECT_MAPPER.writeValueAsString(o)));
// TODO write significant assertions
// verificationDataset
// .foreach(
// (ForeachFunction<eu.dnetlib.dhp.oa.model.graph.Datasource>) o -> System.out
// .println(OBJECT_MAPPER.writeValueAsString(o)));
}

}

@@ -97,7 +97,7 @@ public class DumpRelationTest {

Dataset<Row> check = spark
.sql(
"SELECT reltype.name, source.id source, source.type stype, target.id target,target.type ttype, provenance.provenance "
"SELECT reltype.name, source, sourceType stype, target,targetType ttype, provenance.provenance "
+
"from table ");

@@ -158,7 +158,7 @@ public class DumpRelationTest {

Dataset<Row> check = spark
.sql(
"SELECT reltype.name, source.id source, source.type stype, target.id target,target.type ttype, provenance.provenance "
"SELECT reltype.name, source, sourceType stype, target,targetType ttype, provenance.provenance "
+
"from table ");

@@ -229,7 +229,7 @@ public class DumpRelationTest {

Dataset<Row> check = spark
.sql(
"SELECT reltype.name, source.id source, source.type stype, target.id target,target.type ttype, provenance.provenance "
"SELECT reltype.name, source, sourceType stype, target,targetType ttype, provenance.provenance "
+
"from table ");

@@ -283,7 +283,7 @@ public class DumpRelationTest {

Dataset<Row> check = spark
.sql(
"SELECT reltype.name, source.id source, source.type stype, target.id target,target.type ttype, provenance.provenance "
"SELECT reltype.name, source, sourceType stype, target,targetType ttype, provenance.provenance "
+
"from table ");

@@ -90,20 +90,22 @@ public class ExtractRelationFromEntityTest {
org.apache.spark.sql.Dataset<Relation> verificationDataset = spark
.createDataset(tmp.rdd(), Encoders.bean(Relation.class));

Assertions
.assertEquals(
9,
verificationDataset.filter("source.id = '50|dedup_wf_001::15270b996fa8fd2fb5723daeab3685c3'").count());
verificationDataset.show(false);

Assertions
.assertEquals(
9,
verificationDataset.filter("source.id = '50|dedup_wf_001::15270b996fa8fd2fb5723daxab3685c3'").count());
verificationDataset.filter("source = 'dedup_wf_001::15270b996fa8fd2fb5723daeab3685c3'").count());

Assertions
.assertEquals(
9,
verificationDataset.filter("source = 'dedup_wf_001::15270b996fa8fd2fb5723daxab3685c3'").count());

Assertions
.assertEquals(
"IsRelatedTo", verificationDataset
.filter((FilterFunction<Relation>) row -> row.getSource().getId().startsWith("00"))
.filter((FilterFunction<Relation>) row -> row.getSourceType().equals("context"))
.collectAsList()
.get(0)
.getReltype()
@@ -112,7 +114,7 @@ public class ExtractRelationFromEntityTest {
Assertions
.assertEquals(
"relationship", verificationDataset
.filter((FilterFunction<Relation>) row -> row.getSource().getId().startsWith("00"))
.filter((FilterFunction<Relation>) row -> row.getSourceType().equals("context"))
.collectAsList()
.get(0)
.getReltype()
@@ -121,24 +123,22 @@ public class ExtractRelationFromEntityTest {
Assertions
.assertEquals(
"context", verificationDataset
.filter((FilterFunction<Relation>) row -> row.getSource().getId().startsWith("00"))
.filter((FilterFunction<Relation>) row -> row.getSourceType().equals("context"))
.collectAsList()
.get(0)
.getSource()
.getType());
.getSourceType());

Assertions
.assertEquals(
"result", verificationDataset
.filter((FilterFunction<Relation>) row -> row.getSource().getId().startsWith("00"))
.filter((FilterFunction<Relation>) row -> row.getSourceType().equals("context"))
.collectAsList()
.get(0)
.getTarget()
.getType());
.getTargetType());
Assertions
.assertEquals(
"IsRelatedTo", verificationDataset
.filter((FilterFunction<Relation>) row -> row.getTarget().getId().startsWith("00"))
.filter((FilterFunction<Relation>) row -> row.getTargetType().equals("context"))
.collectAsList()
.get(0)
.getReltype()
@@ -147,7 +147,7 @@ public class ExtractRelationFromEntityTest {
Assertions
.assertEquals(
"relationship", verificationDataset
.filter((FilterFunction<Relation>) row -> row.getTarget().getId().startsWith("00"))
.filter((FilterFunction<Relation>) row -> row.getTargetType().equals("context"))
.collectAsList()
.get(0)
.getReltype()
@@ -156,20 +156,18 @@ public class ExtractRelationFromEntityTest {
Assertions
.assertEquals(
"context", verificationDataset
.filter((FilterFunction<Relation>) row -> row.getTarget().getId().startsWith("00"))
.filter((FilterFunction<Relation>) row -> row.getTargetType().equals("context"))
.collectAsList()
.get(0)
.getTarget()
.getType());
.getTargetType());

Assertions
.assertEquals(
"result", verificationDataset
.filter((FilterFunction<Relation>) row -> row.getTarget().getId().startsWith("00"))
.filter((FilterFunction<Relation>) row -> row.getTargetType().equals("context"))
.collectAsList()
.get(0)
.getSource()
.getType());
.getSourceType());
}

@Test

@@ -474,17 +474,17 @@ public class DumpSubsetTest {

Assertions.assertEquals(10, tmp.count());

Assertions.assertEquals(5, tmp.filter(r -> r.getSource().getId().startsWith("00")).count());
Assertions.assertEquals(5, tmp.filter(r -> r.getTarget().getId().startsWith("00")).count());
Assertions.assertEquals(5, tmp.filter(r -> r.getSourceType().equals("context")).count());
Assertions.assertEquals(5, tmp.filter(r -> r.getTargetType().equals("context")).count());

Assertions.assertEquals(2, tmp.filter(r -> r.getSource().getId().startsWith("10")).count());
Assertions.assertEquals(2, tmp.filter(r -> r.getTarget().getId().startsWith("10")).count());
Assertions.assertEquals(2, tmp.filter(r -> r.getSourceType().equals("datasource")).count());
Assertions.assertEquals(2, tmp.filter(r -> r.getTargetType().equals("datasource")).count());

Assertions.assertEquals(1, tmp.filter(r -> r.getSource().getId().startsWith("40")).count());
Assertions.assertEquals(1, tmp.filter(r -> r.getTarget().getId().startsWith("40")).count());
Assertions.assertEquals(1, tmp.filter(r -> r.getSourceType().equals("project")).count());
Assertions.assertEquals(1, tmp.filter(r -> r.getTargetType().equals("project")).count());

Assertions.assertEquals(2, tmp.filter(r -> r.getSource().getId().startsWith("20")).count());
Assertions.assertEquals(2, tmp.filter(r -> r.getTarget().getId().startsWith("20")).count());
Assertions.assertEquals(2, tmp.filter(r -> r.getSourceType().equals("organization")).count());
Assertions.assertEquals(2, tmp.filter(r -> r.getTargetType().equals("organization")).count());

}

@@ -514,9 +514,9 @@ public class DumpSubsetTest {

Assertions.assertEquals(102, tmp.count());

Assertions.assertEquals(51, tmp.filter(r -> r.getSource().getId().startsWith("50|")).count());
Assertions.assertEquals(39, tmp.filter(r -> r.getSource().getId().startsWith("10|")).count());
Assertions.assertEquals(12, tmp.filter(r -> r.getSource().getId().startsWith("00|")).count());
Assertions.assertEquals(51, tmp.filter(r -> r.getSourceType().equals("result")).count());
Assertions.assertEquals(39, tmp.filter(r -> r.getSourceType().equals("datasource")).count());
Assertions.assertEquals(12, tmp.filter(r -> r.getSourceType().equals("context")).count());
}

}

@@ -1 +1 @@
{"pid": [{"scheme": "doi", "value": "10.1023/a:1019971625315"}], "contributor": [], "collectedfrom": [{"key": "10|openaire____::081b82f96300b6a6e3d282bad31cb6e2", "value": "Crossref"}, {"key": "10|openaire____::5f532a3fc4f1ea403f37070f59a7a53a", "value": "Microsoft Academic Graph"}], "id": "50|doi_________::0027accd79214af151336e8237a2b084", "container": {"issnPrinted": "1607-6729", "conferencedate": null, "vol": "385", "conferenceplace": null, "name": "Doklady Biochemistry and Biophysics", "iss": null, "sp": "228", "edition": null, "issnOnline": null, "ep": "234", "issnLinking": null}, "lastupdatetimestamp": 1649039791345, "author": [{"surname": null, "fullname": "Vladimir S. Saakov", "pid": null, "name": null, "rank": 1}], "instance": [{"refereed": "UNKNOWN", "hostedby": {"key": "10|issn___print::55156520c3996f4d887f858c089d1e5f", "value": "Doklady Biochemistry and Biophysics"}, "url": ["https://doi.org/10.1023/a:1019971625315"], "pid": [{"scheme": "doi", "value": "10.1023/a:1019971625315"}], "publicationdate": "2002-01-01", "collectedfrom": {"key": "10|openaire____::081b82f96300b6a6e3d282bad31cb6e2", "value": "Crossref"}, "type": "Article"}], "subjects": [{"provenance": null, "subject": {"scheme": "keyword", "value": "General Chemistry"}}, {"provenance": null, "subject": {"scheme": "keyword", "value": "Biochemistry"}}, {"provenance": null, "subject": {"scheme": "keyword", "value": "General Medicine"}}, {"provenance": null, "subject": {"scheme": "keyword", "value": "Biophysics"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Photosystem II"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Ion"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Chemistry"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Soil salinity"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Analytical chemistry"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Function (biology)"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Pulse (signal processing)"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Fluorescence"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Phototroph"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Kinetic energy"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Photochemistry"}}], "publicationdate": "2002-01-01", "indicators": {"impactMeasures": {"influence": {"score": "4.901964E-9", "class": "C"}, "popularity": {"score": "6.185583E-10", "class": "C"}, "influence_alt": {"score": "3", "class": "C"}, "impulse": {"score": "0", "class": "C"}, "popularity_alt": {"score": "0.03722029", "class": "C"}}}, "dateofcollection": "2022-04-04T02:36:31Z", "type": "publication", "description": [], "format": [], "coverage": [], "publisher": "Springer Science and Business Media LLC", "language": {"code": "und", "label": "Undetermined"}, "country": [], "originalId": ["453197", "10.1023/a:1019971625315", "314096869"], "source": ["Crossref", null], "context": [{"code": "enermaps", "provenance": [{"provenance": "Inferred by OpenAIRE", "trust": "0.8"}], "label": "Energy Research"}]}
{"pid": [{"scheme": "doi", "value": "10.1023/a:1019971625315"}], "contributor": [], "collectedfrom": [{"key": "10|openaire____::081b82f96300b6a6e3d282bad31cb6e2", "value": "Crossref"}, {"key": "10|openaire____::5f532a3fc4f1ea403f37070f59a7a53a", "value": "Microsoft Academic Graph"}], "id": "50|doi_________::0027accd79214af151336e8237a2b084", "container": {"issnPrinted": "1607-6729", "conferencedate": null, "vol": "385", "conferenceplace": null, "name": "Doklady Biochemistry and Biophysics", "iss": null, "sp": "228", "edition": null, "issnOnline": null, "ep": "234", "issnLinking": null}, "lastupdatetimestamp": 1649039791345, "author": [{"surname": null, "fullname": "Vladimir S. Saakov", "pid": null, "name": null, "rank": 1}], "instance": [{"refereed": "UNKNOWN", "hostedby": {"key": "10|issn___print::55156520c3996f4d887f858c089d1e5f", "value": "Doklady Biochemistry and Biophysics"}, "url": ["https://doi.org/10.1023/a:1019971625315"], "pid": [{"scheme": "doi", "value": "10.1023/a:1019971625315"}], "publicationdate": "2002-01-01", "collectedfrom": {"key": "10|openaire____::081b82f96300b6a6e3d282bad31cb6e2", "value": "Crossref"}, "type": "Article"}], "subjects": [{"provenance": null, "subject": {"scheme": "keyword", "value": "General Chemistry"}}, {"provenance": null, "subject": {"scheme": "keyword", "value": "Biochemistry"}}, {"provenance": null, "subject": {"scheme": "keyword", "value": "General Medicine"}}, {"provenance": null, "subject": {"scheme": "keyword", "value": "Biophysics"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Photosystem II"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Ion"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Chemistry"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Soil salinity"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Analytical chemistry"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Function (biology)"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Pulse (signal processing)"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Fluorescence"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Phototroph"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Kinetic energy"}}, {"provenance": null, "subject": {"scheme": "MAG", "value": "Photochemistry"}}], "publicationdate": "2002-01-01", "indicators": {"bipIndicators": [{"indicator":"influence","score": "4.901964E-9", "class": "C"}, {"indicator":"popularity","score": "6.185583E-10", "class": "C"}, {"indicator": "influence_alt","score": "3", "class": "C"}, {"indicator": "impulse","score": "0", "class": "C"}, {"indicator": "popularity_alt","score": "0.03722029", "class": "C"}]}}, "dateofcollection": "2022-04-04T02:36:31Z", "type": "publication", "description": [], "format": [], "coverage": [], "publisher": "Springer Science and Business Media LLC", "language": {"code": "und", "label": "Undetermined"}, "country": [], "originalId": ["453197", "10.1023/a:1019971625315", "314096869"], "source": ["Crossref", null], "context": [{"code": "enermaps", "provenance": [{"provenance": "Inferred by OpenAIRE", "trust": "0.8"}], "label": "Energy Research"}]}
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,6 +1,6 @@
{"id":"00|context_____::e15922110564cf669aaed346e871bc01","acronym":"eutopia","name":"EUTOPIA Open Research Portal","type":"Research Community","description":"<p style=text-align:justify>EUTOPIA is an ambitious alliance of 10 like-minded universities ready to reinvent themselves: the Babeș-Bolyai University in Cluj-Napoca (Romania), the Vrije Universiteit Brussels (Belgium), the Ca'Foscari University of Europe (Italy), CY Cergy Paris Université (France), the Technische Universität Dresden (Germany), the University of Gothenburg (Sweden), the University of Ljubljana (Slovenia), the NOVA University Lisbon (Portugal), the University of Pompeu Fabra (Spain) and the University of Warwick (United Kingdom). Together, these 10 pioneers join forces to build the university of the future.</p>","zenodo_community":null}
{"id":"00|context_____::aa0e56dd2e9d2a0be749f5debdd2b3d8","acronym":"enermaps","name":"Welcome to EnerMaps Gateway! Find the latest scientific data.","type":"Research Community","description":"","zenodo_community":null,"subject":[]}
{"id":"00|context_____::6f567d9abd1c6603b0c0205a832bc757","acronym":"neanias-underwater","name":"NEANIAS Underwater Research Community","type":"Research Community","description":"","zenodo_community":null,"subject":["Ocean mapping","Multibeam Backscatter","Bathymetry","Seabed classification","Submarine Geomorphology","Underwater Photogrammetry"]}
{"id":"00|context_____::04a00617ca659adc944977ac700ea14b","acronym":"dh-ch","name":"Digital Humanities and Cultural Heritage","type":"Research Community","description":"This community gathers research results, data, scientific publications and projects related to the domain of Digital Humanities. This broad definition includes Humanities, Cultural Heritage, History, Archaeology and related fields.","zenodo_community":"https://zenodo.org/communities/oac_dh-ch","subject":["modern art","monuments","europeana data model","field walking","frescoes","LIDO metadata schema","art history","excavation","Arts and Humanities General","coins","temples","numismatics","lithics","environmental archaeology","digital cultural heritage","archaeological reports","history","CRMba","churches","cultural heritage","archaeological stratigraphy","religious art","digital humanities","archaeological sites","linguistic studies","bioarchaeology","architectural orders","palaeoanthropology","fine arts","europeana","CIDOC CRM","decorations","classic art","stratigraphy","digital archaeology","intangible cultural heritage","walls","chapels","CRMtex","Language and Literature","paintings","archaeology","mosaics","burials","medieval art","castles","CARARE metadata schema","statues","natural language processing","inscriptions","CRMsci","vaults","contemporary art","Arts and Humanities","CRMarchaeo","pottery"]}
{"id":"00|context_____::5fde864866ea5ded4cc873b3170b63c3","acronym":"beopen","name":"Transport Research","type":"Research Community","description":"Welcome to the Open Research Gateway for Transport Research. This gateway is part of the TOPOS Observatory (https://www.topos-observatory.eu). The TOPOS aims to showcase the status and progress of open science uptake in transport research. It focuses on promoting territorial and cross border cooperation and contributing in the optimization of open science in transport research.\nThe TOPOS Observatory is supported by the EC H2020 BEOPEN project (824323)","zenodo_community":"https://zenodo.org/communities/be-open-transport","subject":["Green Transport","City mobility systems","Vulnerable road users","Traffic engineering","Transport electrification","Intermodal freight transport","Clean vehicle fleets","Intelligent mobility","Inflight refueling","District mobility systems","Navigation and control systems for optimised planning and routing","European Space Technology Platform","European Transport networks","Green cars","Inter-modality infrastructures","Advanced Take Off and Landing Ideas","Sustainable urban systems","port-area railway networks","Innovative forms of urban transport","Alliance for Logistics Innovation through Collaboration in Europe","Advisory Council for Aeronautics Research in Europe","Mobility services for people and goods","Guidance and traffic management","Passenger mobility","Smart mobility and services","transport innovation","high-speed railway","Vehicle design","Inland shipping","public transportation","aviation’s climate impact","Road transport","On-demand public transport","Personal Air Transport","Pipeline transport","European Association of Aviation Training and Education Organisations","Defrosting of railway infrastructure","Inclusive and affordable transport","River Information Services","jel:L92","Increased use of public transport","Seamless mobility","STRIA","trolleybus transport","Intelligent Transport System","Low-emission alternative energy for transport","Shared mobility for people and goods","Business model for urban mobility","Interoperability of transport systems","Cross-border train slot booking","Air transport","Transport pricing","Sustainable transport","European Rail Transport Research Advisory Council","Alternative aircraft configurations","Railways applications","urban transport","Environmental impact of transport","urban freight delivery systems","Automated Road Transport","Alternative fuels in public transport","Active LIDAR-sensor for GHG-measurements","Autonomous logistics operations","Rational use of motorised transport","Network and traffic management systems","electrification of railway wagons","Single European Sky","Electrified road systems","Railway dynamics","Motorway of the Sea","smart railway communications","Maritime transport","Environmental- friendly transport","Combined transport","Connected automated driving technology","Innovative freight logistics services","automated and shared vehicles","Alternative Aircraft Systems","Land-use and transport interaction","Public transport system","Business plan for shared mobility","Shared mobility","Growing of mobility demand","European Road Transport Research Advisory Council","WATERBORNE ETP","Effective transport management system","Short Sea Shipping","air traffic management","Sea hubs and the motorways of the sea","Urban mobility solutions","Smart city planning","Maritime spatial planning","EUropean rail Research Network of Excellence","ENERGY 
CONSUMPTION BY THE TRANSPORT SECTOR","Integrated urban plan","inland waterway services","European Conference of Transport Research Institutes","air vehicles","E-freight","Automated Driving","Automated ships","pricing for cross-border passenger transport","Vehicle efficiency","Railway transport","Electric vehicles","Road traffic monitoring","Deep sea shipping","Circular economy in transport","Traffic congestion","air transport system","Urban logistics","Rail transport","OpenStreetMap","high speed rail","Transportation engineering","Intermodal travel information","Flight Data Recorders","Advanced driver assistance systems","long distance freight transport","Inland waterway transport","Smart mobility","Mobility integration","Personal Rapid Transit system","Safety measures & requirements for roads","Green rail transport","Vehicle manufacturing","Future Airport Layout","Rail technologies","European Intermodal Research Advisory Council","inland navigation","Automated urban vehicles","ECSS-standards","Traveller services","Polluting transport","Air Traffic Control","Cooperative and connected and automated transport","Innovative powertrains","Quality of transport system and services","door-to- door logistics chain","Inter-modal aspects of urban mobility","Innovative freight delivery systems","urban freight delivery infrastructures"]}
{"id":"00|context_____::a38bf77184799906a6ce86b9eb761c80","acronym":"sdsn-gr","name":"Sustainable Development Solutions Network - Greece","type":"Research Community","description":"The UN Sustainable Development Solutions Network (SDSN) has been operating since 2012 under the auspices of the UN Secretary-General. SDSN mobilizes global scientific and technological expertise to promote practical solutions for sustainable development, including the implementation of the Sustainable Development Goals (SDGs) and the Paris Climate Agreement. The Greek hub of SDSN has been included in the SDSN network in 2017 and is co-hosted by ICRE8: International Center for Research on the Environment and the Economy and the Political Economy of Sustainable Development Lab.","zenodo_community":"https://zenodo.org/communities/oac_sdsn-greece","subject":["SDG13 - Climate action","SDG8 - Decent work and economic\n\t\t\t\t\tgrowth","SDG15 - Life on land","SDG2 - Zero hunger","SDG17 - Partnerships for the\n\t\t\t\t\tgoals","SDG10 - Reduced inequalities","SDG5 - Gender equality","SDG12 - Responsible\n\t\t\t\t\tconsumption and production","SDG14 - Life below water","SDG6 - Clean water and\n\t\t\t\t\tsanitation","SDG11 - Sustainable cities and communities","SDG1 - No poverty","SDG3 -\n\t\t\t\t\tGood health and well being","SDG7 - Affordable and clean energy","SDG4 - Quality\n\t\t\t\t\teducation","SDG9 - Industry innovation and infrastructure","SDG16 - Peace justice\n\t\t\t\t\tand strong institutions"]}
{"id":"context_____::e15922110564cf669aaed346e871bc01","acronym":"eutopia","name":"EUTOPIA Open Research Portal","type":"Research Community","description":"<p style=text-align:justify>EUTOPIA is an ambitious alliance of 10 like-minded universities ready to reinvent themselves: the Babeș-Bolyai University in Cluj-Napoca (Romania), the Vrije Universiteit Brussels (Belgium), the Ca'Foscari University of Europe (Italy), CY Cergy Paris Université (France), the Technische Universität Dresden (Germany), the University of Gothenburg (Sweden), the University of Ljubljana (Slovenia), the NOVA University Lisbon (Portugal), the University of Pompeu Fabra (Spain) and the University of Warwick (United Kingdom). Together, these 10 pioneers join forces to build the university of the future.</p>","zenodo_community":null}
{"id":"context_____::aa0e56dd2e9d2a0be749f5debdd2b3d8","acronym":"enermaps","name":"Welcome to EnerMaps Gateway! Find the latest scientific data.","type":"Research Community","description":"","zenodo_community":null,"subject":[]}
{"id":"context_____::6f567d9abd1c6603b0c0205a832bc757","acronym":"neanias-underwater","name":"NEANIAS Underwater Research Community","type":"Research Community","description":"","zenodo_community":null,"subject":["Ocean mapping","Multibeam Backscatter","Bathymetry","Seabed classification","Submarine Geomorphology","Underwater Photogrammetry"]}
{"id":"context_____::04a00617ca659adc944977ac700ea14b","acronym":"dh-ch","name":"Digital Humanities and Cultural Heritage","type":"Research Community","description":"This community gathers research results, data, scientific publications and projects related to the domain of Digital Humanities. This broad definition includes Humanities, Cultural Heritage, History, Archaeology and related fields.","zenodo_community":"https://zenodo.org/communities/oac_dh-ch","subject":["modern art","monuments","europeana data model","field walking","frescoes","LIDO metadata schema","art history","excavation","Arts and Humanities General","coins","temples","numismatics","lithics","environmental archaeology","digital cultural heritage","archaeological reports","history","CRMba","churches","cultural heritage","archaeological stratigraphy","religious art","digital humanities","archaeological sites","linguistic studies","bioarchaeology","architectural orders","palaeoanthropology","fine arts","europeana","CIDOC CRM","decorations","classic art","stratigraphy","digital archaeology","intangible cultural heritage","walls","chapels","CRMtex","Language and Literature","paintings","archaeology","mosaics","burials","medieval art","castles","CARARE metadata schema","statues","natural language processing","inscriptions","CRMsci","vaults","contemporary art","Arts and Humanities","CRMarchaeo","pottery"]}
{"id":"context_____::5fde864866ea5ded4cc873b3170b63c3","acronym":"beopen","name":"Transport Research","type":"Research Community","description":"Welcome to the Open Research Gateway for Transport Research. This gateway is part of the TOPOS Observatory (https://www.topos-observatory.eu). The TOPOS aims to showcase the status and progress of open science uptake in transport research. It focuses on promoting territorial and cross border cooperation and contributing in the optimization of open science in transport research.\nThe TOPOS Observatory is supported by the EC H2020 BEOPEN project (824323)","zenodo_community":"https://zenodo.org/communities/be-open-transport","subject":["Green Transport","City mobility systems","Vulnerable road users","Traffic engineering","Transport electrification","Intermodal freight transport","Clean vehicle fleets","Intelligent mobility","Inflight refueling","District mobility systems","Navigation and control systems for optimised planning and routing","European Space Technology Platform","European Transport networks","Green cars","Inter-modality infrastructures","Advanced Take Off and Landing Ideas","Sustainable urban systems","port-area railway networks","Innovative forms of urban transport","Alliance for Logistics Innovation through Collaboration in Europe","Advisory Council for Aeronautics Research in Europe","Mobility services for people and goods","Guidance and traffic management","Passenger mobility","Smart mobility and services","transport innovation","high-speed railway","Vehicle design","Inland shipping","public transportation","aviation’s climate impact","Road transport","On-demand public transport","Personal Air Transport","Pipeline transport","European Association of Aviation Training and Education Organisations","Defrosting of railway infrastructure","Inclusive and affordable transport","River Information Services","jel:L92","Increased use of public transport","Seamless mobility","STRIA","trolleybus transport","Intelligent Transport System","Low-emission alternative energy for transport","Shared mobility for people and goods","Business model for urban mobility","Interoperability of transport systems","Cross-border train slot booking","Air transport","Transport pricing","Sustainable transport","European Rail Transport Research Advisory Council","Alternative aircraft configurations","Railways applications","urban transport","Environmental impact of transport","urban freight delivery systems","Automated Road Transport","Alternative fuels in public transport","Active LIDAR-sensor for GHG-measurements","Autonomous logistics operations","Rational use of motorised transport","Network and traffic management systems","electrification of railway wagons","Single European Sky","Electrified road systems","Railway dynamics","Motorway of the Sea","smart railway communications","Maritime transport","Environmental- friendly transport","Combined transport","Connected automated driving technology","Innovative freight logistics services","automated and shared vehicles","Alternative Aircraft Systems","Land-use and transport interaction","Public transport system","Business plan for shared mobility","Shared mobility","Growing of mobility demand","European Road Transport Research Advisory Council","WATERBORNE ETP","Effective transport management system","Short Sea Shipping","air traffic management","Sea hubs and the motorways of the sea","Urban mobility solutions","Smart city planning","Maritime spatial planning","EUropean rail Research Network of Excellence","ENERGY 
CONSUMPTION BY THE TRANSPORT SECTOR","Integrated urban plan","inland waterway services","European Conference of Transport Research Institutes","air vehicles","E-freight","Automated Driving","Automated ships","pricing for cross-border passenger transport","Vehicle efficiency","Railway transport","Electric vehicles","Road traffic monitoring","Deep sea shipping","Circular economy in transport","Traffic congestion","air transport system","Urban logistics","Rail transport","OpenStreetMap","high speed rail","Transportation engineering","Intermodal travel information","Flight Data Recorders","Advanced driver assistance systems","long distance freight transport","Inland waterway transport","Smart mobility","Mobility integration","Personal Rapid Transit system","Safety measures & requirements for roads","Green rail transport","Vehicle manufacturing","Future Airport Layout","Rail technologies","European Intermodal Research Advisory Council","inland navigation","Automated urban vehicles","ECSS-standards","Traveller services","Polluting transport","Air Traffic Control","Cooperative and connected and automated transport","Innovative powertrains","Quality of transport system and services","door-to- door logistics chain","Inter-modal aspects of urban mobility","Innovative freight delivery systems","urban freight delivery infrastructures"]}
{"id":"context_____::a38bf77184799906a6ce86b9eb761c80","acronym":"sdsn-gr","name":"Sustainable Development Solutions Network - Greece","type":"Research Community","description":"The UN Sustainable Development Solutions Network (SDSN) has been operating since 2012 under the auspices of the UN Secretary-General. SDSN mobilizes global scientific and technological expertise to promote practical solutions for sustainable development, including the implementation of the Sustainable Development Goals (SDGs) and the Paris Climate Agreement. The Greek hub of SDSN has been included in the SDSN network in 2017 and is co-hosted by ICRE8: International Center for Research on the Environment and the Economy and the Political Economy of Sustainable Development Lab.","zenodo_community":"https://zenodo.org/communities/oac_sdsn-greece","subject":["SDG13 - Climate action","SDG8 - Decent work and economic\n\t\t\t\t\tgrowth","SDG15 - Life on land","SDG2 - Zero hunger","SDG17 - Partnerships for the\n\t\t\t\t\tgoals","SDG10 - Reduced inequalities","SDG5 - Gender equality","SDG12 - Responsible\n\t\t\t\t\tconsumption and production","SDG14 - Life below water","SDG6 - Clean water and\n\t\t\t\t\tsanitation","SDG11 - Sustainable cities and communities","SDG1 - No poverty","SDG3 -\n\t\t\t\t\tGood health and well being","SDG7 - Affordable and clean energy","SDG4 - Quality\n\t\t\t\t\teducation","SDG9 - Industry innovation and infrastructure","SDG16 - Peace justice\n\t\t\t\t\tand strong institutions"]}
@@ -1,4 +1,4 @@
{"id":"10|doajarticles::9c4b678901e5276d9e3addee566816af","originalId":["doajarticles::1798-355X"],"pid":[],"datasourcetype":{"scheme":"pubsrepository::journal","value":"Journal"},"openairecompatibility":"not available","officialname":"Pelitutkimuksen vuosikirja","englishname":"Pelitutkimuksen vuosikirja","websiteurl":"http://www.pelitutkimus.fi","logourl":null,"dateofvalidation":null,"description":null,"subjects":["Geography. Anthropology. Recreation: Recreation. Leisure | Science: Mathematics: Instruments and machines: Electronic computers. Computer science: Computer software"],"languages":[],"contenttypes":["Journal articles"],"releasestartdate":null,"releaseenddate":null,"missionstatementurl":null,"accessrights":null,"uploadrights":null,"databaseaccessrestriction":null,"datauploadrestriction":null,"versioning":false,"citationguidelineurl":null,"pidsystems":null,"certificates":null,"policies":[],"journal":null}
{"id":"10|doajarticles::acb7c79bb85d3b3a7b75389f5d9570f5","originalId":["doajarticles::1879-9337"],"pid":[],"datasourcetype":{"scheme":"pubsrepository::journal","value":"Journal"},"openairecompatibility":"collected from a compatible aggregator","officialname":"Review of Development Finance","englishname":"Review of Development Finance","websiteurl":"http://www.journals.elsevier.com/review-of-development-finance/","logourl":null,"dateofvalidation":null,"description":null,"subjects":["Social Sciences: Industries. Land use. Labor: Economic growth, development, planning | Social Sciences: Finance"],"languages":[],"contenttypes":["Journal articles"],"releasestartdate":null,"releaseenddate":null,"missionstatementurl":null,"accessrights":null,"uploadrights":null,"databaseaccessrestriction":null,"datauploadrestriction":null,"versioning":false,"citationguidelineurl":null,"pidsystems":null,"certificates":null,"policies":[],"journal":null}
{"id":"10|doajarticles::1fa6859d71faa77b32d82f278c6ed1df","originalId":["doajarticles::1048-9533"],"pid":[],"datasourcetype":{"scheme":"pubsrepository::journal","value":"Journal"},"openairecompatibility":"collected from a compatible aggregator","officialname":"Journal of Applied Mathematics and Stochastic Analysis","englishname":"Journal of Applied Mathematics and Stochastic Analysis","websiteurl":"https://www.hindawi.com/journals/jamsa","logourl":null,"dateofvalidation":null,"description":null,"subjects":[],"languages":[],"contenttypes":["Journal articles"],"releasestartdate":null,"releaseenddate":null,"missionstatementurl":null,"accessrights":null,"uploadrights":null,"databaseaccessrestriction":null,"datauploadrestriction":null,"versioning":false,"citationguidelineurl":null,"pidsystems":null,"certificates":null,"policies":[],"journal":null}
{"id":"10|doajarticles::a5314b60f79b869cb5d3a2709167bc3a","originalId":["doajarticles::0322-788X"],"pid":[],"datasourcetype":{"scheme":"pubsrepository::journal","value":"Journal"},"openairecompatibility":"collected from a compatible aggregator","officialname":"Statistika: Statistics and Economy Journal","englishname":"Statistika: Statistics and Economy Journal","websiteurl":"http://www.czso.cz/statistika_journal","logourl":null,"dateofvalidation":null,"description":null,"subjects":["Social Sciences: Statistics"],"languages":[],"contenttypes":["Journal articles"],"releasestartdate":null,"releaseenddate":null,"missionstatementurl":null,"accessrights":null,"uploadrights":null,"databaseaccessrestriction":null,"datauploadrestriction":null,"versioning":false,"citationguidelineurl":null,"pidsystems":null,"certificates":null,"policies":[],"journal":null}
{"id":"doajarticles::9c4b678901e5276d9e3addee566816af","originalId":["doajarticles::1798-355X"],"pid":[],"datasourcetype":{"scheme":"pubsrepository::journal","value":"Journal"},"openairecompatibility":"not available","officialname":"Pelitutkimuksen vuosikirja","englishname":"Pelitutkimuksen vuosikirja","websiteurl":"http://www.pelitutkimus.fi","logourl":null,"dateofvalidation":null,"description":null,"subjects":["Geography. Anthropology. Recreation: Recreation. Leisure | Science: Mathematics: Instruments and machines: Electronic computers. Computer science: Computer software"],"languages":[],"contenttypes":["Journal articles"],"releasestartdate":null,"releaseenddate":null,"missionstatementurl":null,"accessrights":null,"uploadrights":null,"databaseaccessrestriction":null,"datauploadrestriction":null,"versioning":false,"citationguidelineurl":null,"pidsystems":null,"certificates":null,"policies":[],"journal":null}
{"id":"doajarticles::acb7c79bb85d3b3a7b75389f5d9570f5","originalId":["doajarticles::1879-9337"],"pid":[],"datasourcetype":{"scheme":"pubsrepository::journal","value":"Journal"},"openairecompatibility":"collected from a compatible aggregator","officialname":"Review of Development Finance","englishname":"Review of Development Finance","websiteurl":"http://www.journals.elsevier.com/review-of-development-finance/","logourl":null,"dateofvalidation":null,"description":null,"subjects":["Social Sciences: Industries. Land use. Labor: Economic growth, development, planning | Social Sciences: Finance"],"languages":[],"contenttypes":["Journal articles"],"releasestartdate":null,"releaseenddate":null,"missionstatementurl":null,"accessrights":null,"uploadrights":null,"databaseaccessrestriction":null,"datauploadrestriction":null,"versioning":false,"citationguidelineurl":null,"pidsystems":null,"certificates":null,"policies":[],"journal":null}
{"id":"doajarticles::1fa6859d71faa77b32d82f278c6ed1df","originalId":["doajarticles::1048-9533"],"pid":[],"datasourcetype":{"scheme":"pubsrepository::journal","value":"Journal"},"openairecompatibility":"collected from a compatible aggregator","officialname":"Journal of Applied Mathematics and Stochastic Analysis","englishname":"Journal of Applied Mathematics and Stochastic Analysis","websiteurl":"https://www.hindawi.com/journals/jamsa","logourl":null,"dateofvalidation":null,"description":null,"subjects":[],"languages":[],"contenttypes":["Journal articles"],"releasestartdate":null,"releaseenddate":null,"missionstatementurl":null,"accessrights":null,"uploadrights":null,"databaseaccessrestriction":null,"datauploadrestriction":null,"versioning":false,"citationguidelineurl":null,"pidsystems":null,"certificates":null,"policies":[],"journal":null}
{"id":"doajarticles::a5314b60f79b869cb5d3a2709167bc3a","originalId":["doajarticles::0322-788X"],"pid":[],"datasourcetype":{"scheme":"pubsrepository::journal","value":"Journal"},"openairecompatibility":"collected from a compatible aggregator","officialname":"Statistika: Statistics and Economy Journal","englishname":"Statistika: Statistics and Economy Journal","websiteurl":"http://www.czso.cz/statistika_journal","logourl":null,"dateofvalidation":null,"description":null,"subjects":["Social Sciences: Statistics"],"languages":[],"contenttypes":["Journal articles"],"releasestartdate":null,"releaseenddate":null,"missionstatementurl":null,"accessrights":null,"uploadrights":null,"databaseaccessrestriction":null,"datauploadrestriction":null,"versioning":false,"citationguidelineurl":null,"pidsystems":null,"certificates":null,"policies":[],"journal":null}
@@ -1,3 +1,3 @@
{"legalshortname":"SHoF","legalname":"Swedish House of Finance","websiteurl":"http://houseoffinance.se/","alternativenames":["SHoF"],"country":{"code":"SE","label":"Sweden"},"id":"20|grid________::87698402476531ba39e61f1df38f2a91","pid":[{"scheme":"grid","value":"grid.451954.8"}]}
{"legalshortname":"Korean Elementary Moral Education Society","legalname":"Korean Elementary Moral Education Society","websiteurl":"http://www.ethics.or.kr/","alternativenames":["한국초등도덕교육학회"],"country":{"code":"KR","label":"Korea (Republic of)"},"id":"20|grid________::bd5cbea5dc434b8fd811a880cb9d4a05","pid":[{"scheme":"grid","value":"grid.496778.3"}]}
{"legalshortname":"NHC","legalname":"National Health Council","websiteurl":"http://www.nationalhealthcouncil.org/","alternativenames":["NHC"],"country":{"code":"US","label":"United States"},"id":"20|grid________::94948cc036605bf4a00ec77ce5ca92d3","pid":[{"scheme":"grid","value":"grid.487707.b"}]}
{"legalshortname":"SHoF","legalname":"Swedish House of Finance","websiteurl":"http://houseoffinance.se/","alternativenames":["SHoF"],"country":{"code":"SE","label":"Sweden"},"id":"grid________::87698402476531ba39e61f1df38f2a91","pid":[{"scheme":"grid","value":"grid.451954.8"}]}
{"legalshortname":"Korean Elementary Moral Education Society","legalname":"Korean Elementary Moral Education Society","websiteurl":"http://www.ethics.or.kr/","alternativenames":["한국초등도덕교육학회"],"country":{"code":"KR","label":"Korea (Republic of)"},"id":"grid________::bd5cbea5dc434b8fd811a880cb9d4a05","pid":[{"scheme":"grid","value":"grid.496778.3"}]}
{"legalshortname":"NHC","legalname":"National Health Council","websiteurl":"http://www.nationalhealthcouncil.org/","alternativenames":["NHC"],"country":{"code":"US","label":"United States"},"id":"grid________::94948cc036605bf4a00ec77ce5ca92d3","pid":[{"scheme":"grid","value":"grid.487707.b"}]}
@@ -1,3 +1,3 @@
{"id":"40|aka_________::01bb7b48e29d732a1c7bc5150b9195c4","websiteurl":null,"code":"135027","acronym":null,"title":"Dynamic 3D resolution-enhanced low-coherence interferometric imaging / Consortium: Hi-Lo","startdate":null,"enddate":null,"callidentifier":"Fotoniikka ja modernit kuvantamismenetelmät LT","keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"AKA","name":"Academy of Finland","jurisdiction":"FI","funding_stream":null}],"summary":null,"granted":null,"h2020programme":[]}
{"id":"40|aka_________::9d1af21dbd0f5bc719f71553d19a6b3a","websiteurl":null,"code":"316061","acronym":null,"title":"Finnish Imaging of Degenerative Shoulder Study (FIMAGE): A study on the prevalence of degenerative imaging changes of the shoulder and their relevance to clinical symptoms in the general population.","startdate":null,"enddate":null,"callidentifier":"Academy Project Funding TT","keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"AKA","name":"Academy of Finland","jurisdiction":"FI","funding_stream":null}],"summary":null,"granted":null,"h2020programme":[]}
{"id":"40|anr_________::1f21edc5c902be305ee47148955c6e50","websiteurl":null,"code":"ANR-17-CE05-0033","acronym":"MOISE","title":"METAL OXIDES AS LOW LOADED NANO-IRIDIUM SUPPORT FOR COMPETITIVE WATER ELECTROLYSIS","startdate":null,"enddate":null,"callidentifier":null,"keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"ANR","name":"French National Research Agency (ANR)","jurisdiction":"FR","funding_stream":null}],"summary":null,"granted":null,"h2020programme":[]}
{"id":"aka_________::01bb7b48e29d732a1c7bc5150b9195c4","websiteurl":null,"code":"135027","acronym":null,"title":"Dynamic 3D resolution-enhanced low-coherence interferometric imaging / Consortium: Hi-Lo","startdate":null,"enddate":null,"callidentifier":"Fotoniikka ja modernit kuvantamismenetelmät LT","keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"AKA","name":"Academy of Finland","jurisdiction":"FI","funding_stream":null}],"summary":null,"granted":null,"h2020programme":[]}
{"id":"aka_________::9d1af21dbd0f5bc719f71553d19a6b3a","websiteurl":null,"code":"316061","acronym":null,"title":"Finnish Imaging of Degenerative Shoulder Study (FIMAGE): A study on the prevalence of degenerative imaging changes of the shoulder and their relevance to clinical symptoms in the general population.","startdate":null,"enddate":null,"callidentifier":"Academy Project Funding TT","keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"AKA","name":"Academy of Finland","jurisdiction":"FI","funding_stream":null}],"summary":null,"granted":null,"h2020programme":[]}
{"id":"anr_________::1f21edc5c902be305ee47148955c6e50","websiteurl":null,"code":"ANR-17-CE05-0033","acronym":"MOISE","title":"METAL OXIDES AS LOW LOADED NANO-IRIDIUM SUPPORT FOR COMPETITIVE WATER ELECTROLYSIS","startdate":null,"enddate":null,"callidentifier":null,"keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"ANR","name":"French National Research Agency (ANR)","jurisdiction":"FR","funding_stream":null}],"summary":null,"granted":null,"h2020programme":[]}
File diff suppressed because one or more lines are too long
@@ -1,25 +1,25 @@
{"source":{"id":"00|context_____::99c8ef576f385bc322564d5694df6fc2","type":"context"},"target":{"id":"10|doajarticles::9c4b678901e5276d9e3addee566816af","type":"datasource"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"10|doajarticles::9c4b678901e5276d9e3addee566816af","type":"datasource"},"target":{"id":"00|context_____::99c8ef576f385bc322564d5694df6fc2","type":"context"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"00|context_____::e15922110564cf669aaed346e871bc01","type":"context"},"target":{"id":"10|doajarticles::acb7c79bb85d3b3a7b75389f5d9570f5","type":"datasource"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"10|doajarticles::acb7c79bb85d3b3a7b75389f5d9570f5","type":"datasource"},"target":{"id":"00|context_____::e15922110564cf669aaed346e871bc01","type":"context"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"00|context_____::aa0e56dd2e9d2a0be749f5debdd2b3d8","type":"context"},"target":{"id":"10|doajarticles::1fa6859d71faa77b32d82f278c6ed1df","type":"datasource"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"10|doajarticles::1fa6859d71faa77b32d82f278c6ed1df","type":"datasource"},"target":{"id":"00|context_____::aa0e56dd2e9d2a0be749f5debdd2b3d8","type":"context"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"00|context_____::aa0e56dd2e9d2a0be749f5debdd2b3d8","type":"context"},"target":{"id":"10|doajarticles::6eb31d13b12bc06bbac06aef63cf33c9","type":"datasource"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"10|doajarticles::6eb31d13b12bc06bbac06aef63cf33c9","type":"datasource"},"target":{"id":"00|context_____::aa0e56dd2e9d2a0be749f5debdd2b3d8","type":"context"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"00|context_____::04a00617ca659adc944977ac700ea14b","type":"context"},"target":{"id":"40|aka_________::01bb7b48e29d732a1c7bc5150b9195c4","type":"datasource"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"40|aka_________::01bb7b48e29d732a1c7bc5150b9195c4","type":"datasource"},"target":{"id":"00|context_____::04a00617ca659adc944977ac700ea14b","type":"context"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"00|context_____::04a00617ca659adc944977ac700ea14b","type":"context"},"target":{"id":"40|aka_________::01bb7b48e29d732a1c7bc5150b9195c2","type":"datasource"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"40|aka_________::01bb7b48e29d732a1c7bc5150b9195c2","type":"datasource"},"target":{"id":"00|context_____::04a00617ca659adc944977ac700ea14b","type":"context"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"00|context_____::04a00617ca659adc944977ac700ea14b","type":"context"},"target":{"id":"10|openaire____::c5502a43e76feab55dd00cf50f519125","type":"datasource"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"10|openaire____::c5502a43e76feab55dd00cf50f519125","type":"datasource"},"target":{"id":"00|context_____::04a00617ca659adc944977ac700ea14b","type":"context"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"00|context_____::04a00617ca659adc944977ac700ea14b","type":"context"},"target":{"id":"10|re3data_____::a48f09c562b247a9919acfe195549b47","type":"datasource"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"10|re3data_____::a48f09c562b247a9919acfe195549b47","type":"datasource"},"target":{"id":"00|context_____::04a00617ca659adc944977ac700ea14b","type":"context"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"00|context_____::04a00617ca659adc944977ac700ea14b","type":"context"},"target":{"id":"10|opendoar____::97275a23ca44226c9964043c8462be96","type":"datasource"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"10|opendoar____::97275a23ca44226c9964043c8462be96","type":"datasource"},"target":{"id":"00|context_____::04a00617ca659adc944977ac700ea14b","type":"context"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"00|context_____::04a00617ca659adc944977ac700ea14b","type":"context"},"target":{"id":"10|doajarticles::2899208a99aa7d142646e0a80bfeef05","type":"datasource"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"10|doajarticles::2899208a99aa7d142646e0a80bfeef05","type":"datasource"},"target":{"id":"00|context_____::04a00617ca659adc944977ac700ea14b","type":"context"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"00|context_____::e6c151d449e1db05b1ffb5ad5ec656cf","type":"context"},"target":{"id":"10|re3data_____::5b9bf9171d92df854cf3c520692e9122","type":"datasource"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"10|re3data_____::5b9bf9171d92df854cf3c520692e9122","type":"datasource"},"target":{"id":"00|context_____::e6c151d449e1db05b1ffb5ad5ec656cf","type":"context"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"00|context_____::e6c151d449e1db05b1ffb5ad5ec656cf","type":"context"},"target":{"id":"10|doajarticles::c7d3de67dc77af72f6747157441252ec","type":"datasource"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"10|doajarticles::c7d3de67dc77af72f6747157441252ec","type":"datasource"},"target":{"id":"00|context_____::e6c151d449e1db05b1ffb5ad5ec656cf","type":"context"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"00|context_____::e6c151d449e1db05b1ffb5ad5ec656cf","type":"context"},"target":{"id":"10|re3data_____::8515794670370f49c1d176c399c714f5","type":"datasource"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"context_____::99c8ef576f385bc322564d5694df6fc2","sourceType":"context","target":"doajarticles::9c4b678901e5276d9e3addee566816af","targetType":"datasource","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"doajarticles::9c4b678901e5276d9e3addee566816af","sourceType":"datasource","target":"context_____::99c8ef576f385bc322564d5694df6fc2","targetType":"context","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"context_____::e15922110564cf669aaed346e871bc01","sourceType":"context","target":"doajarticles::acb7c79bb85d3b3a7b75389f5d9570f5","targetType":"datasource","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"doajarticles::acb7c79bb85d3b3a7b75389f5d9570f5","sourceType":"datasource","target":"context_____::e15922110564cf669aaed346e871bc01","targetType":"context","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"context_____::aa0e56dd2e9d2a0be749f5debdd2b3d8","sourceType":"context","target":"doajarticles::1fa6859d71faa77b32d82f278c6ed1df","targetType":"datasource","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"doajarticles::1fa6859d71faa77b32d82f278c6ed1df","sourceType":"datasource","target":"context_____::aa0e56dd2e9d2a0be749f5debdd2b3d8","targetType":"context","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"context_____::aa0e56dd2e9d2a0be749f5debdd2b3d8","sourceType":"context","target":"doajarticles::6eb31d13b12bc06bbac06aef63cf33c9","targetType":"datasource","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"doajarticles::6eb31d13b12bc06bbac06aef63cf33c9","sourceType":"datasource","target":"context_____::aa0e56dd2e9d2a0be749f5debdd2b3d8","targetType":"context","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"context_____::04a00617ca659adc944977ac700ea14b","sourceType":"context","target":"aka_________::01bb7b48e29d732a1c7bc5150b9195c4","targetType":"project","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"aka_________::01bb7b48e29d732a1c7bc5150b9195c4","sourceType":"project","target":"context_____::04a00617ca659adc944977ac700ea14b","targetType":"context","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"context_____::04a00617ca659adc944977ac700ea14b","sourceType":"context","target":"aka_________::01bb7b48e29d732a1c7bc5150b9195c2","targetType":"project","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"aka_________::01bb7b48e29d732a1c7bc5150b9195c2","sourceType":"project","target":"context_____::04a00617ca659adc944977ac700ea14b","targetType":"context","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"context_____::04a00617ca659adc944977ac700ea14b","sourceType":"context","target":"openaire____::c5502a43e76feab55dd00cf50f519125","targetType":"datasource","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"openaire____::c5502a43e76feab55dd00cf50f519125","sourceType":"datasource","target":"context_____::04a00617ca659adc944977ac700ea14b","targetType":"context","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"context_____::04a00617ca659adc944977ac700ea14b","sourceType":"context","target":"re3data_____::a48f09c562b247a9919acfe195549b47","targetType":"datasource","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"re3data_____::a48f09c562b247a9919acfe195549b47","sourceType":"datasource","target":"context_____::04a00617ca659adc944977ac700ea14b","targetType":"context","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"context_____::04a00617ca659adc944977ac700ea14b","sourceType":"context","target":"opendoar____::97275a23ca44226c9964043c8462be96","targetType":"datasource","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"opendoar____::97275a23ca44226c9964043c8462be96","sourceType":"datasource","target":"context_____::04a00617ca659adc944977ac700ea14b","targetType":"context","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"context_____::04a00617ca659adc944977ac700ea14b","sourceType":"context","target":"doajarticles::2899208a99aa7d142646e0a80bfeef05","targetType":"datasource","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"doajarticles::2899208a99aa7d142646e0a80bfeef05","sourceType":"datasource","target":"context_____::04a00617ca659adc944977ac700ea14b","targetType":"context","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"context_____::e6c151d449e1db05b1ffb5ad5ec656cf","sourceType":"context","target":"re3data_____::5b9bf9171d92df854cf3c520692e9122","targetType":"datasource","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"re3data_____::5b9bf9171d92df854cf3c520692e9122","sourceType":"datasource","target":"context_____::e6c151d449e1db05b1ffb5ad5ec656cf","targetType":"context","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"context_____::e6c151d449e1db05b1ffb5ad5ec656cf","sourceType":"context","target":"doajarticles::c7d3de67dc77af72f6747157441252ec","targetType":"datasource","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"doajarticles::c7d3de67dc77af72f6747157441252ec","sourceType":"datasource","target":"context_____::e6c151d449e1db05b1ffb5ad5ec656cf","targetType":"context","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"context_____::e6c151d449e1db05b1ffb5ad5ec656cf","sourceType":"context","target":"re3data_____::8515794670370f49c1d176c399c714f5","targetType":"datasource","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
@@ -1,6 +1,6 @@
{"source":{"id":"20|grid________::87698402476531ba39e61f1df38f2a91","type":"datasource"},"target":{"id":"00|context_____::04a00617ca659adc944977ac700ea14b","type":"context"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"00|context_____::04a00617ca659adc944977ac700ea14b","type":"context"},"target":{"id":"20|grid________::87698402476531ba39e61f1df38f2a91","type":"datasource"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"20|grid________::94948cc036605bf4a00ec77ce5ca92d3","type":"datasource"},"target":{"id":"00|context_____::5fde864866ea5ded4cc873b3170b63c3","type":"context"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"00|context_____::5fde864866ea5ded4cc873b3170b63c3","type":"context"},"target":{"id":"20|grid________::94948cc036605bf4a00ec77ce5ca92d3","type":"datasource"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"20|grid________::94948cc036605bf4a00ec77ce5ca92d3","type":"datasource"},"target":{"id":"00|context_____::e6c151d449e1db05b1ffb5ad5ec656cf","type":"context"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":{"id":"00|context_____::e6c151d449e1db05b1ffb5ad5ec656cf","type":"context"},"target":{"id":"20|grid________::94948cc036605bf4a00ec77ce5ca92d3","type":"datasource"},"reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"grid________::87698402476531ba39e61f1df38f2a91","sourceType":"organization","target":"context_____::04a00617ca659adc944977ac700ea14b","targetType":"context","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"context_____::04a00617ca659adc944977ac700ea14b","sourceType":"context","target":"grid________::87698402476531ba39e61f1df38f2a91","targetType":"organization","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"grid________::94948cc036605bf4a00ec77ce5ca92d3","sourceType":"organization","target":"context_____::5fde864866ea5ded4cc873b3170b63c3","targetType":"context","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"context_____::5fde864866ea5ded4cc873b3170b63c3","sourceType":"context","target":"grid________::94948cc036605bf4a00ec77ce5ca92d3","targetType":"organization","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"grid________::94948cc036605bf4a00ec77ce5ca92d3","sourceType":"organization","target":"context_____::e6c151d449e1db05b1ffb5ad5ec656cf","targetType":"context","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}
{"source":"context_____::e6c151d449e1db05b1ffb5ad5ec656cf","sourceType":"context","target":"grid________::94948cc036605bf4a00ec77ce5ca92d3","targetType":"organization","reltype":{"name":"IsRelatedTo","type":"relationship"},"provenance":{"provenance":"Linked by user","trust":"0.9"},"validated":false,"validationDate":null}