Merge pull request 'eoscAddOrganizationProjectsAndRelations' (#7) from eoscAddOrganizationProjectsAndRelations into eoscDump

Reviewed-on: #7
Miriam Baglioni 2024-01-08 12:04:47 +01:00
commit 09b03c1c8a
54 changed files with 2429 additions and 570 deletions

api/pom.xml
View File

@ -0,0 +1,49 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>eu.dnetlib.dhp</groupId>
<artifactId>dhp-graph-dump</artifactId>
<version>1.2.5-SNAPSHOT</version>
</parent>
<groupId>eu.dnetlib.dhp</groupId>
<artifactId>api</artifactId>
<version>1.2.5-SNAPSHOT</version>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
</properties>
<dependencies>
<dependency>
<groupId>dom4j</groupId>
<artifactId>dom4j</artifactId>
</dependency>
<dependency>
<groupId>jaxen</groupId>
<artifactId>jaxen</artifactId>
</dependency>
<dependency>
<groupId>eu.dnetlib.dhp</groupId>
<artifactId>dhp-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<scope>compile</scope>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,75 @@
package eu.dnetlib.dhp.communityapi;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
/**
* @author miriam.baglioni
* @Date 06/10/23
*/
public class QueryCommunityAPI {
private static final String PRODUCTION_BASE_URL = "https://services.openaire.eu/openaire/";
private static String get(String geturl) throws IOException {
URL url = new URL(geturl);
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setDoOutput(true);
conn.setRequestMethod("GET");
int responseCode = conn.getResponseCode();
String body = getBody(conn);
conn.disconnect();
if (responseCode != HttpURLConnection.HTTP_OK)
throw new IOException("Unexpected code " + responseCode + body);
return body;
}
public static String communities() throws IOException {
return get(PRODUCTION_BASE_URL + "community/communities");
}
public static String community(String id) throws IOException {
return get(PRODUCTION_BASE_URL + "community/" + id);
}
public static String communityDatasource(String id) throws IOException {
return get(PRODUCTION_BASE_URL + "community/" + id + "/contentproviders");
}
public static String communityPropagationOrganization(String id) throws IOException {
return get(PRODUCTION_BASE_URL + "community/" + id + "/propagationOrganizations");
}
public static String communityProjects(String id, String page, String size) throws IOException {
return get(PRODUCTION_BASE_URL + "community/" + id + "/projects/" + page + "/" + size);
}
private static String getBody(HttpURLConnection conn) throws IOException {
String body = "{}";
try (BufferedReader br = new BufferedReader(
new InputStreamReader(conn.getInputStream(), "utf-8"))) {
StringBuilder response = new StringBuilder();
String responseLine = null;
while ((responseLine = br.readLine()) != null) {
response.append(responseLine.trim());
}
body = response.toString();
}
return body;
}
}
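A minimal usage sketch of this client (illustrative, not part of the changeset): fetch all communities and bind the response to the CommunitySummary model added below, assuming Jackson's ObjectMapper is on the classpath.
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.communityapi.QueryCommunityAPI;
import eu.dnetlib.dhp.communityapi.model.CommunityModel;
import eu.dnetlib.dhp.communityapi.model.CommunitySummary;

public class QueryCommunityAPIExample {
	public static void main(String[] args) throws Exception {
		// communities() returns the raw JSON array served by the production API
		ObjectMapper mapper = new ObjectMapper();
		CommunitySummary communities = mapper
			.readValue(QueryCommunityAPI.communities(), CommunitySummary.class);
		// CommunitySummary extends ArrayList<CommunityModel>, so it is iterable
		for (CommunityModel c : communities)
			System.out.println(c.getId() + " -> " + c.getName());
	}
}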

View File

@ -0,0 +1,30 @@
package eu.dnetlib.dhp.communityapi.model;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@JsonAutoDetect
@JsonIgnoreProperties(ignoreUnknown = true)
public class CommunityContentprovider {
private String openaireId;
private String enabled;
public String getEnabled() {
return enabled;
}
public void setEnabled(String enabled) {
this.enabled = enabled;
}
public String getOpenaireId() {
return openaireId;
}
public void setOpenaireId(final String openaireId) {
this.openaireId = openaireId;
}
}

View File

@ -0,0 +1,21 @@
package eu.dnetlib.dhp.communityapi.model;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
public class CommunityEntityMap extends HashMap<String, List<String>> {
public CommunityEntityMap() {
super();
}
public List<String> get(String key) {
if (super.get(key) == null) {
return new ArrayList<>();
}
return super.get(key);
}
}
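The overridden get makes lookups null-safe; a short sketch of the intended behavior (the key and identifier below are hypothetical):
import java.util.ArrayList;
import java.util.List;
import eu.dnetlib.dhp.communityapi.model.CommunityEntityMap;

public class CommunityEntityMapExample {
	public static void main(String[] args) {
		CommunityEntityMap map = new CommunityEntityMap();
		List<String> projects = new ArrayList<>();
		projects.add("40|project::example"); // hypothetical identifier
		map.put("corda__h2020", projects); // hypothetical key
		// A missing key yields an empty list instead of null, so callers can
		// iterate without a null check; note the empty list is not stored back.
		for (String id : map.get("someOtherFunder"))
			System.out.println(id);
	}
}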

View File

@ -0,0 +1,82 @@
package eu.dnetlib.dhp.communityapi.model;
import java.io.Serializable;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
/**
* @author miriam.baglioni
* @Date 06/10/23
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class CommunityModel implements Serializable {
private String id;
private String name;
private String description;
private String status;
private String type;
private List<String> subjects;
private String zenodoCommunity;
public List<String> getSubjects() {
return subjects;
}
public void setSubjects(List<String> subjects) {
this.subjects = subjects;
}
public String getZenodoCommunity() {
return zenodoCommunity;
}
public void setZenodoCommunity(String zenodoCommunity) {
this.zenodoCommunity = zenodoCommunity;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
}

View File

@ -0,0 +1,15 @@
package eu.dnetlib.dhp.communityapi.model;
import java.io.Serializable;
import java.util.ArrayList;
/**
* @author miriam.baglioni
* @Date 06/10/23
*/
public class CommunitySummary extends ArrayList<CommunityModel> implements Serializable {
public CommunitySummary() {
super();
}
}

View File

@ -0,0 +1,51 @@
package eu.dnetlib.dhp.communityapi.model;
import java.io.Serializable;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
/**
* @author miriam.baglioni
* @Date 09/10/23
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class ContentModel implements Serializable {
private List<ProjectModel> content;
private Integer totalPages;
private Boolean last;
private Integer number;
public List<ProjectModel> getContent() {
return content;
}
public void setContent(List<ProjectModel> content) {
this.content = content;
}
public Integer getTotalPages() {
return totalPages;
}
public void setTotalPages(Integer totalPages) {
this.totalPages = totalPages;
}
public Boolean getLast() {
return last;
}
public void setLast(Boolean last) {
this.last = last;
}
public Integer getNumber() {
return number;
}
public void setNumber(Integer number) {
this.number = number;
}
}
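ContentModel binds one page of the paged projects endpoint; a hedged sketch of how its last flag could drive pagination through QueryCommunityAPI.communityProjects (the community id "eosc" and the page size 100 are assumptions, not values from this commit):
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.communityapi.QueryCommunityAPI;
import eu.dnetlib.dhp.communityapi.model.ContentModel;

public class CommunityProjectsPagingExample {
	public static void main(String[] args) throws Exception {
		ObjectMapper mapper = new ObjectMapper();
		int page = 0;
		ContentModel current;
		do {
			// "eosc" is an assumed community id; 100 is an arbitrary page size
			current = mapper
				.readValue(
					QueryCommunityAPI.communityProjects("eosc", String.valueOf(page), "100"),
					ContentModel.class);
			if (current.getContent() != null)
				current.getContent().forEach(p -> System.out.println(p.getOpenaireId()));
			page++;
		} while (!current.getLast());
	}
}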

View File

@ -0,0 +1,11 @@
package eu.dnetlib.dhp.communityapi.model;
import java.io.Serializable;
import java.util.ArrayList;
public class DatasourceList extends ArrayList<CommunityContentprovider> implements Serializable {
public DatasourceList() {
super();
}
}

View File

@ -0,0 +1,16 @@
package eu.dnetlib.dhp.communityapi.model;
import java.io.Serializable;
import java.util.ArrayList;
/**
* @author miriam.baglioni
* @Date 09/10/23
*/
public class OrganizationList extends ArrayList<String> implements Serializable {
public OrganizationList() {
super();
}
}

View File

@ -0,0 +1,44 @@
package eu.dnetlib.dhp.communityapi.model;
import java.io.Serializable;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
/**
* @author miriam.baglioni
* @Date 09/10/23
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class ProjectModel implements Serializable {
private String openaireId;
private String funder;
private String gratId;
public String getFunder() {
return funder;
}
public void setFunder(String funder) {
this.funder = funder;
}
public String getGratId() {
return gratId;
}
public void setGratId(String gratId) {
this.gratId = gratId;
}
public String getOpenaireId() {
return openaireId;
}
public void setOpenaireId(String openaireId) {
this.openaireId = openaireId;
}
}

View File

@ -0,0 +1,46 @@
package eu.dnetlib.dhp.eosc.model;
import java.io.Serializable;
import java.util.List;
import com.github.imifou.jsonschema.module.addon.annotation.JsonSchema;
/**
* @author miriam.baglioni
* @Date 13/09/22
*/
public class Affiliation implements Serializable {
@JsonSchema(description = "the OpenAIRE id of the organizaiton")
private String id;
@JsonSchema(description = "the name of the organization")
private String name;
@JsonSchema(description = "the list of pids we have in OpenAIRE for the organization")
private List<OrganizationPid> pid;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<OrganizationPid> getPid() {
return pid;
}
public void setPid(List<OrganizationPid> pid) {
this.pid = pid;
}
}

View File

@ -1,58 +1,31 @@
package eu.dnetlib.dhp.eosc.model;
import java.io.Serializable;
/**
* @author miriam.baglioni
* @Date 25/10/23
*/
import com.github.imifou.jsonschema.module.addon.annotation.JsonSchema;
/**
* @author miriam.baglioni
* @Date 26/01/23
* To store information about the funder funding the project related to the result. It extends
* FunderShort with the following parameter:
* - private Fundings funding_stream to store the funding stream
*/
public class Funder extends FunderShort {
@JsonSchema(description = "Description of the funding stream")
private Fundings funding_stream;
public Fundings getFunding_stream() {
return funding_stream;
}
public void setFunding_stream(Fundings funding_stream) {
this.funding_stream = funding_stream;
}
}

View File

@ -0,0 +1,58 @@
package eu.dnetlib.dhp.eosc.model;
import java.io.Serializable;
import com.github.imifou.jsonschema.module.addon.annotation.JsonSchema;
/**
* @author miriam.baglioni
* @Date 26/01/23
*/
public class FunderShort implements Serializable {
@JsonSchema(description = "The short name of the funder (EC)")
private String shortName;
@JsonSchema(description = "The name of the funder (European Commission)")
private String name;
@JsonSchema(
description = "Geographical jurisdiction (e.g. for European Commission is EU, for Croatian Science Foundation is HR)")
private String jurisdiction;
public String getJurisdiction() {
return jurisdiction;
}
public void setJurisdiction(String jurisdiction) {
this.jurisdiction = jurisdiction;
}
public String getShortName() {
return shortName;
}
public void setShortName(String shortName) {
this.shortName = shortName;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@JsonSchema(description = "Stream of funding (e.g. for European Commission can be H2020 or FP7)")
private String fundingStream;
public String getFundingStream() {
return fundingStream;
}
public void setFundingStream(String fundingStream) {
this.fundingStream = fundingStream;
}
}

View File

@ -0,0 +1,46 @@
package eu.dnetlib.dhp.eosc.model;
/**
* @author miriam.baglioni
* @Date 25/10/23
*/
import java.io.Serializable;
import com.github.imifou.jsonschema.module.addon.annotation.JsonSchema;
/**
* To store information about the funding stream. It has two parameters: - private String id to store the id of the
* funding stream. The id is created by appending the shortname of the funder to the name of each level in the xml
* representing the funding stream. For example: if the funder is the European Commission, the funding level 0 name is
* FP7, the funding level 1 name is SP3 and the funding level 2 name is PEOPLE then the id will be: EC::FP7::SP3::PEOPLE
* - private String description to describe the funding stream. It is created by concatenating the description of each
* funding level so for the example above the description would be: SEVENTH FRAMEWORK PROGRAMME - SP3-People -
* Marie-Curie Actions
*/
public class Fundings implements Serializable {
@JsonSchema(description = "Id of the funding stream")
private String id;
private String description;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
}
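A short sketch instantiating Fundings with the worked example from the comment above (the wrapper class name is hypothetical):
import eu.dnetlib.dhp.eosc.model.Fundings;

public class FundingsExample {
	public static void main(String[] args) {
		Fundings fundingStream = new Fundings();
		// id: funder short name followed by the name of each funding level
		fundingStream.setId("EC::FP7::SP3::PEOPLE");
		// description: concatenation of the descriptions of each funding level
		fundingStream
			.setDescription("SEVENTH FRAMEWORK PROGRAMME - SP3-People - Marie-Curie Actions");
		System.out.println(fundingStream.getId());
	}
}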

View File

@ -0,0 +1,69 @@
package eu.dnetlib.dhp.eosc.model;
/**
* @author miriam.baglioni
* @Date 25/10/23
*/
import java.io.Serializable;
import com.github.imifou.jsonschema.module.addon.annotation.JsonSchema;
/**
* To describe the funded amount. It has the following parameters:
* - private String currency to store the currency of the fund
* - private float totalcost to store the total cost of the project
* - private float fundedamount to store the amount funded by the funder
*/
public class Granted implements Serializable {
@JsonSchema(description = "The currency of the granted amount (e.g. EUR)")
private String currency;
@JsonSchema(description = "The total cost of the project")
private float totalcost;
@JsonSchema(description = "The funded amount")
private float fundedamount;
public String getCurrency() {
return currency;
}
public void setCurrency(String currency) {
this.currency = currency;
}
public float getTotalcost() {
return totalcost;
}
public void setTotalcost(float totalcost) {
this.totalcost = totalcost;
}
public float getFundedamount() {
return fundedamount;
}
public void setFundedamount(float fundedamount) {
this.fundedamount = fundedamount;
}
public static Granted newInstance(String currency, float totalcost, float fundedamount) {
Granted granted = new Granted();
granted.currency = currency;
granted.totalcost = totalcost;
granted.fundedamount = fundedamount;
return granted;
}
public static Granted newInstance(String currency, float fundedamount) {
Granted granted = new Granted();
granted.currency = currency;
granted.fundedamount = fundedamount;
return granted;
}
}
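A sketch of the two factory methods with hypothetical figures:
import eu.dnetlib.dhp.eosc.model.Granted;

public class GrantedExample {
	public static void main(String[] args) {
		// a 2.5M EUR project of which 2M is funded (figures are made up)
		Granted withTotal = Granted.newInstance("EUR", 2500000f, 2000000f);
		// when the total cost is unknown the two-argument factory leaves it at 0
		Granted fundedOnly = Granted.newInstance("EUR", 2000000f);
		System.out.println(withTotal.getTotalcost() + " / " + fundedOnly.getFundedamount());
	}
}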

View File

@ -7,19 +7,72 @@ import java.util.List;
import com.github.imifou.jsonschema.module.addon.annotation.JsonSchema;
/**
* @author miriam.baglioni
* @Date 13/09/22
* To represent the generic organization. It has the following parameters:
* - private String legalshortname to store the legal short name of the organization
* - private String legalname to store the legal name of the organization
* - private String websiteurl to store the website url of the organization
* - private List<String> alternativenames to store the alternative names of the organization
* - private Country country to store the country of the organization
* - private String id to store the OpenAIRE id of the organization
* - private List<OrganizationPid> pid to store the list of pids for the organization
*/
public class Organization implements Serializable {
private String legalshortname;
private String legalname;
private String websiteurl;
@JsonSchema(description = "Alternative names that identify the organisation")
private List<String> alternativenames;
@JsonSchema(description = "The organisation country")
private Country country;
@JsonSchema(description = "The OpenAIRE id for the organisation")
private String id;
@JsonSchema(description = "Persistent identifiers for the organisation i.e. isni 0000000090326370")
private List<OrganizationPid> pid;
public String getLegalshortname() {
return legalshortname;
}
public void setLegalshortname(String legalshortname) {
this.legalshortname = legalshortname;
}
public String getLegalname() {
return legalname;
}
public void setLegalname(String legalname) {
this.legalname = legalname;
}
public String getWebsiteurl() {
return websiteurl;
}
public void setWebsiteurl(String websiteurl) {
this.websiteurl = websiteurl;
}
public List<String> getAlternativenames() {
return alternativenames;
}
public void setAlternativenames(List<String> alternativenames) {
this.alternativenames = alternativenames;
}
public Country getCountry() {
return country;
}
public void setCountry(Country country) {
this.country = country;
}
public String getId() {
return id;
}
@ -28,14 +81,6 @@ public class Organization implements Serializable {
this.id = id;
}
public List<OrganizationPid> getPid() {
return pid;
}
@ -43,4 +88,5 @@ public class Organization implements Serializable {
public void setPid(List<OrganizationPid> pid) {
this.pid = pid;
}
}

View File

@ -32,4 +32,12 @@ public class OrganizationPid implements Serializable {
public void setValue(String value) {
this.value = value;
}
public static OrganizationPid newInstance(String type, String value) {
OrganizationPid op = new OrganizationPid();
op.type = type;
op.value = value;
return op;
}
}

View File

@ -0,0 +1,49 @@
package eu.dnetlib.dhp.eosc.model;
/**
* @author miriam.baglioni
* @Date 25/10/23
*/
import java.io.Serializable;
import com.github.imifou.jsonschema.module.addon.annotation.JsonSchema;
/**
* To store information about the ec programme for the project. It has the following parameters: - private String code
* to store the code of the programme - private String description to store the description of the programme
*/
public class Programme implements Serializable {
@JsonSchema(description = "The code of the programme")
private String code;
@JsonSchema(description = "The description of the programme")
private String description;
public String getCode() {
return code;
}
public void setCode(String code) {
this.code = code;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public static Programme newInstance(String code, String description) {
Programme p = new Programme();
p.code = code;
p.description = description;
return p;
}
}

View File

@ -1,56 +1,80 @@
package eu.dnetlib.dhp.eosc.model;
/**
* @author miriam.baglioni
* @Date 25/10/23
*/
import java.io.Serializable;
import java.util.List;
import com.github.imifou.jsonschema.module.addon.annotation.JsonSchema;
/**
* @author miriam.baglioni
* @Date 26/01/23
* This is the class representing the Project in the model used for the dumps of the whole graph. At the moment the dump
* of the Projects differs from the other dumps because we do not create relations between Funders (Organization) and
* Projects; instead we put the information about the Funder within the Project representation. We also removed the
* collectedfrom element from the Project. No relation between the Project and the Datasource entity from which it is
* collected will be created. We will never create relations between Project and Datasource. In case some relations are
* extracted from the Project they will refer to the Funder and will be of type (organization -> funds -> project,
* project -> isFundedBy -> organization). We also removed the duration parameter because most of the time it is set to
* 0. It has the following parameters:
* - private String id to store the id of the project (OpenAIRE id)
* - private String websiteurl to store the website url of the project
* - private String code to store the grant agreement of the project
* - private String acronym to store the acronym of the project
* - private String title to store the title of the project
* - private String startdate to store the start date
* - private String enddate to store the end date
* - private String callidentifier to store the call identifier
* - private String keywords to store the keywords
* - private boolean openaccessmandateforpublications to store whether the project must comply with the open access
* mandate for publications. This value is set to true if one of the fields in the project represented in the internal
* model is set to true
* - private boolean openaccessmandatefordataset to store whether the project must comply with the open access mandate
* for datasets. It is set to the value of the corresponding field of the project represented in the internal model
* - private List<String> subject to store the list of subjects of the project
* - private List<Funder> funding to store the list of funders of the project
* - private String summary to store the summary of the project
* - private Granted granted to store the granted amount
* - private List<Programme> h2020programme to store the list of programmes the project is related to
*/
public class Project implements Serializable {
@JsonSchema(description = "The OpenAIRE id for the project")
private String id;
private String websiteurl;
@JsonSchema(description = "The grant agreement number")
private String code;
@JsonSchema(description = "The acronym of the project")
private String acronym;
private String title;
private String startdate;
private String enddate;
private String callidentifier;
private String keywords;
private boolean openaccessmandateforpublications;
private boolean openaccessmandatefordataset;
private List<String> subject;
@JsonSchema(description = "Funding information for the project")
private List<Funder> funding;
private String summary;
@JsonSchema(description = "The money granted to the project")
private Granted granted;
@JsonSchema(description = "The h2020 programme funding the project")
private List<Programme> h2020programme;
public String getId() {
return id;
@ -60,6 +84,14 @@ public class Project implements Serializable {
this.id = id;
}
public String getWebsiteurl() {
return websiteurl;
}
public void setWebsiteurl(String websiteurl) {
this.websiteurl = websiteurl;
}
public String getCode() {
return code;
}
@ -84,14 +116,91 @@ public class Project implements Serializable {
this.title = title;
}
public String getStartdate() {
return startdate;
}
public void setStartdate(String startdate) {
this.startdate = startdate;
}
public String getEnddate() {
return enddate;
}
public void setEnddate(String enddate) {
this.enddate = enddate;
}
public String getCallidentifier() {
return callidentifier;
}
public void setCallidentifier(String callidentifier) {
this.callidentifier = callidentifier;
}
public String getKeywords() {
return keywords;
}
public void setKeywords(String keywords) {
this.keywords = keywords;
}
public boolean isOpenaccessmandateforpublications() {
return openaccessmandateforpublications;
}
public void setOpenaccessmandateforpublications(boolean openaccessmandateforpublications) {
this.openaccessmandateforpublications = openaccessmandateforpublications;
}
public boolean isOpenaccessmandatefordataset() {
return openaccessmandatefordataset;
}
public void setOpenaccessmandatefordataset(boolean openaccessmandatefordataset) {
this.openaccessmandatefordataset = openaccessmandatefordataset;
}
public List<String> getSubject() {
return subject;
}
public void setSubject(List<String> subject) {
this.subject = subject;
}
public List<Funder> getFunding() {
return funding;
}
public void setFunding(List<Funder> funding) {
this.funding = funding;
}
public String getSummary() {
return summary;
}
public void setSummary(String summary) {
this.summary = summary;
}
public Granted getGranted() {
return granted;
}
public void setGranted(Granted granted) {
this.granted = granted;
}
public List<Programme> getH2020programme() {
return h2020programme;
}
public void setH2020programme(List<Programme> h2020programme) {
this.h2020programme = h2020programme;
}
}

View File

@ -0,0 +1,97 @@
package eu.dnetlib.dhp.eosc.model;
import java.io.Serializable;
import com.github.imifou.jsonschema.module.addon.annotation.JsonSchema;
/**
* @author miriam.baglioni
* @Date 26/01/23
*/
public class ProjectSummary implements Serializable {
@JsonSchema(description = "The OpenAIRE id for the project")
protected String id;// OpenAIRE id
@JsonSchema(description = "The grant agreement number")
protected String code;
@JsonSchema(description = "The acronym of the project")
protected String acronym;
protected String title;
@JsonSchema(description = "Information about the funder funding the project")
private FunderShort funder;
private Provenance provenance;
private Validated validated;
public void setValidated(Validated validated) {
this.validated = validated;
}
public Validated getValidated() {
return validated;
}
public Provenance getProvenance() {
return provenance;
}
public void setProvenance(Provenance provenance) {
this.provenance = provenance;
}
public FunderShort getFunder() {
return funder;
}
public void setFunder(FunderShort funders) {
this.funder = funders;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getCode() {
return code;
}
public void setCode(String code) {
this.code = code;
}
public String getAcronym() {
return acronym;
}
public void setAcronym(String acronym) {
this.acronym = acronym;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public static ProjectSummary newInstance(String id, String code, String acronym, String title, FunderShort funder) {
ProjectSummary project = new ProjectSummary();
project.setAcronym(acronym);
project.setCode(code);
project.setFunder(funder);
project.setId(id);
project.setTitle(title);
return project;
}
}

View File

@ -4,6 +4,7 @@ package eu.dnetlib.dhp.eosc.model;
import java.io.Serializable;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.github.imifou.jsonschema.module.addon.annotation.JsonSchema;
/**
@ -20,12 +21,15 @@ public class Relation implements Serializable {
private String target;
@JsonSchema(description = "To represent the semantics of a relation between two entities")
@JsonIgnoreProperties(ignoreUnknown = true)
private RelType reltype;
@JsonSchema(description = "The reason why OpenAIRE holds the relation ")
@JsonIgnoreProperties(ignoreUnknown = true)
private Provenance provenance;
@JsonSchema(description = "The result type of the target for this relation")
@JsonIgnoreProperties(ignoreUnknown = true)
private String targetType;
public String getTargetType() {
@ -82,4 +86,12 @@ public class Relation implements Serializable {
relation.provenance = provenance;
return relation;
}
public static Relation newInstance(String source, String target) {
Relation relation = new Relation();
relation.source = source;
relation.target = target;
return relation;
}
}

View File

@ -24,13 +24,13 @@ public class Result implements Serializable {
private List<String> keywords;
@JsonSchema(description = "The list of organizations the result is affiliated to")
private List<Organization> affiliation;
private List<Affiliation> affiliation;
@JsonSchema(description = "The indicators for this result")
private Indicator indicator;
@JsonSchema(description = "List of projects (i.e. grants) that (co-)funded the production ofn the research results")
private List<Project> projects;
private List<ProjectSummary> projects;
@JsonSchema(
description = "Reference to a relevant research infrastructure, initiative or community (RI/RC) among those collaborating with OpenAIRE. Please see https://connect.openaire.eu")
@ -409,11 +409,11 @@ public class Result implements Serializable {
this.collectedfrom = collectedfrom;
}
public List<Project> getProjects() {
public List<ProjectSummary> getProjects() {
return projects;
}
public void setProjects(List<Project> projects) {
public void setProjects(List<ProjectSummary> projects) {
this.projects = projects;
}
@ -465,11 +465,11 @@ public class Result implements Serializable {
this.subject = subject;
}
public List<Organization> getAffiliation() {
public List<Affiliation> getAffiliation() {
return affiliation;
}
public void setAffiliation(List<Organization> affiliation) {
public void setAffiliation(List<Affiliation> affiliation) {
this.affiliation = affiliation;
}
}

View File

@ -54,7 +54,17 @@
<artifactId>dump-schema</artifactId>
<version>1.2.5-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>eu.dnetlib.dhp</groupId>
<artifactId>api</artifactId>
<version>1.2.5-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>eu.dnetlib.dhp</groupId>
<artifactId>api</artifactId>
<version>1.2.5-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
</dependencies>

View File

@ -0,0 +1,55 @@
package eu.dnetlib.dhp.oa.graph.dump;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.communityapi.model.*;
import eu.dnetlib.dhp.oa.graph.dump.eosc.CommunityMap;
public class UtilCommunityAPI {
private static final Logger log = LoggerFactory.getLogger(UtilCommunityAPI.class);
public CommunityMap getCommunityMap(boolean singleCommunity, String communityId)
throws IOException {
if (singleCommunity)
return getMap(Arrays.asList(getCommunity(communityId)));
return getMap(getValidCommunities());
}
private CommunityMap getMap(List<CommunityModel> communities) {
final CommunityMap map = new CommunityMap();
communities.forEach(c -> map.put(c.getId(), c.getName()));
return map;
}
private List<CommunityModel> getValidCommunities() throws IOException {
ObjectMapper mapper = new ObjectMapper();
return mapper
.readValue(eu.dnetlib.dhp.communityapi.QueryCommunityAPI.communities(), CommunitySummary.class)
.stream()
.filter(
community -> (community.getStatus().equals("all") || community.getStatus().equalsIgnoreCase("public"))
&&
(community.getType().equals("ri") || community.getType().equals("community")))
.collect(Collectors.toList());
}
private CommunityModel getCommunity(String id) throws IOException {
ObjectMapper mapper = new ObjectMapper();
return mapper
.readValue(eu.dnetlib.dhp.communityapi.QueryCommunityAPI.community(id), CommunityModel.class);
}
}
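A hedged sketch of the two retrieval modes, assuming CommunityMap behaves as a plain map (the community id "eosc" is an assumption):
import eu.dnetlib.dhp.oa.graph.dump.UtilCommunityAPI;
import eu.dnetlib.dhp.oa.graph.dump.eosc.CommunityMap;

public class CommunityMapExample {
	public static void main(String[] args) throws Exception {
		UtilCommunityAPI api = new UtilCommunityAPI();
		// all valid communities (status public/all, type ri or community)
		CommunityMap all = api.getCommunityMap(false, null);
		// or a single community when dumping for one deposition
		CommunityMap single = api.getCommunityMap(true, "eosc");
		System.out.println(all.size() + " / " + single.size());
	}
}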

View File

@ -19,6 +19,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.eosc.model.Affiliation;
import eu.dnetlib.dhp.eosc.model.OrganizationPid;
import eu.dnetlib.dhp.eosc.model.Result;
import eu.dnetlib.dhp.schema.common.ModelConstants;
@ -91,7 +92,7 @@ public class ExtendEoscResultWithOrganization implements Serializable {
if (t2._2() != null) {
ResultOrganizations rOrg = new ResultOrganizations();
rOrg.setResultId(t2._1().getTarget());
eu.dnetlib.dhp.eosc.model.Organization org = new eu.dnetlib.dhp.eosc.model.Organization();
Affiliation org = new Affiliation();
org.setId(t2._2().getId());
if (Optional.ofNullable(t2._2().getLegalname()).isPresent()) {
org.setName(t2._2().getLegalname().getValue());
@ -135,7 +136,7 @@ public class ExtendEoscResultWithOrganization implements Serializable {
return first._1();
}
Result ret = first._1();
List<eu.dnetlib.dhp.eosc.model.Organization> affiliation = new ArrayList<>();
List<Affiliation> affiliation = new ArrayList<>();
Set<String> alreadyInsertedAffiliations = new HashSet<>();
affiliation.add(first._2().getAffiliation());
alreadyInsertedAffiliations.add(first._2().getAffiliation().getId());

View File

@ -5,6 +5,7 @@ import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
import java.io.Serializable;
import java.util.*;
import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
@ -15,10 +16,14 @@ import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spark_project.jetty.util.StringUtil;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.eosc.model.Affiliation;
import eu.dnetlib.dhp.eosc.model.Country;
import eu.dnetlib.dhp.eosc.model.OrganizationPid;
import eu.dnetlib.dhp.eosc.model.Result;
import eu.dnetlib.dhp.schema.common.ModelConstants;
@ -33,6 +38,8 @@ import scala.Tuple2;
public class ExtendEoscResultWithOrganizationStep2 implements Serializable {
private static final Logger log = LoggerFactory.getLogger(ExtendEoscResultWithOrganizationStep2.class);
private final static String UNKNOWN = "UNKNOWN";
public static void main(String[] args) throws Exception {
String jsonConfiguration = IOUtils
.toString(
@ -52,27 +59,97 @@ public class ExtendEoscResultWithOrganizationStep2 implements Serializable {
final String inputPath = parser.get("sourcePath");
log.info("inputPath: {}", inputPath);
final String resultPath = parser.get("resultPath");
log.info("resultPath: {}", resultPath);
final String workingPath = parser.get("workingPath");
log.info("workingPath: {}", workingPath);
final String outputPath = parser.get("outputPath");
log.info("outputPath: {}", outputPath);
final String resultType = parser.get("resultType");
log.info("resultType: {}", resultType);
SparkConf conf = new SparkConf();
runWithSparkSession(
conf,
isSparkSessionManaged,
spark -> {
Utils.removeOutputDir(spark, outputPath);
addOrganizations(spark, inputPath, outputPath, resultPath);
Utils.removeOutputDir(spark, workingPath + resultType + "extendedaffiliation");
Utils.removeOutputDir(spark, workingPath + resultType + "organization");
Utils.removeOutputDir(spark, workingPath + resultType + "resultOrganization");
addOrganizations(spark, inputPath, workingPath, resultType);
dumpOrganizationAndRelations(spark, inputPath, workingPath, resultType);
});
}
private static void addOrganizations(SparkSession spark, String inputPath, String outputPath,
String resultPath) {
private static void dumpOrganizationAndRelations(SparkSession spark, String inputPath, String workingPath,
String resultType) {
Dataset<Relation> relation = Utils
.readPath(spark, inputPath + "/relation", Relation.class)
.filter(
(FilterFunction<Relation>) r -> !r.getDataInfo().getDeletedbyinference() &&
r.getSubRelType().equalsIgnoreCase(ModelConstants.AFFILIATION));
Dataset<Organization> organization = Utils
.readPath(spark, inputPath + "/organization", Organization.class)
.filter((FilterFunction<Organization>) o -> !o.getDataInfo().getDeletedbyinference());
Dataset<Result> result = Utils.readPath(spark, workingPath + resultType, Result.class);
// result -> organization takes the relation of type affiliation having the source in the results related to
// EOSC
Dataset<Relation> eoscRelation = result
.joinWith(relation, result.col("id").equalTo(relation.col("source")))
.map((MapFunction<Tuple2<Result, Relation>, Relation>) t2 -> t2._2(), Encoders.bean(Relation.class));
log.info("Number of affiliation relation for " + resultType + " = " + eoscRelation.count());
// from eoscRelation select the organization
Dataset<String> organizationIds = eoscRelation
.joinWith(organization, eoscRelation.col("target").equalTo(organization.col("id")))
.map(
(MapFunction<Tuple2<Relation, Organization>, String>) t2 -> t2._2().getId(),
Encoders.STRING())
.distinct();
organizationIds
.joinWith(organization, organizationIds.col("value").equalTo(organization.col("id")))
.map(
(MapFunction<Tuple2<String, Organization>, eu.dnetlib.dhp.eosc.model.Organization>) t2 -> mapOrganization(
t2._2()),
Encoders.bean(eu.dnetlib.dhp.eosc.model.Organization.class))
.filter(Objects::nonNull)
.write()
.mode(SaveMode.Overwrite)
.option("compression", "gzip")
.json(workingPath + resultType + "organization");
eoscRelation
.joinWith(organization, eoscRelation.col("target").equalTo(organization.col("id")))
.map(
(MapFunction<Tuple2<Relation, Organization>, eu.dnetlib.dhp.eosc.model.Relation>) t2 -> {
if (isToBeRemovedOrg(t2._2()))
return new eu.dnetlib.dhp.eosc.model.Relation();
return eu.dnetlib.dhp.eosc.model.Relation
.newInstance(t2._1().getSource(), t2._1().getTarget());
},
Encoders.bean(eu.dnetlib.dhp.eosc.model.Relation.class))
.filter((FilterFunction<eu.dnetlib.dhp.eosc.model.Relation>) r -> StringUtil.isNotBlank(r.getSource()))
.write()
.mode(SaveMode.Overwrite)
.option("compression", "gzip")
.json(workingPath + resultType + "resultOrganization");
}
private static void addOrganizations(SparkSession spark, String inputPath, String workingPath,
String resultType) {
Dataset<Result> results = Utils
.readPath(spark, resultPath, Result.class);
.readPath(spark, workingPath + resultType, Result.class);
Dataset<Relation> relations = Utils
.readPath(spark, inputPath + "/relation", Relation.class)
@ -80,15 +157,19 @@ public class ExtendEoscResultWithOrganizationStep2 implements Serializable {
(FilterFunction<Relation>) r -> !r.getDataInfo().getDeletedbyinference() &&
!r.getDataInfo().getInvisible() && r.getSubRelType().equalsIgnoreCase(ModelConstants.AFFILIATION));
Dataset<Organization> organizations = Utils.readPath(spark, inputPath + "/organization", Organization.class);
Dataset<Organization> organizations = Utils
.readPath(spark, inputPath + "/organization", Organization.class)
.filter((FilterFunction<Organization>) o -> !o.getDataInfo().getDeletedbyinference());
Dataset<ResultOrganizations> resultOrganization = relations
.joinWith(organizations, relations.col("source").equalTo(organizations.col("id")), "left")
.joinWith(organizations, relations.col("source").equalTo(organizations.col("id")))
.map((MapFunction<Tuple2<Relation, Organization>, ResultOrganizations>) t2 -> {
ResultOrganizations rOrg = new ResultOrganizations();
if (t2._2() != null) {
ResultOrganizations rOrg = new ResultOrganizations();
rOrg.setResultId(t2._1().getTarget());
eu.dnetlib.dhp.eosc.model.Organization org = new eu.dnetlib.dhp.eosc.model.Organization();
Affiliation org = new Affiliation();
org.setId(t2._2().getId());
if (Optional.ofNullable(t2._2().getLegalname()).isPresent()) {
org.setName(t2._2().getLegalname().getValue());
@ -115,41 +196,149 @@ public class ExtendEoscResultWithOrganizationStep2 implements Serializable {
rOrg.setAffiliation(org);
return rOrg;
}
return null;
return rOrg;
}, Encoders.bean(ResultOrganizations.class))
.filter(Objects::nonNull);
.filter((FilterFunction<ResultOrganizations>) ro -> ro.getResultId() != null);
// resultOrganization.count();
results
.joinWith(resultOrganization, results.col("id").equalTo(resultOrganization.col("resultId")), "left")
.groupByKey(
(MapFunction<Tuple2<Result, ResultOrganizations>, String>) t2 -> t2._1().getId(), Encoders.STRING())
.mapGroups(
(MapGroupsFunction<String, Tuple2<Result, ResultOrganizations>, Result>) (s, it) -> {
Tuple2<Result, ResultOrganizations> first = it.next();
if (first._2() == null) {
return first._1();
}
Result ret = first._1();
List<eu.dnetlib.dhp.eosc.model.Organization> affiliation = new ArrayList<>();
Set<String> alreadyInsertedAffiliations = new HashSet<>();
affiliation.add(first._2().getAffiliation());
alreadyInsertedAffiliations.add(first._2().getAffiliation().getId());
it.forEachRemaining(res -> {
if (!alreadyInsertedAffiliations.contains(res._2().getAffiliation().getId())) {
affiliation.add(res._2().getAffiliation());
alreadyInsertedAffiliations.add(res._2().getAffiliation().getId());
}
});
ret.setAffiliation(affiliation);
return ret;
}, Encoders.bean(Result.class))
(MapGroupsFunction<String, Tuple2<Result, ResultOrganizations>, Result>) (s, it) -> addAffiliation(it),
Encoders.bean(Result.class))
.write()
.mode(SaveMode.Overwrite)
.option("compression", "gzip")
.json(outputPath);
.json(workingPath + resultType + "extendedaffiliation");
}
@Nullable
private static ResultOrganizations getResultOrganizations(Tuple2<Relation, Organization> t2) {
if (t2._2() != null) {
Organization organization = t2._2();
ResultOrganizations rOrg = new ResultOrganizations();
rOrg.setResultId(t2._1().getTarget());
Affiliation org = new Affiliation();
org.setId(organization.getId());
if (Optional.ofNullable(organization.getLegalname()).isPresent()) {
org.setName(organization.getLegalname().getValue());
} else {
org.setName("");
}
HashMap<String, Set<String>> organizationPids = new HashMap<>();
if (Optional.ofNullable(organization.getPid()).isPresent())
organization.getPid().forEach(p -> {
if (!organizationPids.containsKey(p.getQualifier().getClassid()))
organizationPids.put(p.getQualifier().getClassid(), new HashSet<>());
organizationPids.get(p.getQualifier().getClassid()).add(p.getValue());
});
List<OrganizationPid> pids = new ArrayList<>();
for (String key : organizationPids.keySet()) {
for (String value : organizationPids.get(key)) {
OrganizationPid pid = new OrganizationPid();
pid.setValue(value);
pid.setType(key);
pids.add(pid);
}
}
org.setPid(pids);
rOrg.setAffiliation(org);
return rOrg;
}
return null;
}
private static Result addAffiliation(Iterator<Tuple2<Result, ResultOrganizations>> it) {
Tuple2<Result, ResultOrganizations> first = it.next();
if (first._2() == null) {
return first._1();
}
Result ret = first._1();
List<Affiliation> affiliation = new ArrayList<>();
Set<String> alreadyInsertedAffiliations = new HashSet<>();
affiliation.add(first._2().getAffiliation());
alreadyInsertedAffiliations.add(first._2().getAffiliation().getId());
it.forEachRemaining(res -> {
if (!alreadyInsertedAffiliations.contains(res._2().getAffiliation().getId())) {
affiliation.add(res._2().getAffiliation());
alreadyInsertedAffiliations.add(res._2().getAffiliation().getId());
}
});
ret.setAffiliation(affiliation);
return ret;
}
private static eu.dnetlib.dhp.eosc.model.Organization mapOrganization(Organization org) {
if (isToBeRemovedOrg(org))
return null;
eu.dnetlib.dhp.eosc.model.Organization organization = new eu.dnetlib.dhp.eosc.model.Organization();
Optional
.ofNullable(org.getLegalshortname())
.ifPresent(value -> organization.setLegalshortname(value.getValue()));
Optional
.ofNullable(org.getLegalname())
.ifPresent(value -> organization.setLegalname(value.getValue()));
Optional
.ofNullable(org.getWebsiteurl())
.ifPresent(value -> organization.setWebsiteurl(value.getValue()));
Optional
.ofNullable(org.getAlternativeNames())
.ifPresent(
value -> organization
.setAlternativenames(
value
.stream()
.map(v -> v.getValue())
.collect(Collectors.toList())));
Optional
.ofNullable(org.getCountry())
.ifPresent(
value -> {
if (!value.getClassid().equals(UNKNOWN)) {
organization
.setCountry(
Country.newInstance(value.getClassid(), value.getClassname()));
}
});
Optional
.ofNullable(org.getId())
.ifPresent(value -> organization.setId(value));
Optional
.ofNullable(org.getPid())
.ifPresent(
value -> organization
.setPid(
value
.stream()
.map(p -> OrganizationPid.newInstance(p.getQualifier().getClassid(), p.getValue()))
.collect(Collectors.toList())));
return organization;
}
private static boolean isToBeRemovedOrg(Organization org) {
if (Boolean.TRUE.equals(org.getDataInfo().getDeletedbyinference()))
return true;
if (!Optional.ofNullable(org.getLegalname()).isPresent()
&& !Optional.ofNullable(org.getLegalshortname()).isPresent())
return true;
return false;
}
}

View File

@ -2,9 +2,8 @@
package eu.dnetlib.dhp.oa.graph.dump.eosc;
import java.io.Serializable;
import java.util.List;
import eu.dnetlib.dhp.eosc.model.Organization;
import eu.dnetlib.dhp.eosc.model.Affiliation;
/**
* @author miriam.baglioni
@ -12,7 +11,7 @@ import eu.dnetlib.dhp.eosc.model.Organization;
*/
public class ResultOrganizations implements Serializable {
private String resultId;
private Organization affiliation;
private Affiliation affiliation;
public String getResultId() {
return resultId;
@ -22,11 +21,11 @@ public class ResultOrganizations implements Serializable {
this.resultId = resultId;
}
public Organization getAffiliation() {
public Affiliation getAffiliation() {
return affiliation;
}
public void setAffiliation(Organization affiliation) {
public void setAffiliation(Affiliation affiliation) {
this.affiliation = affiliation;
}
}

View File

@ -4,11 +4,11 @@ package eu.dnetlib.dhp.oa.graph.dump.eosc;
import java.io.Serializable;
import java.util.List;
import eu.dnetlib.dhp.eosc.model.Project;
import eu.dnetlib.dhp.eosc.model.ProjectSummary;
public class ResultProject implements Serializable {
private String resultId;
private List<Project> projectsList;
private List<ProjectSummary> projectsList;
public String getResultId() {
return resultId;
@ -18,11 +18,11 @@ public class ResultProject implements Serializable {
this.resultId = resultId;
}
public List<Project> getProjectsList() {
public List<ProjectSummary> getProjectsList() {
return projectsList;
}
public void setProjectsList(List<Project> projectsList) {
public void setProjectsList(List<ProjectSummary> projectsList) {
this.projectsList = projectsList;
}
}

View File

@ -13,16 +13,14 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.dom4j.DocumentException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
import eu.dnetlib.dhp.oa.graph.dump.UtilCommunityAPI;
/**
* This class connects with the IS related to the isLookUpUrl got as parameter. It saves the information about the
* This class connects with the community APIs for production. It saves the information about the
* context that will guide the dump of the results. The information saved is a HashMap. The key is the id of a community
- research infrastructure/initiative, the value is the label of the research community - research
* infrastructure/initiative.
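Assuming CommunityMap is a plain String-to-String map that Jackson can bind, a hypothetical excerpt of the file this class writes (the ids and labels below are illustrative):
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.oa.graph.dump.eosc.CommunityMap;

public class CommunityMapFileExample {
	public static void main(String[] args) throws Exception {
		// hypothetical excerpt of the saved map: community id -> community label
		String json = "{\"eosc\":\"EOSC\",\"dh-ch\":\"Digital Humanities and Cultural Heritage\"}";
		CommunityMap map = new ObjectMapper().readValue(json, CommunityMap.class);
		map.forEach((id, label) -> System.out.println(id + " -> " + label));
	}
}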
@ -31,11 +29,11 @@ import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
public class SaveCommunityMap implements Serializable {
private static final Logger log = LoggerFactory.getLogger(SaveCommunityMap.class);
private final transient QueryInformationSystem queryInformationSystem;
private final transient UtilCommunityAPI queryInformationSystem;
private final transient BufferedWriter writer;
public SaveCommunityMap(String hdfsPath, String hdfsNameNode, String isLookUpUrl) throws IOException {
public SaveCommunityMap(String hdfsPath, String hdfsNameNode) throws IOException {
final Configuration conf = new Configuration();
conf.set("fs.defaultFS", hdfsNameNode);
FileSystem fileSystem = FileSystem.get(conf);
@ -45,8 +43,7 @@ public class SaveCommunityMap implements Serializable {
fileSystem.delete(hdfsWritePath, true);
}
queryInformationSystem = new QueryInformationSystem();
queryInformationSystem.setIsLookUp(Utils.getIsLookUpService(isLookUpUrl));
queryInformationSystem = new UtilCommunityAPI();
FSDataOutputStream fos = fileSystem.create(hdfsWritePath);
writer = new BufferedWriter(new OutputStreamWriter(fos, StandardCharsets.UTF_8));
@ -68,19 +65,23 @@ public class SaveCommunityMap implements Serializable {
final String outputPath = parser.get("outputPath");
log.info("outputPath: {}", outputPath);
final String isLookUpUrl = parser.get("isLookUpUrl");
log.info("isLookUpUrl: {}", isLookUpUrl);
final Boolean singleCommunity = Optional
.ofNullable(parser.get("singleDeposition"))
.map(Boolean::valueOf)
.orElse(false);
final SaveCommunityMap scm = new SaveCommunityMap(outputPath, nameNode, isLookUpUrl);
final String community_id = Optional.ofNullable(parser.get("communityId")).orElse(null);
scm.saveCommunityMap();
final SaveCommunityMap scm = new SaveCommunityMap(outputPath, nameNode);
scm.saveCommunityMap(singleCommunity, community_id);
}
private void saveCommunityMap()
throws ISLookUpException, IOException, DocumentException, SAXException {
private void saveCommunityMap(boolean singleCommunity, String communityId)
throws IOException {
final String communityMapString = Utils.OBJECT_MAPPER
.writeValueAsString(queryInformationSystem.getCommunityMap());
.writeValueAsString(queryInformationSystem.getCommunityMap(singleCommunity, communityId));
log.info("communityMap {} ", communityMapString);
writer
.write(

View File

@ -0,0 +1,167 @@
package eu.dnetlib.dhp.oa.graph.dump.eosc;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
import java.io.Serializable;
import java.util.Optional;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.api.java.function.MapGroupsFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.eosc.model.Organization;
import eu.dnetlib.dhp.eosc.model.Project;
import eu.dnetlib.dhp.eosc.model.Provenance;
import eu.dnetlib.dhp.eosc.model.RelType;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.DataInfo;
import eu.dnetlib.dhp.schema.oaf.Relation;
import scala.Tuple2;
/**
* @author miriam.baglioni
* @Date 12/01/23
*/
public class SparkDumpOrganizationProject implements Serializable {
private static final Logger log = LoggerFactory.getLogger(SparkDumpOrganizationProject.class);
public static void main(String[] args) throws Exception {
String jsonConfiguration = IOUtils
.toString(
SparkDumpOrganizationProject.class
.getResourceAsStream(
"/eu/dnetlib/dhp/oa/graph/dump/eosc_organizationprojectrelations_parameters.json"));
final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
parser.parseArgument(args);
Boolean isSparkSessionManaged = Optional
.ofNullable(parser.get("isSparkSessionManaged"))
.map(Boolean::valueOf)
.orElse(Boolean.TRUE);
log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
final String inputPath = parser.get("sourcePath");
log.info("inputPath: {}", inputPath);
final String outputPath = parser.get("outputPath");
log.info("outputPath: {}", outputPath);
final String workingPath = parser.get("workingPath");
log.info("workingPath: {}", workingPath);
SparkConf conf = new SparkConf();
runWithSparkSession(
conf,
isSparkSessionManaged,
spark -> {
Utils.removeOutputDir(spark, outputPath + "/organizationProject");
dumpRelation(spark, inputPath, outputPath, workingPath);
});
}
private static void dumpRelation(SparkSession spark, String inputPath, String outputPath, String workingPath) {
Dataset<Organization> organization = Utils
.readPath(spark, workingPath + "publicationorganization", Organization.class)
.union(Utils.readPath(spark, workingPath + "datasetorganization", Organization.class))
.union(Utils.readPath(spark, workingPath + "softwareorganization", Organization.class))
.union(Utils.readPath(spark, workingPath + "otherresearchproductorganization", Organization.class))
.groupByKey((MapFunction<Organization, String>) o -> o.getId(), Encoders.STRING())
.mapGroups(
(MapGroupsFunction<String, Organization, Organization>) (k, v) -> v.next(),
Encoders.bean(Organization.class));
Dataset<Project> project = Utils
.readPath(spark, workingPath + "publicationproject", Project.class)
.union(Utils.readPath(spark, workingPath + "datasetproject", Project.class))
.union(Utils.readPath(spark, workingPath + "softwareproject", Project.class))
.union(Utils.readPath(spark, workingPath + "otherresearchproductproject", Project.class))
.groupByKey((MapFunction<Project, String>) o -> o.getId(), Encoders.STRING())
.mapGroups((MapGroupsFunction<String, Project, Project>) (k, v) -> v.next(), Encoders.bean(Project.class));
organization
.write()
.mode(SaveMode.Overwrite)
.option("compression", "gzip")
.json(outputPath + "organization");
project
.write()
.mode(SaveMode.Overwrite)
.option("compression", "gzip")
.json(outputPath + "project");
Dataset<Relation> relation = Utils
.readPath(spark, inputPath + "/relation", Relation.class)
.filter(
(FilterFunction<Relation>) r -> !r.getDataInfo().getDeletedbyinference()
&& r.getRelClass().equalsIgnoreCase(ModelConstants.IS_PARTICIPANT));
Dataset<Relation> eoscOrgs = relation
.joinWith(organization, relation.col("source").equalTo(organization.col("id")))
.map((MapFunction<Tuple2<Relation, Organization>, Relation>) t2 -> t2._1(), Encoders.bean(Relation.class));
eoscOrgs
.joinWith(project, eoscOrgs.col("target").equalTo(project.col("id")))
.map(
(MapFunction<Tuple2<Relation, Project>, eu.dnetlib.dhp.eosc.model.Relation>) t2 -> eu.dnetlib.dhp.eosc.model.Relation
.newInstance(t2._1().getSource(), t2._1().getTarget()),
Encoders.bean(eu.dnetlib.dhp.eosc.model.Relation.class))
.write()
.mode(SaveMode.Overwrite)
.option("compression", "gzip")
.json(outputPath + "organizationProject");
Utils
.readPath(spark, workingPath + "publicationresultOrganization", eu.dnetlib.dhp.eosc.model.Relation.class)
.union(
Utils
.readPath(
spark, workingPath + "datasetresultOrganization", eu.dnetlib.dhp.eosc.model.Relation.class))
.union(
Utils
.readPath(
spark, workingPath + "softwareresultOrganization", eu.dnetlib.dhp.eosc.model.Relation.class))
.union(
Utils
.readPath(
spark, workingPath + "otherresearchproductresultOrganization",
eu.dnetlib.dhp.eosc.model.Relation.class))
.write()
.mode(SaveMode.Overwrite)
.option("compression", "gzip")
.json(outputPath + "resultOrganization");
Utils
.readPath(spark, workingPath + "publicationresultProject", eu.dnetlib.dhp.eosc.model.Relation.class)
.union(
Utils.readPath(spark, workingPath + "datasetresultProject", eu.dnetlib.dhp.eosc.model.Relation.class))
.union(
Utils.readPath(spark, workingPath + "softwareresultProject", eu.dnetlib.dhp.eosc.model.Relation.class))
.union(
Utils
.readPath(
spark, workingPath + "otherresearchproductresultProject",
eu.dnetlib.dhp.eosc.model.Relation.class))
.write()
.mode(SaveMode.Overwrite)
.option("compression", "gzip")
.json(outputPath + "resultProject");
}
}

View File

@ -1,115 +0,0 @@
package eu.dnetlib.dhp.oa.graph.dump.eosc;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
import java.io.Serializable;
import java.util.Optional;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.eosc.model.Provenance;
import eu.dnetlib.dhp.eosc.model.RelType;
import eu.dnetlib.dhp.schema.oaf.DataInfo;
import eu.dnetlib.dhp.schema.oaf.Relation;
/**
* @author miriam.baglioni
* @Date 12/01/23
*/
public class SparkDumpRelation implements Serializable {
private static final Logger log = LoggerFactory.getLogger(SparkDumpRelation.class);
public static void main(String[] args) throws Exception {
String jsonConfiguration = IOUtils
.toString(
SparkDumpRelation.class
.getResourceAsStream(
"/eu/dnetlib/dhp/oa/graph/dump/input_relationdump_parameters.json"));
final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
parser.parseArgument(args);
Boolean isSparkSessionManaged = Optional
.ofNullable(parser.get("isSparkSessionManaged"))
.map(Boolean::valueOf)
.orElse(Boolean.TRUE);
log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
final String inputPath = parser.get("sourcePath");
log.info("inputPath: {}", inputPath);
final String outputPath = parser.get("outputPath");
log.info("outputPath: {}", outputPath);
SparkConf conf = new SparkConf();
runWithSparkSession(
conf,
isSparkSessionManaged,
spark -> {
Utils.removeOutputDir(spark, outputPath);
dumpRelation(spark, inputPath, outputPath);
});
}
private static void dumpRelation(SparkSession spark, String inputPath, String outputPath) {
Dataset<Relation> relations = Utils.readPath(spark, inputPath, Relation.class);
relations
.map((MapFunction<Relation, eu.dnetlib.dhp.eosc.model.Relation>) relation -> {
eu.dnetlib.dhp.eosc.model.Relation relNew = new eu.dnetlib.dhp.eosc.model.Relation();
relNew
.setSource(
relation.getSource());
relNew
.setTarget(
relation.getTarget());
relNew
.setReltype(
RelType
.newInstance(
relation.getRelClass(),
relation.getSubRelType()));
Optional<DataInfo> odInfo = Optional.ofNullable(relation.getDataInfo());
if (odInfo.isPresent()) {
DataInfo dInfo = odInfo.get();
if (Optional.ofNullable(dInfo.getProvenanceaction()).isPresent() &&
Optional.ofNullable(dInfo.getProvenanceaction().getClassname()).isPresent()) {
relNew
.setProvenance(
Provenance
.newInstance(
dInfo.getProvenanceaction().getClassname(),
dInfo.getTrust()));
}
}
return relNew;
}, Encoders.bean(eu.dnetlib.dhp.eosc.model.Relation.class))
.write()
.option("compression", "gzip")
.mode(SaveMode.Append)
.json(outputPath);
}
}

View File

@ -26,8 +26,8 @@ import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.eosc.model.Funder;
import eu.dnetlib.dhp.eosc.model.Project;
import eu.dnetlib.dhp.eosc.model.FunderShort;
import eu.dnetlib.dhp.eosc.model.ProjectSummary;
import eu.dnetlib.dhp.eosc.model.Provenance;
import eu.dnetlib.dhp.eosc.model.Validated;
import eu.dnetlib.dhp.schema.common.ModelConstants;
@ -103,9 +103,9 @@ public class SparkPrepareResultProject implements Serializable {
rp.setResultId(s);
eu.dnetlib.dhp.schema.oaf.Project p = first._1();
projectSet.add(p.getId());
Project ps = getProject(p, first._2);
ProjectSummary ps = getProject(p, first._2);
List<Project> projList = new ArrayList<>();
List<ProjectSummary> projList = new ArrayList<>();
projList.add(ps);
rp.setProjectsList(projList);
it.forEachRemaining(c -> {
@ -132,8 +132,8 @@ public class SparkPrepareResultProject implements Serializable {
.json(outputPath);
}
private static Project getProject(eu.dnetlib.dhp.schema.oaf.Project op, Relation relation) {
Project p = Project
private static ProjectSummary getProject(eu.dnetlib.dhp.schema.oaf.Project op, Relation relation) {
ProjectSummary p = ProjectSummary
.newInstance(
op.getId(),
op.getCode().getValue(),
@ -148,7 +148,7 @@ public class SparkPrepareResultProject implements Serializable {
Optional
.ofNullable(op.getFundingtree())
.map(value -> {
List<Funder> tmp = value
List<FunderShort> tmp = value
.stream()
.map(ft -> getFunder(ft.getValue()))
.collect(Collectors.toList());
@ -174,8 +174,8 @@ public class SparkPrepareResultProject implements Serializable {
}
private static Funder getFunder(String fundingtree) {
final Funder f = new Funder();
private static FunderShort getFunder(String fundingtree) {
final FunderShort f = new FunderShort();
final Document doc;
try {
final SAXReader reader = new SAXReader();

View File

@ -80,14 +80,12 @@ public class SparkSelectRelation implements Serializable {
Dataset<Tuple2<String, String>> resultIds = Utils
.readPath(spark, outputPath + "/publication", Result.class)
.map(
(MapFunction<Result, Tuple2<String, String>>) p -> new Tuple2<>(p.getId(), p.getType()),
Encoders.tuple(Encoders.STRING(), Encoders.STRING()))
.union(
Utils
.readPath(spark, outputPath + "/dataset", Result.class)
.map(
(MapFunction<Result, Tuple2<String, String>>) d -> new Tuple2<>(d.getId(), d.getType()),
Encoders.tuple(Encoders.STRING(), Encoders.STRING())))

View File

@ -4,23 +4,33 @@ package eu.dnetlib.dhp.oa.graph.dump.eosc;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
import java.io.Serializable;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.ForeachFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.*;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Node;
import org.dom4j.io.SAXReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.eosc.model.Result;
import eu.dnetlib.dhp.eosc.model.*;
import eu.dnetlib.dhp.oa.graph.dump.Constants;
import eu.dnetlib.dhp.schema.oaf.Field;
import eu.dnetlib.dhp.schema.oaf.Project;
import scala.Array;
import scala.Tuple2;
public class SparkUpdateProjectInfo implements Serializable {
@ -47,12 +57,18 @@ public class SparkUpdateProjectInfo implements Serializable {
final String inputPath = parser.get("sourcePath");
log.info("inputPath: {}", inputPath);
final String outputPath = parser.get("outputPath");
log.info("outputPath: {}", outputPath);
final String workingPath = parser.get("workingPath");
log.info("workingPath: {}", workingPath);
final String preparedInfoPath = parser.get("preparedInfoPath");
log.info("preparedInfoPath: {}", preparedInfoPath);
final String outputPath = parser.get("outputPath");
log.info("outputPath: {}", outputPath);
final String resultType = parser.get("resultType");
log.info("resultType: {}", resultType);
final String dumpType = Optional
.ofNullable(parser.get("dumpType"))
.orElse(Constants.DUMPTYPE.COMMUNITY.getType());
@ -64,19 +80,20 @@ public class SparkUpdateProjectInfo implements Serializable {
conf,
isSparkSessionManaged,
spark -> {
Utils.removeOutputDir(spark, outputPath);
extend(spark, inputPath, outputPath, preparedInfoPath);
Utils.removeOutputDir(spark, workingPath + resultType + "extendedproject");
extend(spark, inputPath, workingPath, preparedInfoPath, outputPath, resultType);
});
}
private static void extend(
SparkSession spark,
String inputPath,
String workingPath,
String preparedInfoPath,
String outputPath,
String preparedInfoPath) {
Dataset<Result> result = Utils.readPath(spark, inputPath, Result.class);
String resultType) {
Dataset<Result> result = Utils.readPath(spark, workingPath + resultType + "extendedaffiliation", Result.class);
Dataset<ResultProject> resultProject = Utils.readPath(spark, preparedInfoPath, ResultProject.class);
result
.joinWith(
@ -90,7 +107,209 @@ public class SparkUpdateProjectInfo implements Serializable {
.write()
.option("compression", "gzip")
.mode(SaveMode.Append)
.json(outputPath);
.json(workingPath + resultType + "extendedproject");
Dataset<Project> project = Utils.readPath(spark, inputPath + "/project", Project.class);
Dataset<String> projectIds = result
.joinWith(resultProject, result.col("id").equalTo(resultProject.col("resultId")))
.flatMap(
(FlatMapFunction<Tuple2<Result, ResultProject>, String>) t2 -> t2
._2()
.getProjectsList()
.stream()
.map(p -> p.getId())
.collect(Collectors.toList())
.iterator(),
Encoders.STRING())
.distinct();
projectIds
.joinWith(project, projectIds.col("value").equalTo(project.col("id")))
.map(
(MapFunction<Tuple2<String, Project>, eu.dnetlib.dhp.eosc.model.Project>) t2 -> mapProject(t2._2()),
Encoders.bean(eu.dnetlib.dhp.eosc.model.Project.class))
.write()
.mode(SaveMode.Overwrite)
.option("compression", "gzip")
.json(workingPath + resultType + "project");
result
.joinWith(
resultProject, result.col("id").equalTo(resultProject.col("resultId")))
.map(
(MapFunction<Tuple2<Result, ResultProject>, ResultProject>) t2 -> t2._2(),
Encoders.bean(ResultProject.class))
.flatMap(
(FlatMapFunction<ResultProject, Relation>) rp -> rp
.getProjectsList()
.stream()
.map(p -> Relation.newInstance(rp.getResultId(), p.getId()))
.collect(Collectors.toList())
.iterator(),
Encoders.bean(Relation.class))
.write()
.mode(SaveMode.Overwrite)
.option("compression", "gzip")
.json(workingPath + resultType + "resultProject");
}
private static eu.dnetlib.dhp.eosc.model.Project mapProject(eu.dnetlib.dhp.schema.oaf.Project p)
throws DocumentException {
if (Boolean.TRUE.equals(p.getDataInfo().getDeletedbyinference()))
return null;
eu.dnetlib.dhp.eosc.model.Project project = new eu.dnetlib.dhp.eosc.model.Project();
Optional
.ofNullable(p.getId())
.ifPresent(id -> project.setId(id));
Optional
.ofNullable(p.getWebsiteurl())
.ifPresent(w -> project.setWebsiteurl(w.getValue()));
Optional
.ofNullable(p.getCode())
.ifPresent(code -> project.setCode(code.getValue()));
Optional
.ofNullable(p.getAcronym())
.ifPresent(acronym -> project.setAcronym(acronym.getValue()));
Optional
.ofNullable(p.getTitle())
.ifPresent(title -> project.setTitle(title.getValue()));
Optional
.ofNullable(p.getStartdate())
.ifPresent(sdate -> project.setStartdate(sdate.getValue()));
Optional
.ofNullable(p.getEnddate())
.ifPresent(edate -> project.setEnddate(edate.getValue()));
Optional
.ofNullable(p.getCallidentifier())
.ifPresent(cide -> project.setCallidentifier(cide.getValue()));
Optional
.ofNullable(p.getKeywords())
.ifPresent(key -> project.setKeywords(key.getValue()));
Optional<Field<String>> omandate = Optional.ofNullable(p.getOamandatepublications());
Optional<Field<String>> oecsc39 = Optional.ofNullable(p.getEcsc39());
boolean mandate = false;
if (omandate.isPresent()) {
if (omandate.get().getValue().equals("true")) {
mandate = true;
}
}
if (oecsc39.isPresent()) {
if (oecsc39.get().getValue().equals("true")) {
mandate = true;
}
}
project.setOpenaccessmandateforpublications(mandate);
project.setOpenaccessmandatefordataset(false);
Optional
.ofNullable(p.getEcarticle29_3())
.ifPresent(oamandate -> project.setOpenaccessmandatefordataset(oamandate.getValue().equals("true")));
project
.setSubject(
Optional
.ofNullable(p.getSubjects())
.map(subjs -> subjs.stream().map(s -> s.getValue()).collect(Collectors.toList()))
.orElse(new ArrayList<>()));
Optional
.ofNullable(p.getSummary())
.ifPresent(summary -> project.setSummary(summary.getValue()));
Optional<Float> ofundedamount = Optional.ofNullable(p.getFundedamount());
Optional<Field<String>> ocurrency = Optional.ofNullable(p.getCurrency());
Optional<Float> ototalcost = Optional.ofNullable(p.getTotalcost());
if (ocurrency.isPresent()) {
if (ofundedamount.isPresent()) {
if (ototalcost.isPresent()) {
project
.setGranted(
Granted.newInstance(ocurrency.get().getValue(), ototalcost.get(), ofundedamount.get()));
} else {
project.setGranted(Granted.newInstance(ocurrency.get().getValue(), ofundedamount.get()));
}
}
}
project
.setH2020programme(
Optional
.ofNullable(p.getH2020classification())
.map(
classification -> classification
.stream()
.map(
c -> Programme
.newInstance(
c.getH2020Programme().getCode(), c.getH2020Programme().getDescription()))
.collect(Collectors.toList()))
.orElse(new ArrayList<>()));
Optional<List<Field<String>>> ofundTree = Optional
.ofNullable(p.getFundingtree());
List<Funder> funList = new ArrayList<>();
if (ofundTree.isPresent()) {
for (Field<String> fundingtree : ofundTree.get()) {
funList.add(getFunder(fundingtree.getValue()));
}
}
project.setFunding(funList);
return project;
}
public static Funder getFunder(String fundingtree) throws DocumentException {
Funder f = new Funder();
final Document doc;
doc = new SAXReader().read(new StringReader(fundingtree));
f.setShortName(((org.dom4j.Node) (doc.selectNodes("//funder/shortname").get(0))).getText());
f.setName(((org.dom4j.Node) (doc.selectNodes("//funder/name").get(0))).getText());
f.setJurisdiction(((org.dom4j.Node) (doc.selectNodes("//funder/jurisdiction").get(0))).getText());
String id = "";
StringBuilder bld = new StringBuilder();
int level = 0;
List<org.dom4j.Node> nodes = doc.selectNodes("//funding_level_" + level);
while (!nodes.isEmpty()) {
for (org.dom4j.Node n : nodes) {
List node = n.selectNodes("./id");
id = ((org.dom4j.Node) node.get(0)).getText();
id = id.substring(id.indexOf("::") + 2);
node = n.selectNodes("./description");
bld.append(((Node) node.get(0)).getText() + " - ");
}
level += 1;
nodes = doc.selectNodes("//funding_level_" + level);
}
String description = bld.toString();
if (!id.equals("")) {
Fundings fundings = new Fundings();
fundings.setId(id);
fundings.setDescription(description.substring(0, description.length() - 3).trim());
f.setFunding_stream(fundings);
}
return f;
}
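
Note: for reference, this is a minimal fundingtree payload satisfying the XPath selectors used in getFunder above. The element layout is inferred from the selectors themselves rather than from a schema, so treat it as an illustrative sketch:

// hypothetical input assembled to match the //funder/* and //funding_level_N selectors
// (getFunder throws DocumentException if the XML does not parse)
String fundingtree = "<fundingtree><funder>"
	+ "<shortname>AKA</shortname>"
	+ "<name>Academy of Finland</name>"
	+ "<jurisdiction>FI</jurisdiction>"
	+ "</funder><funding_level_0>"
	+ "<id>aka_________::AKA</id>"
	+ "<description>Academy of Finland</description>"
	+ "</funding_level_0></fundingtree>";
Funder f = SparkUpdateProjectInfo.getFunder(fundingtree);
// f.getShortName() -> "AKA"; the funding stream id is the substring after "::",
// and the level descriptions are concatenated with " - " before trimming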

View File

@ -0,0 +1,3 @@
sourcePath=/tmp/prod_provision/graph/20_graph_blacklisted
outputPath=/tmp/miriam/graph_dumps/country_PT
country=PT

View File

@ -0,0 +1,16 @@
#PROPERTIES FOR EOSC DUMP
sourcePath=/tmp/prod_provision/graph/20_graph_blacklisted
outputPath=/tmp/miriam/graph_dumps/eosc_prod_extended
#accessToken for the openaire sandbox (used with the connectionUrl below)
accessToken=OzzOsyucEIHxCEfhlpsMo3myEiwpCza3trCRL7ddfGTAK9xXkIP2MbXd6Vg4
connectionUrl=https://sandbox.zenodo.org/api/deposit/depositions
singleDeposition=false
conceptRecordId=1094304
depositionType=version
metadata=""
depositionId=6616871
removeSet=merges;isMergedIn
postgresURL=jdbc:postgresql://postgresql.services.openaire.eu:5432/dnet_openaireplus
postgresUser=dnet
postgresPassword=dnetPwd
isLookUpUrl=http://services.openaire.eu:8280/is/services/isLookUp?wsdl

View File

@ -95,7 +95,6 @@
<main-class>eu.dnetlib.dhp.oa.graph.dump.eosc.SaveCommunityMap</main-class>
<arg>--outputPath</arg><arg>${workingDir}/communityMap</arg>
<arg>--nameNode</arg><arg>${nameNode}</arg>
<arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
</java>
<ok to="get_ds_master_duplicate"/>
<error to="Kill"/>
@ -164,32 +163,9 @@
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
</spark-opts>
<arg>--sourcePath</arg><arg>${sourcePath}</arg>
<arg>--resultPath</arg><arg>${workingDir}/dump/publication</arg>
<arg>--outputPath</arg><arg>${workingDir}/dump/publicationextendedaffiliation</arg>
</spark>
<ok to="wait_eosc_dump"/>
<error to="Kill"/>
</action>
<action name="extend_publication_with_indicators">
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>Extend Dump Publication with indicators </name>
<class>eu.dnetlib.dhp.oa.graph.dump.eosc.ExtendWithUsageCounts</class>
<jar>dump-${projectVersion}.jar</jar>
<spark-opts>
--executor-memory=${sparkExecutorMemory}
--executor-cores=${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
</spark-opts>
<arg>--actionSetPath</arg><arg>${actionSetPath}</arg>
<arg>--resultPath</arg><arg>${workingDir}/dump/publicationextendedaffiliation</arg>
<arg>--outputPath</arg><arg>${workingDir}/dump/publicationextended</arg>
<arg>--workingPath</arg><arg>${workingDir}/dump/</arg>
<arg>--outputPath</arg><arg>${outputPath}/dump/</arg>
<arg>--resultType</arg><arg>publication</arg>
</spark>
<ok to="wait_eosc_dump"/>
<error to="Kill"/>
@ -240,37 +216,13 @@
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
</spark-opts>
<arg>--sourcePath</arg><arg>${sourcePath}</arg>
<arg>--resultPath</arg><arg>${workingDir}/dump/dataset</arg>
<arg>--outputPath</arg><arg>${workingDir}/dump/datasetextendedaffiliation</arg>
<arg>--workingPath</arg><arg>${workingDir}/dump/</arg>
<arg>--outputPath</arg><arg>${outputPath}/dump/</arg>
<arg>--resultType</arg><arg>dataset</arg>
</spark>
<ok to="wait_eosc_dump"/>
<error to="Kill"/>
</action>
<action name="extend_dataset_with_indicators">
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>Extend Dump Dataset with indicators </name>
<class>eu.dnetlib.dhp.oa.graph.dump.eosc.ExtendWithUsageCounts</class>
<jar>dump-${projectVersion}.jar</jar>
<spark-opts>
--executor-memory=${sparkExecutorMemory}
--executor-cores=${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
</spark-opts>
<arg>--actionSetPath</arg><arg>${actionSetPath}</arg>
<arg>--resultPath</arg><arg>${workingDir}/dump/datasetextendedaffiliation</arg>
<arg>--outputPath</arg><arg>${workingDir}/dump/datasetextended</arg>
</spark>
<ok to="wait_eosc_dump"/>
<error to="Kill"/>
</action>
<action name="dump_eosc_orp">
<spark xmlns="uri:oozie:spark-action:0.2">
@ -316,32 +268,9 @@
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
</spark-opts>
<arg>--sourcePath</arg><arg>${sourcePath}</arg>
<arg>--resultPath</arg><arg>${workingDir}/dump/otherresearchproduct</arg>
<arg>--outputPath</arg><arg>${workingDir}/dump/otherresearchproductextendedaffiliation</arg>
</spark>
<ok to="wait_eosc_dump"/>
<error to="Kill"/>
</action>
<action name="extend_orp_with_indicators">
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>Extend Dump ORP with indicators </name>
<class>eu.dnetlib.dhp.oa.graph.dump.eosc.ExtendWithUsageCounts</class>
<jar>dump-${projectVersion}.jar</jar>
<spark-opts>
--executor-memory=${sparkExecutorMemory}
--executor-cores=${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
</spark-opts>
<arg>--actionSetPath</arg><arg>${actionSetPath}</arg>
<arg>--resultPath</arg><arg>${workingDir}/dump/otherresearchproductextendedaffiliation</arg>
<arg>--outputPath</arg><arg>${workingDir}/dump/otherresearchproductextended</arg>
<arg>--workingPath</arg><arg>${workingDir}/dump/</arg>
<arg>--outputPath</arg><arg>${outputPath}/dump/</arg>
<arg>--resultType</arg><arg>otherresearchproduct</arg>
</spark>
<ok to="wait_eosc_dump"/>
<error to="Kill"/>
@ -392,37 +321,15 @@
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
</spark-opts>
<arg>--sourcePath</arg><arg>${sourcePath}</arg>
<arg>--resultPath</arg><arg>${workingDir}/dump/software</arg>
<arg>--outputPath</arg><arg>${workingDir}/dump/softwareextendedaffiliation</arg>
</spark>
<ok to="wait_eosc_dump"/>
<error to="Kill"/>
</action>
<action name="extend_software_with_indicators">
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>Extend Dump ORP with indicators </name>
<class>eu.dnetlib.dhp.oa.graph.dump.eosc.ExtendWithUsageCounts</class>
<jar>dump-${projectVersion}.jar</jar>
<spark-opts>
--executor-memory=${sparkExecutorMemory}
--executor-cores=${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
</spark-opts>
<arg>--actionSetPath</arg><arg>${actionSetPath}</arg>
<arg>--resultPath</arg><arg>${workingDir}/dump/softwareextendedaffiliation</arg>
<arg>--outputPath</arg><arg>${workingDir}/dump/softwareextended</arg>
<arg>--workingPath</arg><arg>${workingDir}/dump/</arg>
<arg>--outputPath</arg><arg>${outputPath}/dump/</arg>
<arg>--resultType</arg><arg>software</arg>
</spark>
<ok to="wait_eosc_dump"/>
<error to="Kill"/>
</action>
<join name="wait_eosc_dump" to="prepareResultProject"/>
<action name="prepareResultProject">
@ -473,10 +380,12 @@
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
</spark-opts>
<arg>--sourcePath</arg><arg>${workingDir}/dump/publicationextendedaffiliation</arg>
<arg>--outputPath</arg><arg>${workingDir}/dump/publicationextendedproject</arg>
<arg>--sourcePath</arg><arg>${sourcePath}</arg>
<arg>--workingPath</arg><arg>${workingDir}/dump/</arg>
<arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
<arg>--dumpType</arg><arg>eosc</arg>
<arg>--outputPath</arg><arg>${outputPath}/dump/</arg>
<arg>--resultType</arg><arg>publication</arg>
</spark>
<ok to="join_extend"/>
<error to="Kill"/>
@ -499,10 +408,12 @@
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
</spark-opts>
<arg>--sourcePath</arg><arg>${workingDir}/dump/datasetextendedaffiliation</arg>
<arg>--outputPath</arg><arg>${workingDir}/dump/datasetextendedproject</arg>
<arg>--sourcePath</arg><arg>${sourcePath}</arg>
<arg>--workingPath</arg><arg>${workingDir}/dump/</arg>
<arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
<arg>--dumpType</arg><arg>eosc</arg>
<arg>--outputPath</arg><arg>${outputPath}/dump/</arg>
<arg>--resultType</arg><arg>dataset</arg>
</spark>
<ok to="join_extend"/>
<error to="Kill"/>
@ -525,10 +436,12 @@
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
</spark-opts>
<arg>--sourcePath</arg><arg>${workingDir}/dump/otherresearchproductextendedaffiliation</arg>
<arg>--outputPath</arg><arg>${workingDir}/dump/otherresearchproductextendedproject</arg>
<arg>--sourcePath</arg><arg>${sourcePath}</arg>
<arg>--workingPath</arg><arg>${workingDir}/dump/</arg>
<arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
<arg>--dumpType</arg><arg>eosc</arg>
<arg>--outputPath</arg><arg>${outputPath}/dump/</arg>
<arg>--resultType</arg><arg>otherresearchproduct</arg>
</spark>
<ok to="join_extend"/>
<error to="Kill"/>
@ -551,11 +464,12 @@
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
</spark-opts>
<arg>--sourcePath</arg><arg>${workingDir}/dump/softwareextendedaffiliation
</arg>
<arg>--outputPath</arg><arg>${workingDir}/dump/softwareextendedproject</arg>
<arg>--sourcePath</arg><arg>${sourcePath}</arg>
<arg>--workingPath</arg><arg>${workingDir}/dump/</arg>
<arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
<arg>--dumpType</arg><arg>eosc</arg>
<arg>--outputPath</arg><arg>${outputPath}/dump/</arg>
<arg>--resultType</arg><arg>software</arg>
</spark>
<ok to="join_extend"/>
<error to="Kill"/>
@ -696,7 +610,32 @@
</action>
<join name="join_extend_relation" to="make_archive"/>
<join name="join_extend_relation" to="dump_organization_project_relations"/>
<action name="dump_organization_project_relations">
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>Dump of the relations between organizations and projects in the subset of entities relevant for EOSC</name>
<class>eu.dnetlib.dhp.oa.graph.dump.eosc.SparkDumpOrganizationProject</class>
<jar>dump-${projectVersion}.jar</jar>
<spark-opts>
--executor-memory=${sparkExecutorMemory}
--executor-cores=${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
</spark-opts>
<arg>--sourcePath</arg><arg>${sourcePath}</arg>
<arg>--outputPath</arg><arg>${outputPath}/dump/</arg>
<arg>--workingPath</arg><arg>${workingDir}/dump/</arg>
</spark>
<ok to="make_archive"/>
<error to="Kill"/>
</action>
<action name="make_archive">
<java>
<main-class>eu.dnetlib.dhp.oa.graph.dump.MakeTar</main-class>

View File

@ -1,12 +1,7 @@
[
{
"paramName":"is",
"paramLongName":"isLookUpUrl",
"paramDescription": "URL of the isLookUp Service",
"paramRequired": true
},
{
"paramName":"nn",
"paramLongName":"nameNode",

View File

@ -28,7 +28,12 @@
"paramLongName":"workingPath",
"paramDescription": "The path to the community map",
"paramRequired": false
}
},
{
"paramName":"rt",
"paramLongName":"resultType",
"paramDescription": "the result type",
"paramRequired": false
}
]

View File

@ -0,0 +1,32 @@
[
{
"paramName":"s",
"paramLongName":"sourcePath",
"paramDescription": "the name node",
"paramRequired": true
},
{
"paramName": "out",
"paramLongName": "outputPath",
"paramDescription": "the path used to store temporary output files",
"paramRequired": true
},
{
"paramName": "issm",
"paramLongName": "isSparkSessionManaged",
"paramDescription": "the path used to store temporary output files",
"paramRequired": false
},
{
"paramName": "wp",
"paramLongName": "workingPath",
"paramDescription": "the path used to store temporary output files",
"paramRequired": true
}
]
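
Note: given these parameters, the organization/project dump step can be exercised much like the tests below do. A sketch with placeholder paths (the class and flags come from the workflow action above; the paths are illustrative only):

// placeholder paths; isSparkSessionManaged=false lets the caller (e.g. a test) own the SparkSession
SparkDumpOrganizationProject.main(new String[] {
	"-isSparkSessionManaged", Boolean.FALSE.toString(),
	"-sourcePath", "/tmp/prod_provision/graph/20_graph_blacklisted",
	"-workingPath", "/tmp/wd/dump/",
	"-outputPath", "/tmp/out/dump/"
});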

View File

@ -29,6 +29,18 @@
"paramLongName": "dumpType",
"paramDescription": "the dump type",
"paramRequired": false
},
{
"paramName": "wp",
"paramLongName": "workingPath",
"paramDescription": "the working path",
"paramRequired": false
},
{
"paramName": "rt",
"paramLongName": "resultType",
"paramDescription": "the working path",
"paramRequired": false
}
]

View File

@ -0,0 +1,23 @@
[
{
"paramName":"s",
"paramLongName":"sourcePath",
"paramDescription": "the path of the sequencial file to read",
"paramRequired": true
},
{
"paramName": "out",
"paramLongName": "outputPath",
"paramDescription": "the path used to store temporary output files",
"paramRequired": true
},
{
"paramName": "ssm",
"paramLongName": "isSparkSessionManaged",
"paramDescription": "true if the spark session is managed, false otherwise",
"paramRequired": false
}
]

View File

@ -24,7 +24,7 @@ import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.eosc.model.Project;
import eu.dnetlib.dhp.eosc.model.ProjectSummary;
import eu.dnetlib.dhp.eosc.model.Result;
import eu.dnetlib.dhp.oa.graph.dump.eosc.SparkUpdateProjectInfo;
@ -194,15 +194,15 @@ public class UpdateProjectInfoTest {
.filter("id = '50|pensoft_____::00ea4a1cd53806a97d62ea6bf268f2a2' and code = '119027'")
.count());
Project project = verificationDataset
ProjectSummary project = verificationDataset
.map(
(MapFunction<Result, Project>) cr -> cr
(MapFunction<Result, ProjectSummary>) cr -> cr
.getProjects()
.stream()
.filter(p -> p.getValidated() != null)
.collect(Collectors.toList())
.get(0),
Encoders.bean(Project.class))
Encoders.bean(ProjectSummary.class))
.first();
Assertions.assertTrue(project.getFunder().getName().equals("Academy of Finland"));
@ -213,13 +213,13 @@ public class UpdateProjectInfoTest {
project = verificationDataset
.map(
(MapFunction<Result, Project>) cr -> cr
(MapFunction<Result, ProjectSummary>) cr -> cr
.getProjects()
.stream()
.filter(p -> p.getValidated() == null)
.collect(Collectors.toList())
.get(0),
Encoders.bean(Project.class))
Encoders.bean(ProjectSummary.class))
.first();
Assertions.assertTrue(project.getFunder().getName().equals("European Commission"));

View File

@ -0,0 +1,205 @@
package eu.dnetlib.dhp.oa.graph.dump.eosc;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.List;
import java.util.Optional;
import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.eosc.model.Affiliation;
import eu.dnetlib.dhp.eosc.model.Organization;
import eu.dnetlib.dhp.eosc.model.Relation;
import eu.dnetlib.dhp.eosc.model.Result;
/**
* @author miriam.baglioni
* @Date 25/10/23
*/
public class ExtendAffiliationTest {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static SparkSession spark;
private static Path workingDir;
private static final Logger log = LoggerFactory
.getLogger(ExtendAffiliationTest.class);
private static HashMap<String, String> map = new HashMap<>();
@BeforeAll
public static void beforeAll() throws IOException {
workingDir = Files
.createTempDirectory(ExtendAffiliationTest.class.getSimpleName());
log.info("using work dir {}", workingDir);
SparkConf conf = new SparkConf();
conf.setAppName(ExtendAffiliationTest.class.getSimpleName());
conf.setMaster("local[*]");
conf.set("spark.driver.host", "localhost");
conf.set("hive.metastore.local", "true");
conf.set("spark.ui.enabled", "false");
conf.set("spark.sql.warehouse.dir", workingDir.toString());
conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
spark = SparkSession
.builder()
.appName(ExtendAffiliationTest.class.getSimpleName())
.config(conf)
.getOrCreate();
}
@AfterAll
public static void afterAll() throws IOException {
FileUtils.deleteDirectory(workingDir.toFile());
spark.stop();
}
@Test
public void ExtendEoscResultWithOrganizationTest() throws Exception {
final String sourcePath = getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/eosc/input")
.getPath();
final String workingPath = getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/eosc/working/")
.getPath();
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
spark
.read()
.textFile(workingPath + "publication")
.write()
.text(workingDir.toString() + "/working/publication");
ExtendEoscResultWithOrganizationStep2.main(new String[] {
"-isSparkSessionManaged", Boolean.FALSE.toString(),
"-outputPath", workingDir.toString() + "/",
"-sourcePath", sourcePath,
"-resultType", "publication",
"-workingPath", workingDir.toString() + "/working/"
});
/*
* affiliation relationships 20|13811704aa70::51a6ade52065e3b371d1ae822e07f1ff ->
* 50|06cdd3ff4700::93859bd27121c3ee7c6ee4bfb1790cba
*/
JavaRDD<Result> tmp = sc
.textFile(workingDir.toString() + "/working/publicationextendedaffiliation")
.map(item -> OBJECT_MAPPER.readValue(item, Result.class));
Assertions.assertEquals(3, tmp.count());
tmp.foreach(p -> System.out.println(OBJECT_MAPPER.writeValueAsString(p)));
Assertions
.assertEquals(
1,
tmp
.filter(r -> Optional.ofNullable(r.getAffiliation()).isPresent() && r.getAffiliation().size() > 0)
.count());
Assertions
.assertEquals(
1,
tmp
.filter(r -> r.getId().equalsIgnoreCase("50|06cdd3ff4700::93859bd27121c3ee7c6ee4bfb1790cba"))
.first()
.getAffiliation()
.size());
List<Affiliation> affiliations = tmp
.filter(r -> r.getId().equalsIgnoreCase("50|06cdd3ff4700::93859bd27121c3ee7c6ee4bfb1790cba"))
.first()
.getAffiliation();
Assertions
.assertTrue(
affiliations.stream().anyMatch(a -> a.getName().equalsIgnoreCase("Doris Engineering (France)")));
Affiliation organization = affiliations
.stream()
.filter(a -> a.getId().equalsIgnoreCase("20|13811704aa70::51a6ade52065e3b371d1ae822e07f1ff"))
.findFirst()
.get();
Assertions.assertEquals("Doris Engineering (France)", organization.getName());
Assertions
.assertTrue(
organization
.getPid()
.stream()
.anyMatch(
p -> p.getValue().equalsIgnoreCase("grid.432986.2") && p.getType().equalsIgnoreCase("grid")));
Assertions
.assertTrue(
organization
.getPid()
.stream()
.anyMatch(
p -> p.getValue().equalsIgnoreCase("https://ror.org/03nd0ms94")
&& p.getType().equalsIgnoreCase("ror")));
Assertions.assertEquals(2, organization.getPid().size());
}
@Test
public void selectEoscResults() throws Exception {
final String sourcePath = getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/eosc/input")
.getPath();
final String workingPath = getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/eosc/working/")
.getPath();
ExtendEoscResultWithOrganizationStep2.main(new String[] {
"-isSparkSessionManaged", Boolean.FALSE.toString(),
"-outputPath", workingDir.toString() + "/",
"-sourcePath", sourcePath,
"-resultType", "publication",
"-workingPath", workingPath
});
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
JavaRDD<Organization> tmp = sc
.textFile(workingDir.toString() + "/organization")
.map(item -> OBJECT_MAPPER.readValue(item, Organization.class));
JavaRDD<Relation> rels = sc
.textFile(workingDir.toString() + "/resultOrganization")
.map(item -> OBJECT_MAPPER.readValue(item, Relation.class));
System.out.println(tmp.count());
Assertions.assertEquals(2, tmp.count());
Assertions.assertEquals(2, rels.count());
rels.foreach(r -> Assertions.assertTrue(r.getSource().startsWith("50|")));
rels.foreach(r -> Assertions.assertTrue(r.getTarget().startsWith("20|")));
}
}

View File

@ -0,0 +1,163 @@
package eu.dnetlib.dhp.oa.graph.dump.eosc;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.List;
import java.util.Optional;
import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.eosc.model.*;
/**
* @author miriam.baglioni
* @Date 25/10/23
*/
public class ExtendProjectTest {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static SparkSession spark;
private static Path workingDir;
private static final Logger log = LoggerFactory
.getLogger(ExtendProjectTest.class);
private static HashMap<String, String> map = new HashMap<>();
@BeforeAll
public static void beforeAll() throws IOException {
workingDir = Files
.createTempDirectory(ExtendProjectTest.class.getSimpleName());
log.info("using work dir {}", workingDir);
SparkConf conf = new SparkConf();
conf.setAppName(ExtendProjectTest.class.getSimpleName());
conf.setMaster("local[*]");
conf.set("spark.driver.host", "localhost");
conf.set("hive.metastore.local", "true");
conf.set("spark.ui.enabled", "false");
conf.set("spark.sql.warehouse.dir", workingDir.toString());
conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
spark = SparkSession
.builder()
.appName(ExtendProjectTest.class.getSimpleName())
.config(conf)
.getOrCreate();
}
@AfterAll
public static void afterAll() throws IOException {
FileUtils.deleteDirectory(workingDir.toFile());
spark.stop();
}
@Test
public void ExtendEoscResultWithProjectTest() throws Exception {
final String sourcePath = getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/eosc/input")
.getPath();
final String workingPath = getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/eosc/working/")
.getPath();
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
spark
.read()
.textFile(workingPath + "publication")
.write()
.text(workingDir.toString() + "/working/publicationextendedaffiliation");
SparkUpdateProjectInfo.main(new String[] {
"-isSparkSessionManaged", Boolean.FALSE.toString(),
"-outputPath", workingDir.toString() + "/",
"-sourcePath", sourcePath,
"-resultType", "publication",
"-workingPath", workingDir.toString() + "/working/",
"-preparedInfoPath", workingPath + "preparedInfo"
});
JavaRDD<Result> tmp = sc
.textFile(workingDir.toString() + "/working/publicationextendedproject")
.map(item -> OBJECT_MAPPER.readValue(item, Result.class));
Assertions.assertEquals(3, tmp.count());
Assertions
.assertEquals(
2,
tmp
.filter(r -> Optional.ofNullable(r.getProjects()).isPresent() && r.getProjects().size() > 0)
.count());
Assertions
.assertEquals(
2,
tmp
.filter(r -> r.getId().equalsIgnoreCase("50|06cdd3ff4700::93859bd27121c3ee7c6ee4bfb1790cba"))
.first()
.getProjects()
.size());
Assertions
.assertEquals(
3,
tmp
.filter(r -> r.getId().equalsIgnoreCase("50|06cdd3ff4700::cd7711c65d518859f1d87056e2c45d98"))
.first()
.getProjects()
.size());
List<ProjectSummary> projectSummaries = tmp
.filter(r -> r.getId().equalsIgnoreCase("50|06cdd3ff4700::93859bd27121c3ee7c6ee4bfb1790cba"))
.first()
.getProjects();
Assertions
.assertTrue(
projectSummaries.stream().anyMatch(p -> p.getFunder().getShortName().equals("NSF")));
Assertions
.assertTrue(
projectSummaries.stream().anyMatch(p -> p.getFunder().getShortName().equals("UKRI")));
JavaRDD<Project> projects = sc
.textFile(workingDir.toString() + "/project")
.map(item -> OBJECT_MAPPER.readValue(item, Project.class));
JavaRDD<Relation> rels = sc
.textFile(workingDir.toString() + "/resultProject")
.map(item -> OBJECT_MAPPER.readValue(item, Relation.class));
System.out.println(projects.count());
Assertions.assertEquals(5, projects.count());
Assertions.assertEquals(5, rels.count());
rels.foreach(r -> Assertions.assertTrue(r.getSource().startsWith("50|")));
rels.foreach(r -> Assertions.assertTrue(r.getTarget().startsWith("40|")));
}
}

View File

@ -0,0 +1,132 @@
package eu.dnetlib.dhp.oa.graph.dump.eosc;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashMap;
import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.eosc.model.Relation;
/**
* @author miriam.baglioni
* @Date 25/10/23
*/
public class OrganizationProjectRelationTest {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static SparkSession spark;
private static Path workingDir;
private static final Logger log = LoggerFactory
.getLogger(OrganizationProjectRelationTest.class);
private static HashMap<String, String> map = new HashMap<>();
@BeforeAll
public static void beforeAll() throws IOException {
workingDir = Files
.createTempDirectory(OrganizationProjectRelationTest.class.getSimpleName());
log.info("using work dir {}", workingDir);
SparkConf conf = new SparkConf();
conf.setAppName(OrganizationProjectRelationTest.class.getSimpleName());
conf.setMaster("local[*]");
conf.set("spark.driver.host", "localhost");
conf.set("hive.metastore.local", "true");
conf.set("spark.ui.enabled", "false");
conf.set("spark.sql.warehouse.dir", workingDir.toString());
conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
spark = SparkSession
.builder()
.appName(OrganizationProjectRelationTest.class.getSimpleName())
.config(conf)
.getOrCreate();
}
@AfterAll
public static void afterAll() throws IOException {
FileUtils.deleteDirectory(workingDir.toFile());
spark.stop();
}
@Test
public void ExtendEoscResultWithProjectTest() throws Exception {
final String sourcePath = getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/eosc/input")
.getPath();
final String workingPath = getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/eosc/working/")
.getPath();
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
spark
.read()
.textFile(workingPath + "organization")
.write()
.text(workingDir.toString() + "/working/organization");
spark
.read()
.textFile(workingPath + "project")
.write()
.text(workingDir.toString() + "/working/project");
SparkDumpOrganizationProject.main(new String[] {
"-isSparkSessionManaged", Boolean.FALSE.toString(),
"-outputPath", workingDir.toString() + "/working/",
"-sourcePath", sourcePath,
"-workingPath", workingDir.toString() + "/working/"
});
JavaRDD<Relation> tmp = sc
.textFile(workingDir.toString() + "/working/organizationProject")
.map(item -> OBJECT_MAPPER.readValue(item, Relation.class));
Assertions.assertEquals(3, tmp.count());
Assertions
.assertEquals(
1,
tmp
.filter(r -> r.getSource().equalsIgnoreCase("20|chistera____::9146e9ef10640675f361d674e77bd254"))
.count());
Assertions
.assertEquals(
2,
tmp
.filter(r -> r.getSource().equalsIgnoreCase("20|corda__h2020::dfe84ab5cad50d4dcfaf5bd0c86e1b64"))
.count());
Assertions
.assertEquals(
1,
tmp
.filter(
r -> r.getSource().equalsIgnoreCase("20|chistera____::9146e9ef10640675f361d674e77bd254") &&
r.getTarget().equalsIgnoreCase("40|nsf_________::d1c070f4252c32e23ccc3f4211c9c621"))
.count());
}
}

View File

@ -24,8 +24,8 @@ import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.eosc.model.Affiliation;
import eu.dnetlib.dhp.eosc.model.Indicator;
import eu.dnetlib.dhp.eosc.model.Organization;
import eu.dnetlib.dhp.eosc.model.Result;
import eu.dnetlib.dhp.schema.action.AtomicAction;
import scala.Tuple2;
@ -128,136 +128,6 @@ public class SelectEoscResultTest {
// legalname = MIKARE RESEARCH
// pid = []
// for 50|06cdd3ff4700::ff21e3c55d527fa7db171137c5fd1f1f no affiliation relation is provided
@Test
public void ExtendEoscResultWithOrganizationTest() throws Exception {
final String sourcePath = getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/eosc/input")
.getPath();
final String cmp = getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/communityMapPath/communitymap.json")
.getPath();
String resultPath = getClass()
.getResource("/eu/dnetlib/dhp/oa/graph/dump/eosc/working/publication")
.getPath();
ExtendEoscResultWithOrganizationStep2.main(new String[] {
"-isSparkSessionManaged", Boolean.FALSE.toString(),
"-outputPath", workingDir.toString() + "/publication",
"-sourcePath", sourcePath,
// "-resultTableName", "eu.dnetlib.dhp.schema.oaf.Publication",
"-resultPath", resultPath
});
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
JavaRDD<Result> tmp = sc
.textFile(workingDir.toString() + "/publication")
.map(item -> OBJECT_MAPPER.readValue(item, Result.class));
Assertions.assertEquals(3, tmp.count());
Assertions
.assertEquals(
2,
tmp
.filter(r -> Optional.ofNullable(r.getAffiliation()).isPresent() && r.getAffiliation().size() > 0)
.count());
Assertions
.assertEquals(
2,
tmp
.filter(r -> r.getId().equalsIgnoreCase("50|06cdd3ff4700::93859bd27121c3ee7c6ee4bfb1790cba"))
.first()
.getAffiliation()
.size());
List<Organization> affiliations = tmp
.filter(r -> r.getId().equalsIgnoreCase("50|06cdd3ff4700::93859bd27121c3ee7c6ee4bfb1790cba"))
.first()
.getAffiliation();
Assertions
.assertTrue(
affiliations.stream().anyMatch(a -> a.getName().equalsIgnoreCase("Doris Engineering (France)")));
Assertions.assertTrue(affiliations.stream().anyMatch(a -> a.getName().equalsIgnoreCase("RENNES METROPOLE")));
Organization organization = affiliations
.stream()
.filter(a -> a.getId().equalsIgnoreCase("20|13811704aa70::51a6ade52065e3b371d1ae822e07f1ff"))
.findFirst()
.get();
Assertions.assertEquals("Doris Engineering (France)", organization.getName());
Assertions
.assertTrue(
organization
.getPid()
.stream()
.anyMatch(
p -> p.getValue().equalsIgnoreCase("grid.432986.2") && p.getType().equalsIgnoreCase("grid")));
Assertions
.assertTrue(
organization
.getPid()
.stream()
.anyMatch(
p -> p.getValue().equalsIgnoreCase("https://ror.org/03nd0ms94")
&& p.getType().equalsIgnoreCase("ror")));
Assertions.assertEquals(2, organization.getPid().size());
organization = affiliations
.stream()
.filter(a -> a.getId().equalsIgnoreCase("20|MetisRadboud::b58bdbe8ae5acead04fc76777d2f8017"))
.findFirst()
.get();
Assertions.assertEquals("RENNES METROPOLE", organization.getName());
Assertions.assertEquals(1, organization.getPid().size());
Assertions
.assertTrue(
organization.getPid().get(0).getValue().equalsIgnoreCase("892062829")
&& organization.getPid().get(0).getType().equalsIgnoreCase("pic"));
Assertions
.assertEquals(
1,
tmp
.filter(r -> r.getId().equalsIgnoreCase("50|06cdd3ff4700::cd7711c65d518859f1d87056e2c45d98"))
.first()
.getAffiliation()
.size());
Assertions
.assertEquals(
"MIKARE RESEARCH",
tmp
.filter(r -> r.getId().equalsIgnoreCase("50|06cdd3ff4700::cd7711c65d518859f1d87056e2c45d98"))
.first()
.getAffiliation()
.get(0)
.getName());
Assertions
.assertEquals(
0,
tmp
.filter(r -> r.getId().equalsIgnoreCase("50|06cdd3ff4700::cd7711c65d518859f1d87056e2c45d98"))
.first()
.getAffiliation()
.get(0)
.getPid()
.size());
Assertions
.assertFalse(
Optional
.ofNullable(
tmp
.filter(
r -> r.getId().equalsIgnoreCase("50|06cdd3ff4700::ff21e3c55d527fa7db171137c5fd1f1f"))
.first()
.getAffiliation())
.isPresent());
}
@Test
public void verifyIndicatorsTest() throws Exception {

File diff suppressed because one or more lines are too long

View File

@ -9,4 +9,12 @@
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"iis::document_affiliations","inferred":true,"invisible":false,"provenanceaction":{"classid":"iis","classname":"Inferred by OpenAIRE","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.8847"},"lastupdatetimestamp":1658466741040,"properties":[],"relClass":"isAuthorInstitutionOf","relType":"resultOrganization","source":"20|____________::d1b0ee22411434cf905692d0fac25749","subRelType":"affiliation","target":"50|06cdd3ff4700::cd7711c65d518859f1d87056e2c45d98","validated":false}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"iis::document_affiliations","inferred":true,"invisible":false,"provenanceaction":{"classid":"iis","classname":"Inferred by OpenAIRE","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.8847"},"lastupdatetimestamp":1658466737372,"properties":[],"relClass":"isAuthorInstitutionOf","relType":"resultOrganization","source":"20|____________::d1b0ee22411434cf905692d0fac25749","subRelType":"affiliation","target":"50|pmid________::3a5bb2b50c18e755cbe67b9ca7d821ee","validated":false}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"iis::document_affiliations","inferred":true,"invisible":false,"provenanceaction":{"classid":"iis","classname":"Inferred by OpenAIRE","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.8998"},"lastupdatetimestamp":1658466717565,"properties":[],"relClass":"isAuthorInstitutionOf","relType":"resultOrganization","source":"20|aka_________::04ab269cfcf6bd571b6285151ec554b5","subRelType":"affiliation","target":"50|nora_uio__no::01152f3e683765695bbad68fc692b85e","validated":false}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"iis::document_affiliations","inferred":true,"invisible":false,"provenanceaction":{"classid":"iis","classname":"Inferred by OpenAIRE","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.8998"},"lastupdatetimestamp":1658466733174,"properties":[],"relClass":"isAuthorInstitutionOf","relType":"resultOrganization","source":"20|aka_________::0838366fa1df3c1599ddefc2168ada5d","subRelType":"affiliation","target":"50|arXiv_______::abe2b16af6067994dda4beab6410b35d","validated":false}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"iis::document_affiliations","inferred":true,"invisible":false,"provenanceaction":{"classid":"iis","classname":"Inferred by OpenAIRE","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.8998"},"lastupdatetimestamp":1658466733174,"properties":[],"relClass":"isAuthorInstitutionOf","relType":"resultOrganization","source":"20|aka_________::0838366fa1df3c1599ddefc2168ada5d","subRelType":"affiliation","target":"50|arXiv_______::abe2b16af6067994dda4beab6410b35d","validated":false}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"iis::document_affiliations","inferred":true,"invisible":false,"provenanceaction":{"classid":"iis","classname":"Inferred by OpenAIRE","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.8847"},"lastupdatetimestamp":1658466741040,"properties":[],"relClass":"isAuthorInstitutionOf","relType":"resultOrganization","target":"20|chistera____::9146e9ef10640675f361d674e77bd254","subRelType":"affiliation","source":"50|355e65625b88::38d0ab3b2212878dee7072170f1561ee","validated":false}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"iis::document_affiliations","inferred":true,"invisible":false,"provenanceaction":{"classid":"iis","classname":"Inferred by OpenAIRE","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.8847"},"lastupdatetimestamp":1658466737372,"properties":[],"relClass":"isAuthorInstitutionOf","relType":"resultOrganization","target":"20|chistera____::9146e9ef10640675f361d674e77bd254","subRelType":"affiliation","source":"50|06cdd3ff4700::cd7711c65d518859f1d87056e2c45d98","validated":false}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"iis::document_affiliations","inferred":true,"invisible":false,"provenanceaction":{"classid":"iis","classname":"Inferred by OpenAIRE","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.8998"},"lastupdatetimestamp":1658466717565,"properties":[],"relClass":"isAuthorInstitutionOf","relType":"resultOrganization","target":"20|aka_________::04ab269cfcf6bd571b6285151ec554b5","subRelType":"affiliation","source":"50|355e65625b88::38d0ab3b2212878dee7072170f1561ee","validated":false}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"iis::document_affiliations","inferred":true,"invisible":false,"provenanceaction":{"classid":"iis","classname":"Inferred by OpenAIRE","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.8998"},"lastupdatetimestamp":1658466733174,"properties":[],"relClass":"isAuthorInstitutionOf","relType":"resultOrganization","target":"20|corda__h2020::dfe84ab5cad50d4dcfaf5bd0c86e1b64","subRelType":"affiliation","source":"50|06cdd3ff4700::cd7711c65d518859f1d87056e2c45d98","validated":false}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"iis::document_affiliations","inferred":true,"invisible":false,"provenanceaction":{"classid":"iis","classname":"Inferred by OpenAIRE","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.8847"},"lastupdatetimestamp":1658466741040,"properties":[],"relClass":"isParticipant","relType":"resultOrganization","target":"40|nsf_________::d1c070f4252c32e23ccc3f4211c9c621","subRelType":"participation","source":"20|chistera____::9146e9ef10640675f361d674e77bd254","validated":false}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"iis::document_affiliations","inferred":true,"invisible":false,"provenanceaction":{"classid":"iis","classname":"Inferred by OpenAIRE","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.8847"},"lastupdatetimestamp":1658466737372,"properties":[],"relClass":"isParticipant","relType":"resultOrganization","target":"40|ukri________::081b09db1211a7b89eb3610d3160e9ba","subRelType":"participation","source":"20|corda__h2020::dfe84ab5cad50d4dcfaf5bd0c86e1b64","validated":false}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"iis::document_affiliations","inferred":true,"invisible":false,"provenanceaction":{"classid":"iis","classname":"Inferred by OpenAIRE","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.8998"},"lastupdatetimestamp":1658466717565,"properties":[],"relClass":"isParticipant","relType":"resultOrganization","target":"40|nsf_________::d1c070f4252c32e23ccc3f4211c9c621","subRelType":"participation","source":"20|nih_________::7523ba08be91b521952082f0c25daf5f","validated":false}
{"dataInfo":{"deletedbyinference":false,"inferenceprovenance":"iis::document_affiliations","inferred":true,"invisible":false,"provenanceaction":{"classid":"iis","classname":"Inferred by OpenAIRE","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.8998"},"lastupdatetimestamp":1658466733174,"properties":[],"relClass":"isParticipant","relType":"resultOrganization","target":"40|corda__h2020::5e49c0ee515f36e416a00cc292dfb310","subRelType":"participation","source":"20|corda__h2020::dfe84ab5cad50d4dcfaf5bd0c86e1b64","validated":false}

View File

@ -0,0 +1,2 @@
{"legalshortname":null,"legalname":"School of Computer Science, University of Birmingham","websiteurl":null,"alternativenames":[],"country":{"code":"GB","label":"United Kingdom"},"id":"20|chistera____::9146e9ef10640675f361d674e77bd254","pid":[]}
{"legalshortname":"EVOTHINGS","legalname":"EVOTHINGS AB","websiteurl":"https://evothings.com","alternativenames":[],"country":{"code":"SE","label":"Sweden"},"id":"20|corda__h2020::dfe84ab5cad50d4dcfaf5bd0c86e1b64","pid":[{"type":"PIC","value":"922724335"}]}

View File

@ -0,0 +1,2 @@
{"projectsList":[{"code":"0430175","funder":{"fundingStream":"Directorate for Computer & Information Science & Engineering","jurisdiction":"US","name":"National Science Foundation","shortName":"NSF"},"id":"40|nsf_________::d1c070f4252c32e23ccc3f4211c9c621","provenance":{"provenance":"Harvested","trust":"0.900"},"title":"Collaborative Research: Temporal Aspects"},{"code":"EP/F01161X/1","funder":{"fundingStream":"EPSRC","jurisdiction":"GB","name":"UK Research and Innovation","shortName":"UKRI"},"id":"40|ukri________::081b09db1211a7b89eb3610d3160e9ba","provenance":{"provenance":"Harvested","trust":"0.900"},"title":"The complexity of valued constraints"}],"resultId":"50|06cdd3ff4700::93859bd27121c3ee7c6ee4bfb1790cba"}
{"projectsList":[{"acronym":"METIS","code":"317669","funder":{"fundingStream":"FP7","jurisdiction":"EU","name":"European Commission","shortName":"EC"},"id":"40|corda_______::175629cbea2038ed02c85e7132fc4be2","provenance":{"provenance":"Harvested","trust":"0.900"},"title":"Mobile and wireless communications Enablers for Twenty-twenty (2020) Information Society"},{"code":"unidentified","funder":{"jurisdiction":"CA","name":"Natural Sciences and Engineering Research Council of Canada","shortName":"NSERC"},"id":"40|nserc_______::1e5e62235d094afd01cd56e65112fc63","provenance":{"provenance":"Harvested","trust":"0.900"},"title":"unidentified"},{"acronym":"MiLC","code":"753431","funder":{"fundingStream":"H2020","jurisdiction":"EU","name":"European Commission","shortName":"EC"},"id":"40|corda__h2020::5e49c0ee515f36e416a00cc292dfb310","provenance":{"provenance":"Harvested","trust":"0.900"},"title":"Monotonicity in Logic and Complexity"}],"resultId":"50|06cdd3ff4700::cd7711c65d518859f1d87056e2c45d98"}

View File

@ -0,0 +1,5 @@
{"id":"40|corda__h2020::5e49c0ee515f36e416a00cc292dfb310","websiteurl":null,"code":"287027","acronym":null,"title":"Advanced numerical computation methods for massive parabolic problems","startdate":"2015-09-01","enddate":"2019-08-31","callidentifier":"Academy Project LT","keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"AKA","name":"Academy of Finland","jurisdiction":"FI","fundingStream":null,"funding_stream":null}],"summary":null,"granted":{"currency":"EUR","totalcost":0.0,"fundedamount":644270.0},"h2020programme":[]}
{"id":"40|nsf_________::d1c070f4252c32e23ccc3f4211c9c621","websiteurl":null,"code":"328474","acronym":null,"title":"A novel family-based sequencing approach and dissection of regulatory networks underlying a colour polymorphism","startdate":"2020-01-01","enddate":"2022-01-31","callidentifier":"Molecular Regulatory Networks of Life (RLife), call for Academy-funded researchers BTY","keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"AKA","name":"Academy of Finland","jurisdiction":"FI","fundingStream":null,"funding_stream":null}],"summary":null,"granted":{"currency":"EUR","totalcost":0.0,"fundedamount":250000.0},"h2020programme":[]}
{"id":"40|nserc_______::1e5e62235d094afd01cd56e65112fc63","websiteurl":null,"code":"107987","acronym":null,"title":"Structure of the upper mantle beneath Central Fennoscandian Shield from seismic anisotropy studies","startdate":"2005-02-15","enddate":"2005-12-31","callidentifier":"Apurahat tutkijainvaihtoon ja muuhun kahdenvälisiin sopimuksiin perustuvaan yhteistoimintaan LT","keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"AKA","name":"Academy of Finland","jurisdiction":"FI","fundingStream":null,"funding_stream":null}],"summary":null,"granted":{"currency":"EUR","totalcost":0.0,"fundedamount":700.0},"h2020programme":[]}
{"id":"40|corda_______::175629cbea2038ed02c85e7132fc4be2","websiteurl":null,"code":"201608","acronym":null,"title":"Symbiotic Legumes For Sustainable Food Production and Prevention of Land Deglaration in China","startdate":"2003-01-01","enddate":"2005-12-31","callidentifier":"Appropriations for development studies BY","keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"AKA","name":"Academy of Finland","jurisdiction":"FI","fundingStream":null,"funding_stream":null}],"summary":null,"granted":{"currency":"EUR","totalcost":0.0,"fundedamount":300010.0},"h2020programme":[]}
{"id":"40|ukri________::081b09db1211a7b89eb3610d3160e9ba","websiteurl":null,"code":"200618","acronym":null,"title":"Atomic Emission.","startdate":"2002-04-24","enddate":"2002-12-31","callidentifier":"Researcher exchange to Finland LT","keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"AKA","name":"Academy of Finland","jurisdiction":"FI","fundingStream":null,"funding_stream":null}],"summary":null,"granted":{"currency":"EUR","totalcost":0.0,"fundedamount":2760.0},"h2020programme":[]}

View File

@ -6,6 +6,7 @@
<modules>
<module>dump-schema</module>
<module>dump</module>
<module>api</module>
</modules>
<parent>
@ -102,7 +103,7 @@
<junit-jupiter.version>5.6.1</junit-jupiter.version>
<dhp.commons.lang.version>3.5</dhp.commons.lang.version>
<dhp.guava.version>11.0.2</dhp.guava.version>
<dhp-schemas.version>[3.17.1]</dhp-schemas.version>
<dhp-schemas.version>[4.17.2]</dhp-schemas.version>
</properties>
</project>