forked from D-Net/dnet-hadoop

commit 09ccc7b472

Merge branch 'beta' into project_organization_contribution
@@ -191,7 +191,7 @@ public class ZenodoAPIClient implements Serializable {
 	 * @throws MissingConceptDoiException
 	 */
 	public int newVersion(String concept_rec_id) throws IOException, MissingConceptDoiException {
-		setDepositionId(concept_rec_id);
+		setDepositionId(concept_rec_id, 1);
 		String json = "{}";
 
 		OkHttpClient httpClient = new OkHttpClient.Builder().connectTimeout(600, TimeUnit.SECONDS).build();
@@ -253,9 +253,10 @@ public class ZenodoAPIClient implements Serializable {
 
 	}
 
-	private void setDepositionId(String concept_rec_id) throws IOException, MissingConceptDoiException {
+	private void setDepositionId(String concept_rec_id, Integer page) throws IOException, MissingConceptDoiException {
 
-		ZenodoModelList zenodoModelList = new Gson().fromJson(getPrevDepositions(), ZenodoModelList.class);
+		ZenodoModelList zenodoModelList = new Gson()
+			.fromJson(getPrevDepositions(String.valueOf(page)), ZenodoModelList.class);
 
 		for (ZenodoModel zm : zenodoModelList) {
 			if (zm.getConceptrecid().equals(concept_rec_id)) {
@@ -263,16 +264,23 @@ public class ZenodoAPIClient implements Serializable {
 				return;
 			}
 		}
-		throw new MissingConceptDoiException("The concept record id specified was missing in the list of depositions");
+		if (zenodoModelList.size() == 0)
+			throw new MissingConceptDoiException(
+				"The concept record id specified was missing in the list of depositions");
+		setDepositionId(concept_rec_id, page + 1);
 
 	}
 
-	private String getPrevDepositions() throws IOException {
+	private String getPrevDepositions(String page) throws IOException {
 
 		OkHttpClient httpClient = new OkHttpClient.Builder().connectTimeout(600, TimeUnit.SECONDS).build();
 
+		HttpUrl.Builder urlBuilder = HttpUrl.parse(urlString).newBuilder();
+		urlBuilder.addQueryParameter("page", page);
+		String url = urlBuilder.build().toString();
 
 		Request request = new Request.Builder()
-			.url(urlString)
+			.url(url)
			.addHeader(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString()) // add request headers
 			.addHeader(HttpHeaders.AUTHORIZATION, "Bearer " + access_token)
 			.get()
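Note: taken together, the hunks above replace the single unpaginated depositions lookup with a page-by-page walk. newVersion starts at page 1; setDepositionId scans one page of depositions for the concept_rec_id and recurses onto the next page until either the id is found or an empty page is returned, which raises MissingConceptDoiException. A minimal iterative sketch of the same control flow, assuming a hypothetical fetchDepositionsPage helper (wrapping getPrevDepositions plus the Gson deserialization) and the ZenodoModel accessors used above:

    // Sketch only, not the committed code: iterative equivalent of the
    // recursive setDepositionId above. fetchDepositionsPage is a hypothetical
    // helper; whether ZenodoModel exposes the deposition id as getId() is an
    // assumption.
    private String findDepositionId(String conceptRecId) throws IOException, MissingConceptDoiException {
        for (int page = 1;; page++) {
            List<ZenodoModel> depositions = fetchDepositionsPage(page); // GET ...?page=<n>
            if (depositions.isEmpty())
                throw new MissingConceptDoiException(
                    "The concept record id specified was missing in the list of depositions");
            for (ZenodoModel zm : depositions)
                if (zm.getConceptrecid().equals(conceptRecId))
                    return zm.getId(); // the recursive version stores this on the client instead
        }
    }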
@@ -75,9 +75,14 @@ public class DHPUtils {
 		final HttpGet req = new HttpGet(url);
 
+		log.info("MDStoreManager request: {}", req);
+
 		try (final CloseableHttpClient client = HttpClients.createDefault()) {
 			try (final CloseableHttpResponse response = client.execute(req)) {
 				final String json = IOUtils.toString(response.getEntity().getContent());
 
+				log.info("MDStoreManager response: {}", json);
+
 				final MDStoreWithInfo[] mdstores = objectMapper.readValue(json, MDStoreWithInfo[].class);
 				return Arrays
 					.stream(mdstores)
@@ -172,6 +172,61 @@ public class PromoteActionPayloadForGraphTableJobTest {
 		}
 	}
 
+	@Test
+	void shouldPromoteActionPayload_custom() throws Exception {
+
+		Class<? extends Oaf> rowClazz = Publication.class;
+		Class<? extends Oaf> actionPayloadClazz = Result.class;
+		MergeAndGet.Strategy strategy = MergeAndGet.Strategy.MERGE_FROM_AND_GET;
+
+		// given
+		Path inputGraphTableDir = createGraphTable(inputGraphRootDir, rowClazz);
+		Path inputActionPayloadDir = createActionPayload(inputActionPayloadRootDir, rowClazz, actionPayloadClazz);
+		Path outputGraphTableDir = outputDir.resolve("graph").resolve(rowClazz.getSimpleName().toLowerCase());
+
+		// when
+		PromoteActionPayloadForGraphTableJob
+			.main(
+				new String[] {
+					"-isSparkSessionManaged",
+					Boolean.FALSE.toString(),
+					"-inputGraphTablePath",
+					inputGraphTableDir.toString(),
+					"-graphTableClassName",
+					rowClazz.getCanonicalName(),
+					"-inputActionPayloadPath",
+					inputActionPayloadDir.toString(),
+					"-actionPayloadClassName",
+					actionPayloadClazz.getCanonicalName(),
+					"-outputGraphTablePath",
+					outputGraphTableDir.toString(),
+					"-mergeAndGetStrategy",
+					strategy.name(),
+					"--shouldGroupById",
+					"true"
+				});
+
+		// then
+		assertTrue(Files.exists(outputGraphTableDir));
+
+		List<? extends Oaf> actualOutputRows = readGraphTableFromJobOutput(outputGraphTableDir.toString(), rowClazz)
+			.collectAsList()
+			.stream()
+			.sorted(Comparator.comparingInt(Object::hashCode))
+			.collect(Collectors.toList());
+
+		Publication p = actualOutputRows
+			.stream()
+			.map(o -> (Publication) o)
+			.filter(o -> "50|4ScienceCRIS::6a67ed3daba1c380bf9de3c13ed9c879".equals(o.getId()))
+			.findFirst()
+			.get();
+
+		assertNotNull(p.getMeasures());
+		assertTrue(p.getMeasures().size() > 0);
+
+	}
+
 	public static Stream<Arguments> promoteJobTestParams() {
 		return Stream
 			.of(
@@ -18,3 +18,4 @@
 {"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}},"lastupdatetimestamp":1572018243405,"id":"50|CSC_________::00019460865d6cc381b36076131a5bc1","originalId":[],"collectedfrom":[],"pid":[],"dateofcollection":"","dateoftransformation":"","extraInfo":[],"oaiprovenance":null,"author":[],"resulttype":{"classid":"","classname":"","schemeid":"","schemename":""},"language":{"classid":"","classname":"","schemeid":"","schemename":""},"country":[],"subject":[{"value":"Computer Science::Networking and Internet Architecture","qualifier":{"classid":"arxiv","classname":"arxiv","schemeid":"dnet:subject_classification_typologies","schemename":"dnet:subject_classification_typologies"},"dataInfo":{"invisible":false,"inferred":true,"deletedbyinference":false,"trust":"0.7416","inferenceprovenance":"iis::document_classes","provenanceaction":{"classid":"iis","classname":"iis","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}}}],"title":[],"relevantdate":[],"description":[],"dateofacceptance":{"value":"","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"publisher":{"value":"","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"embargoenddate":{"value":"","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"source":[],"fulltext":[],"format":[],"contributor":[],"resourcetype":{"classid":"","classname":"","schemeid":"","schemename":""},"coverage":[],"bestaccessright":{"classid":"UNKNOWN","classname":"not available","schemeid":"dnet:access_modes","schemename":"dnet:access_modes"},"context":[],"externalReference":null,"instance":[]}
 {"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}},"lastupdatetimestamp":1572018240982,"id":"50|CSC_________::0001d663c95c4132355e1765375a5275","originalId":[],"collectedfrom":[],"pid":[],"dateofcollection":"","dateoftransformation":"","extraInfo":[],"oaiprovenance":null,"author":[],"resulttype":{"classid":"","classname":"","schemeid":"","schemename":""},"language":{"classid":"","classname":"","schemeid":"","schemename":""},"country":[],"subject":[{"value":"animal diseases","qualifier":{"classid":"mesheuropmc","classname":"mesheuropmc","schemeid":"dnet:subject_classification_typologies","schemename":"dnet:subject_classification_typologies"},"dataInfo":{"invisible":false,"inferred":true,"deletedbyinference":false,"trust":"0.7461","inferenceprovenance":"iis::document_classes","provenanceaction":{"classid":"iis","classname":"iis","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}}}],"title":[],"relevantdate":[],"description":[],"dateofacceptance":{"value":"","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"publisher":{"value":"","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"embargoenddate":{"value":"","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"source":[],"fulltext":[],"format":[],"contributor":[],"resourcetype":{"classid":"","classname":"","schemeid":"","schemename":""},"coverage":[],"bestaccessright":{"classid":"UNKNOWN","classname":"not available","schemeid":"dnet:access_modes","schemename":"dnet:access_modes"},"context":[],"externalReference":null,"instance":[]}
 {"dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}},"lastupdatetimestamp":1572018240982,"id":"50|CSC_________::0001d663c95c4132355e1765375a5275","originalId":[],"collectedfrom":[],"pid":[],"dateofcollection":"","dateoftransformation":"","extraInfo":[],"oaiprovenance":null,"author":[],"resulttype":{"classid":"","classname":"","schemeid":"","schemename":""},"language":{"classid":"","classname":"","schemeid":"","schemename":""},"country":[],"subject":[{"value":"animal diseases","qualifier":{"classid":"mesheuropmc","classname":"mesheuropmc","schemeid":"dnet:subject_classification_typologies","schemename":"dnet:subject_classification_typologies"},"dataInfo":{"invisible":false,"inferred":true,"deletedbyinference":false,"trust":"0.7461","inferenceprovenance":"iis::document_classes","provenanceaction":{"classid":"iis","classname":"iis","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}}}],"title":[],"relevantdate":[],"description":[],"dateofacceptance":{"value":"","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"publisher":{"value":"","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"embargoenddate":{"value":"","dataInfo":{"invisible":false,"inferred":false,"deletedbyinference":false,"trust":"","inferenceprovenance":"","provenanceaction":{"classid":"","classname":"","schemeid":"","schemename":""}}},"source":[],"fulltext":[],"format":[],"contributor":[],"resourcetype":{"classid":"","classname":"","schemeid":"","schemename":""},"coverage":[],"bestaccessright":{"classid":"UNKNOWN","classname":"not available","schemeid":"dnet:access_modes","schemename":"dnet:access_modes"},"context":[],"externalReference":null,"instance":[]}
+{"collectedfrom":null,"dataInfo":null,"lastupdatetimestamp":null,"id":"50|4ScienceCRIS::6a67ed3daba1c380bf9de3c13ed9c879","originalId":null,"pid":null,"dateofcollection":null,"dateoftransformation":null,"extraInfo":null,"oaiprovenance":null,"processingchargeamount":null,"processingchargecurrency":null,"measures":[{"id":"influence","unit":[{"key":"score","value":"1.64385446761e-08","dataInfo":{"invisible":false,"inferred":true,"deletedbyinference":false,"trust":"","inferenceprovenance":"update","provenanceaction":{"classid":"measure:bip","classname":"Inferred by OpenAIRE","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}}}]},{"id":"popularity_alt","unit":[{"key":"score","value":"18.9590813696","dataInfo":{"invisible":false,"inferred":true,"deletedbyinference":false,"trust":"","inferenceprovenance":"update","provenanceaction":{"classid":"measure:bip","classname":"Inferred by OpenAIRE","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}}}]},{"id":"popularity","unit":[{"key":"score","value":"6.00577981643e-08","dataInfo":{"invisible":false,"inferred":true,"deletedbyinference":false,"trust":"","inferenceprovenance":"update","provenanceaction":{"classid":"measure:bip","classname":"Inferred by OpenAIRE","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"}}}]}],"author":null,"resulttype":null,"language":null,"country":null,"subject":null,"title":null,"relevantdate":null,"description":null,"dateofacceptance":null,"publisher":null,"embargoenddate":null,"source":null,"fulltext":null,"format":null,"contributor":null,"resourcetype":null,"coverage":null,"bestaccessright":null,"context":null,"externalReference":null,"instance":null}
@@ -17,6 +17,9 @@ public class PMArticle implements Serializable {
 	 * the Pubmed Identifier
 	 */
 	private String pmid;
+
+	private String pmcId;
+
 	/**
 	 * the DOI
 	 */
@@ -122,7 +125,7 @@ public class PMArticle implements Serializable {
 
 	/**
 	 * The full journal title (taken from NLM cataloging data following NLM rules for how to compile a serial name) is exported in this element.
 	 * Some characters that are not part of the NLM MEDLINE/PubMed Character Set reside in a relatively small number of full journal titles.
 	 * The NLM journal title abbreviation is exported in the <MedlineTA> element.
 	 *
 	 * @return the pubmed Journal Extracted
@@ -140,10 +143,11 @@ public class PMArticle implements Serializable {
 	}
 
 	/**
-	 * English-language abstracts are taken directly from the published article.
-	 * If the article does not have a published abstract, the National Library of Medicine does not create one,
-	 * thus the record lacks the <Abstract> and <AbstractText> elements. However, in the absence of a formally
-	 * labeled abstract in the published article, text from a substantive "summary", "summary and conclusions" or "conclusions and summary" may be used.
+	 * <ArticleTitle> contains the entire title of the journal article. <ArticleTitle> is always in English;
+	 * those titles originally published in a non-English language and translated for <ArticleTitle> are enclosed in square brackets.
+	 * All titles end with a period unless another punctuation mark such as a question mark or bracket is present.
+	 * Explanatory information about the title itself is enclosed in parentheses, e.g.: (author's transl).
+	 * Corporate/collective authors may appear at the end of <ArticleTitle> for citations up to about the year 2000.
 	 *
 	 * @return the extracted pubmed Title
 	 */
@@ -250,4 +254,14 @@ public class PMArticle implements Serializable {
 	public List<PMGrant> getGrants() {
 		return grants;
 	}
+
+	public String getPmcId() {
+		return pmcId;
+	}
+
+	public PMArticle setPmcId(String pmcId) {
+		this.pmcId = pmcId;
+		return this;
+	}
 }
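Note: unlike the plain getters around it, the new setPmcId returns this, so it can be chained fluently. A hypothetical usage line (assuming PMArticle's default constructor; "PMC1234" is a made-up identifier):

    PMArticle article = new PMArticle().setPmcId("PMC1234"); // fluent setter added by this commit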
@@ -98,6 +98,7 @@ class PMParser(xml: XMLEventReader) extends Iterator[PMArticle] {
           case "PMID" => currentArticle.setPmid(text.trim)
           case "ArticleId" =>
             if ("doi".equalsIgnoreCase(currentArticleType)) currentArticle.setDoi(text.trim)
+            if ("pmc".equalsIgnoreCase(currentArticleType)) currentArticle.setPmcId(text.trim)
           case "Language" => currentArticle.setLanguage(text.trim)
           case "ISSN" => currentJournal.setIssn(text.trim)
           case "GrantID" => currentGrant.setGrantID(text.trim)
@@ -4,9 +4,12 @@ import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup
 import eu.dnetlib.dhp.schema.common.ModelConstants
 import eu.dnetlib.dhp.schema.oaf.utils.{GraphCleaningFunctions, IdentifierFactory, OafMapperUtils, PidType}
 import eu.dnetlib.dhp.schema.oaf._
-import collection.JavaConverters._
+import eu.dnetlib.dhp.utils.DHPUtils
+import org.apache.commons.lang3.StringUtils
+
+import collection.JavaConverters._
 import java.util.regex.Pattern
+import scala.collection.mutable.ListBuffer
 
 /**
  */
@@ -14,6 +17,9 @@ object PubMedToOaf {
 
   val SUBJ_CLASS = "keywords"
 
+  val OAI_HEADER = "oai:pubmedcentral.nih.gov:"
+  val OLD_PMC_PREFIX = "od_______267::"
+
   val urlMap = Map(
     "pmid" -> "https://pubmed.ncbi.nlm.nih.gov/",
     "doi" -> "https://dx.doi.org/"
@@ -50,6 +56,17 @@ object PubMedToOaf {
     null
   }
 
+  def createOriginalOpenaireId(article: PMArticle): String = {
+    if (StringUtils.isNotEmpty(article.getPmcId)) {
+      val md5 = DHPUtils.md5(s"$OAI_HEADER${article.getPmcId.replace("PMC", "")}")
+      s"$OLD_PMC_PREFIX$md5"
+    } else
+      null
+  }
+
 /** Create an instance of class extends Result
  * starting from OAF instanceType value
 *
@@ -122,8 +139,9 @@ object PubMedToOaf {
       return null
 
     // MAP PMID into pid with classid = classname = pmid
-    val pidList: List[StructuredProperty] = List(
-      OafMapperUtils.structuredProperty(
+    val pidList = ListBuffer[StructuredProperty]()
+
+    pidList += OafMapperUtils.structuredProperty(
       article.getPmid,
       PidType.pmid.toString,
       PidType.pmid.toString,
@@ -131,7 +149,19 @@ object PubMedToOaf {
       ModelConstants.DNET_PID_TYPES,
       dataInfo
     )
-    )
+
+    if (StringUtils.isNotBlank(article.getPmcId)) {
+      pidList += OafMapperUtils.structuredProperty(
+        article.getPmcId,
+        PidType.pmc.toString,
+        PidType.pmc.toString,
+        ModelConstants.DNET_PID_TYPES,
+        ModelConstants.DNET_PID_TYPES,
+        dataInfo
+      )
+    }
     if (pidList == null)
       return null
 
@@ -186,6 +216,7 @@ object PubMedToOaf {
     val urlLists: List[String] = pidList
       .map(s => (urlMap.getOrElse(s.getQualifier.getClassid, ""), s.getValue))
       .filter(t => t._1.nonEmpty)
+      .toList
       .map(t => t._1 + t._2)
     if (urlLists != null)
       pubmedInstance.setUrl(urlLists.asJava)
@@ -262,7 +293,14 @@ object PubMedToOaf {
 
     if (authors != null && authors.nonEmpty)
       result.setAuthor(authors.asJava)
-    result.setOriginalId(pidList.map(s => s.getValue).asJava)
+
+    if (StringUtils.isNotEmpty(article.getPmcId)) {
+      val originalIDS = ListBuffer[String]()
+      originalIDS += createOriginalOpenaireId(article)
+      pidList.map(s => s.getValue).foreach(p => originalIDS += p)
+      result.setOriginalId(originalIDS.asJava)
+    } else
+      result.setOriginalId(pidList.map(s => s.getValue).asJava)
 
     result.setId(article.getPmid)
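Note: createOriginalOpenaireId above reconstructs the identifier that the old PMC datasource (prefix od_______267::) would have assigned to the same record, by hashing the record's OAI identifier. A self-contained sketch of that derivation, assuming DHPUtils.md5 is a plain lowercase-hex MD5 digest (the helper itself is not shown in this diff):

    // Sketch only: legacy OpenAIRE id for a PMC record, mirroring
    // createOriginalOpenaireId. The MD5-hex behaviour of DHPUtils.md5 is an
    // assumption; MessageDigest stands in for it here.
    import java.math.BigInteger;
    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;

    public class LegacyPmcIdSketch {
        static final String OAI_HEADER = "oai:pubmedcentral.nih.gov:";
        static final String OLD_PMC_PREFIX = "od_______267::";

        // Strip the "PMC" prefix, hash the OAI identifier, prepend the old prefix.
        static String createOriginalOpenaireId(String pmcId) throws Exception {
            if (pmcId == null || pmcId.isEmpty())
                return null;
            String oai = OAI_HEADER + pmcId.replace("PMC", "");
            byte[] digest = MessageDigest.getInstance("MD5").digest(oai.getBytes(StandardCharsets.UTF_8));
            return OLD_PMC_PREFIX + String.format("%032x", new BigInteger(1, digest));
        }

        public static void main(String[] args) throws Exception {
            // e.g. PMC1234 -> od_______267::<md5 of "oai:pubmedcentral.nih.gov:1234">
            System.out.println(createOriginalOpenaireId("PMC1234"));
        }
    }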
@@ -2,6 +2,7 @@
 package eu.dnetlib.dhp.actionmanager.ror;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
 
 import java.io.FileInputStream;
 import java.util.List;
@@ -38,25 +39,20 @@ class GenerateRorActionSetJobTest {
 			.readValue(IOUtils.toString(getClass().getResourceAsStream("ror_org.json")), RorOrganization.class);
 		final List<AtomicAction<? extends Oaf>> aas = GenerateRorActionSetJob.convertRorOrg(r);
 
-		Assertions.assertEquals(3, aas.size());
+		Assertions.assertEquals(1, aas.size());
 		assertEquals(Organization.class, aas.get(0).getClazz());
-		assertEquals(Relation.class, aas.get(1).getClazz());
-		assertEquals(Relation.class, aas.get(2).getClazz());
 
 		final Organization o = (Organization) aas.get(0).getPayload();
-		final Relation r1 = (Relation) aas.get(1).getPayload();
-		final Relation r2 = (Relation) aas.get(2).getPayload();
 
-		assertEquals(o.getId(), r1.getSource());
-		assertEquals(r1.getSource(), r2.getTarget());
-		assertEquals(r2.getSource(), r1.getTarget());
-		assertEquals(ModelConstants.IS_PARENT_OF, r1.getRelClass());
-		assertEquals(ModelConstants.IS_CHILD_OF, r2.getRelClass());
+		assertNotNull(o);
+		assertNotNull(o.getCountry());
+		assertEquals("AU", o.getCountry().getClassid());
+		assertNotNull(o.getLegalname());
+		assertEquals("Mount Stromlo Observatory", o.getLegalname().getValue());
 
 		System.out.println(mapper.writeValueAsString(o));
-		System.out.println(mapper.writeValueAsString(r1));
-		System.out.println(mapper.writeValueAsString(r2));
 
 	}
 
 	@Test
@@ -19,7 +19,9 @@ import org.apache.spark.sql.Encoder;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.SparkSession;
 import org.apache.spark.util.LongAccumulator;
-import org.junit.jupiter.api.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.junit.jupiter.api.io.TempDir;
 import org.mockito.junit.jupiter.MockitoExtension;
@@ -50,7 +52,7 @@ class TransformationJobTest extends AbstractVocabularyTest {
 	@Test
 	@DisplayName("Test Date cleaner")
 	void testDateCleaner() throws Exception {
-		DateCleaner dc = new DateCleaner();
+		final DateCleaner dc = new DateCleaner();
 		assertEquals("1982-09-20", dc.clean("20/09/1982"));
 		assertEquals("2002-09-20", dc.clean("20-09-2002"));
 		assertEquals("2002-09-20", dc.clean("2002-09-20"));
@@ -68,9 +70,9 @@ class TransformationJobTest extends AbstractVocabularyTest {
 		mr.setProvenance(new Provenance("DSID", "DSNAME", "PREFIX"));
 		mr.setBody(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/input_zenodo.xml")));
 		// We Load the XSLT transformation Rule from the classpath
-		XSLTTransformationFunction tr = loadTransformationRule("/eu/dnetlib/dhp/transform/zenodo_tr.xslt");
+		final XSLTTransformationFunction tr = loadTransformationRule("/eu/dnetlib/dhp/transform/zenodo_tr.xslt");
 
-		MetadataRecord result = tr.call(mr);
+		final MetadataRecord result = tr.call(mr);
 
 		// Print the record
 		System.out.println(result.getBody());
@@ -86,9 +88,9 @@ class TransformationJobTest extends AbstractVocabularyTest {
 		mr.setProvenance(new Provenance("DSID", "DSNAME", "PREFIX"));
 		mr.setBody(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/input_itgv4.xml")));
 		// We Load the XSLT transformation Rule from the classpath
-		XSLTTransformationFunction tr = loadTransformationRule("/eu/dnetlib/dhp/transform/zenodo_tr.xslt");
+		final XSLTTransformationFunction tr = loadTransformationRule("/eu/dnetlib/dhp/transform/zenodo_tr.xslt");
 
-		MetadataRecord result = tr.call(mr);
+		final MetadataRecord result = tr.call(mr);
 
 		// Print the record
 		System.out.println(result.getBody());
@@ -108,9 +110,9 @@ class TransformationJobTest extends AbstractVocabularyTest {
 		mr.setProvenance(new Provenance("DSID", "DSNAME", "PREFIX"));
 		mr.setBody(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/input_itgv4.xml")));
 		// We Load the XSLT transformation Rule from the classpath
-		XSLTTransformationFunction tr = loadTransformationRule(xslTransformationScript);
+		final XSLTTransformationFunction tr = loadTransformationRule(xslTransformationScript);
 
-		MetadataRecord result = tr.call(mr);
+		final MetadataRecord result = tr.call(mr);
 
 		// Print the record
 		System.out.println(result.getBody());
@@ -129,9 +131,9 @@ class TransformationJobTest extends AbstractVocabularyTest {
 		mr.setProvenance(new Provenance("DSID", "DSNAME", "PREFIX"));
 		mr.setBody(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/input_omicsdi.xml")));
 		// We Load the XSLT transformation Rule from the classpath
-		XSLTTransformationFunction tr = loadTransformationRule(xslTransformationScript);
+		final XSLTTransformationFunction tr = loadTransformationRule(xslTransformationScript);
 
-		MetadataRecord result = tr.call(mr);
+		final MetadataRecord result = tr.call(mr);
 
 		// Print the record
 		System.out.println(result.getBody());
@@ -140,7 +142,8 @@ class TransformationJobTest extends AbstractVocabularyTest {
 
 	@Test
 	@DisplayName("Test TransformSparkJobNode.main with oaiOpenaire_datacite (v4)")
-	void transformTestITGv4OAIdatacite(@TempDir Path testDir) throws Exception {
+	void transformTestITGv4OAIdatacite(@TempDir
+	final Path testDir) throws Exception {
 
 		try (SparkSession spark = SparkSession.builder().config(sparkConf).getOrCreate()) {
@@ -203,7 +206,8 @@ class TransformationJobTest extends AbstractVocabularyTest {
 
 	@Test
 	@DisplayName("Test TransformSparkJobNode.main")
-	void transformTest(@TempDir Path testDir) throws Exception {
+	void transformTest(@TempDir
+	final Path testDir) throws Exception {
 
 		try (SparkSession spark = SparkSession.builder().config(sparkConf).getOrCreate()) {
@@ -256,6 +260,25 @@ class TransformationJobTest extends AbstractVocabularyTest {
 		}
 	}
 
+	@Test
+	@DisplayName("Test Transform Single XML using cnr_explora_tr XSLTTransformator")
+	void testCnrExploraTransformSaxonHE() throws Exception {
+
+		// We Set the input Record getting the XML from the classpath
+		final MetadataRecord mr = new MetadataRecord();
+
+		mr.setProvenance(new Provenance("openaire____::cnr_explora", "CNR ExploRA", "cnr_________"));
+		mr.setBody(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/input_cnr_explora.xml")));
+		// We Load the XSLT transformation Rule from the classpath
+		final XSLTTransformationFunction tr = loadTransformationRule("/eu/dnetlib/dhp/transform/cnr_explora_tr.xslt");
+
+		final MetadataRecord result = tr.call(mr);
+
+		// Print the record
+		System.out.println(result.getBody());
+		// TODO Create significant Assert
+	}
+
 	private XSLTTransformationFunction loadTransformationRule(final String path) throws Exception {
 		final String trValue = IOUtils.toString(this.getClass().getResourceAsStream(path));
 		final LongAccumulator la = new LongAccumulator();
@ -195,7 +195,9 @@
|
||||||
<Title>Biochemical and biophysical research communications</Title>
|
<Title>Biochemical and biophysical research communications</Title>
|
||||||
<ISOAbbreviation>Biochem Biophys Res Commun</ISOAbbreviation>
|
<ISOAbbreviation>Biochem Biophys Res Commun</ISOAbbreviation>
|
||||||
</Journal>
|
</Journal>
|
||||||
<ArticleTitle>Delineation of the intimate details of the backbone conformation of pyridine nucleotide coenzymes in aqueous solution.</ArticleTitle>
|
<ArticleTitle>Delineation of the intimate details of the backbone conformation of pyridine nucleotide
|
||||||
|
coenzymes in aqueous solution.
|
||||||
|
</ArticleTitle>
|
||||||
<Pagination>
|
<Pagination>
|
||||||
<MedlinePgn>1173-9</MedlinePgn>
|
<MedlinePgn>1173-9</MedlinePgn>
|
||||||
</Pagination>
|
</Pagination>
|
||||||
|
@ -473,7 +475,9 @@
|
||||||
<Title>Biochemical and biophysical research communications</Title>
|
<Title>Biochemical and biophysical research communications</Title>
|
||||||
<ISOAbbreviation>Biochem Biophys Res Commun</ISOAbbreviation>
|
<ISOAbbreviation>Biochem Biophys Res Commun</ISOAbbreviation>
|
||||||
</Journal>
|
</Journal>
|
||||||
<ArticleTitle>Effect of chloroquine on cultured fibroblasts: release of lysosomal hydrolases and inhibition of their uptake.</ArticleTitle>
|
<ArticleTitle>Effect of chloroquine on cultured fibroblasts: release of lysosomal hydrolases and
|
||||||
|
inhibition of their uptake.
|
||||||
|
</ArticleTitle>
|
||||||
<Pagination>
|
<Pagination>
|
||||||
<MedlinePgn>1338-43</MedlinePgn>
|
<MedlinePgn>1338-43</MedlinePgn>
|
||||||
</Pagination>
|
</Pagination>
|
||||||
|
@ -657,7 +661,8 @@
|
||||||
<Title>Biochemical and biophysical research communications</Title>
|
<Title>Biochemical and biophysical research communications</Title>
|
||||||
<ISOAbbreviation>Biochem Biophys Res Commun</ISOAbbreviation>
|
<ISOAbbreviation>Biochem Biophys Res Commun</ISOAbbreviation>
|
||||||
</Journal>
|
</Journal>
|
||||||
<ArticleTitle>Atomic models for the polypeptide backbones of myohemerythrin and hemerythrin.</ArticleTitle>
|
<ArticleTitle>Atomic models for the polypeptide backbones of myohemerythrin and hemerythrin.
|
||||||
|
</ArticleTitle>
|
||||||
<Pagination>
|
<Pagination>
|
||||||
<MedlinePgn>1349-56</MedlinePgn>
|
<MedlinePgn>1349-56</MedlinePgn>
|
||||||
</Pagination>
|
</Pagination>
|
||||||
|
@ -1627,7 +1632,9 @@
|
||||||
<Title>Biochemical pharmacology</Title>
|
<Title>Biochemical pharmacology</Title>
|
||||||
<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
|
<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
|
||||||
</Journal>
|
</Journal>
|
||||||
<ArticleTitle>Comparison between procaine and isocarboxazid metabolism in vitro by a liver microsomal amidase-esterase.</ArticleTitle>
|
<ArticleTitle>Comparison between procaine and isocarboxazid metabolism in vitro by a liver microsomal
|
||||||
|
amidase-esterase.
|
||||||
|
</ArticleTitle>
|
||||||
<Pagination>
|
<Pagination>
|
||||||
<MedlinePgn>1517-21</MedlinePgn>
|
<MedlinePgn>1517-21</MedlinePgn>
|
||||||
</Pagination>
|
</Pagination>
|
||||||
|
@ -2030,7 +2037,9 @@
|
||||||
<Title>Biochemical pharmacology</Title>
|
<Title>Biochemical pharmacology</Title>
|
||||||
<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
|
<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
|
||||||
</Journal>
|
</Journal>
|
||||||
<ArticleTitle>Radiochemical assay of glutathione S-epoxide transferase and its enhancement by phenobarbital in rat liver in vivo.</ArticleTitle>
|
<ArticleTitle>Radiochemical assay of glutathione S-epoxide transferase and its enhancement by
|
||||||
|
phenobarbital in rat liver in vivo.
|
||||||
|
</ArticleTitle>
|
||||||
<Pagination>
|
<Pagination>
|
||||||
<MedlinePgn>1569-72</MedlinePgn>
|
<MedlinePgn>1569-72</MedlinePgn>
|
||||||
</Pagination>
|
</Pagination>
|
||||||
|
@ -2350,7 +2359,9 @@
|
||||||
<Title>Biochemical pharmacology</Title>
|
<Title>Biochemical pharmacology</Title>
|
||||||
<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
|
<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
|
||||||
</Journal>
|
</Journal>
|
||||||
<ArticleTitle>Identification of adenylate cyclase-coupled beta-adrenergic receptors with radiolabeled beta-adrenergic antagonists.</ArticleTitle>
|
<ArticleTitle>Identification of adenylate cyclase-coupled beta-adrenergic receptors with radiolabeled
|
||||||
|
beta-adrenergic antagonists.
|
||||||
|
</ArticleTitle>
|
||||||
<Pagination>
|
<Pagination>
|
||||||
<MedlinePgn>1651-8</MedlinePgn>
|
<MedlinePgn>1651-8</MedlinePgn>
|
||||||
</Pagination>
|
</Pagination>
|
||||||
|
@ -2598,7 +2609,9 @@
|
||||||
<Title>Biochemical pharmacology</Title>
|
<Title>Biochemical pharmacology</Title>
|
||||||
<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
|
<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
|
||||||
</Journal>
|
</Journal>
|
||||||
<ArticleTitle>The effect of adrenaline and of alpha- and beta-adrenergic blocking agents on ATP concentration and on incorporation of 32Pi into ATP in rat fat cells.</ArticleTitle>
|
<ArticleTitle>The effect of adrenaline and of alpha- and beta-adrenergic blocking agents on ATP
|
||||||
|
concentration and on incorporation of 32Pi into ATP in rat fat cells.
|
||||||
|
</ArticleTitle>
|
||||||
<Pagination>
|
<Pagination>
|
||||||
<MedlinePgn>1659-62</MedlinePgn>
|
<MedlinePgn>1659-62</MedlinePgn>
|
||||||
</Pagination>
|
</Pagination>
|
||||||
|
@ -2851,7 +2864,9 @@
|
||||||
<Title>Biochemical pharmacology</Title>
|
<Title>Biochemical pharmacology</Title>
|
||||||
<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
|
<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
|
||||||
</Journal>
|
</Journal>
|
||||||
<ArticleTitle>Action of propranolol on mitochondrial functions--effects on energized ion fluxes in the presence of valinomycin.</ArticleTitle>
|
<ArticleTitle>Action of propranolol on mitochondrial functions--effects on energized ion fluxes in the
|
||||||
|
presence of valinomycin.
|
||||||
|
</ArticleTitle>
|
||||||
<Pagination>
|
<Pagination>
|
||||||
<MedlinePgn>1701-5</MedlinePgn>
|
<MedlinePgn>1701-5</MedlinePgn>
|
||||||
</Pagination>
|
</Pagination>
|
||||||
|
@ -3265,7 +3280,8 @@
|
||||||
</Chemical>
|
</Chemical>
|
||||||
<Chemical>
|
<Chemical>
|
||||||
<RegistryNumber>EC 2.6.1.16</RegistryNumber>
|
<RegistryNumber>EC 2.6.1.16</RegistryNumber>
|
||||||
<NameOfSubstance UI="D005945">Glutamine-Fructose-6-Phosphate Transaminase (Isomerizing)</NameOfSubstance>
|
<NameOfSubstance UI="D005945">Glutamine-Fructose-6-Phosphate Transaminase (Isomerizing)
|
||||||
|
</NameOfSubstance>
|
||||||
</Chemical>
|
</Chemical>
|
||||||
<Chemical>
|
<Chemical>
|
||||||
<RegistryNumber>EC 2.7.-</RegistryNumber>
|
<RegistryNumber>EC 2.7.-</RegistryNumber>
|
||||||
|
@ -3324,7 +3340,9 @@
|
||||||
<DescriptorName UI="D005944" MajorTopicYN="N">Glucosamine</DescriptorName>
|
<DescriptorName UI="D005944" MajorTopicYN="N">Glucosamine</DescriptorName>
|
||||||
</MeshHeading>
|
</MeshHeading>
|
||||||
<MeshHeading>
|
<MeshHeading>
|
||||||
<DescriptorName UI="D005945" MajorTopicYN="N">Glutamine-Fructose-6-Phosphate Transaminase (Isomerizing)</DescriptorName>
|
<DescriptorName UI="D005945" MajorTopicYN="N">Glutamine-Fructose-6-Phosphate Transaminase
|
||||||
|
(Isomerizing)
|
||||||
|
</DescriptorName>
|
||||||
<QualifierName UI="Q000378" MajorTopicYN="N">metabolism</QualifierName>
|
<QualifierName UI="Q000378" MajorTopicYN="N">metabolism</QualifierName>
|
||||||
</MeshHeading>
|
</MeshHeading>
|
||||||
<MeshHeading>
|
<MeshHeading>
|
||||||
|
@ -3463,7 +3481,8 @@
|
||||||
<Title>Biochemical pharmacology</Title>
|
<Title>Biochemical pharmacology</Title>
|
||||||
<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
|
<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
|
||||||
</Journal>
|
</Journal>
|
||||||
<ArticleTitle>Inhibition of aldehyde reductase by acidic metabolites of the biogenic amines.</ArticleTitle>
|
<ArticleTitle>Inhibition of aldehyde reductase by acidic metabolites of the biogenic amines.
|
||||||
|
</ArticleTitle>
|
||||||
<Pagination>
|
<Pagination>
|
||||||
<MedlinePgn>1731-3</MedlinePgn>
|
<MedlinePgn>1731-3</MedlinePgn>
|
||||||
</Pagination>
|
</Pagination>
|
||||||
|
@ -3696,7 +3715,9 @@
|
||||||
<Title>Biochemical pharmacology</Title>
|
<Title>Biochemical pharmacology</Title>
|
||||||
<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
|
<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
|
||||||
</Journal>
|
</Journal>
|
||||||
<ArticleTitle>Effects of 5,6-dihydroxytryptamine on tyrosine-hydroxylase activity in central catecholaminergic neurons of the rat.</ArticleTitle>
|
<ArticleTitle>Effects of 5,6-dihydroxytryptamine on tyrosine-hydroxylase activity in central
|
||||||
|
catecholaminergic neurons of the rat.
|
||||||
|
</ArticleTitle>
|
||||||
<Pagination>
|
<Pagination>
|
||||||
<MedlinePgn>1739-42</MedlinePgn>
|
<MedlinePgn>1739-42</MedlinePgn>
|
||||||
</Pagination>
|
</Pagination>
|
||||||
|
@ -4602,12 +4623,19 @@
|
||||||
<Title>Arzneimittel-Forschung</Title>
|
<Title>Arzneimittel-Forschung</Title>
|
||||||
<ISOAbbreviation>Arzneimittelforschung</ISOAbbreviation>
|
<ISOAbbreviation>Arzneimittelforschung</ISOAbbreviation>
|
||||||
</Journal>
|
</Journal>
|
||||||
<ArticleTitle>[Biochemical studies on camomile components/III. In vitro studies about the antipeptic activity of (--)-alpha-bisabolol (author's transl)].</ArticleTitle>
|
<ArticleTitle>[Biochemical studies on camomile components/III. In vitro studies about the antipeptic
|
||||||
|
activity of (--)-alpha-bisabolol (author's transl)].
|
||||||
|
</ArticleTitle>
|
||||||
<Pagination>
|
<Pagination>
|
||||||
<MedlinePgn>1352-4</MedlinePgn>
|
<MedlinePgn>1352-4</MedlinePgn>
|
||||||
</Pagination>
|
</Pagination>
|
||||||
<Abstract>
|
<Abstract>
|
||||||
<AbstractText>(--)-alpha-Bisabolol has a primary antipeptic action depending on dosage, which is not caused by an alteration of the pH-value. The proteolytic activity of pepsin is reduced by 50 percent through addition of bisabolol in the ratio of 1/0.5. The antipeptic action of bisabolol only occurs in case of direct contact. In case of a previous contact with the substrate, the inhibiting effect is lost.</AbstractText>
|
<AbstractText>(--)-alpha-Bisabolol has a primary antipeptic action depending on dosage, which is not
|
||||||
|
caused by an alteration of the pH-value. The proteolytic activity of pepsin is reduced by 50
|
||||||
|
percent through addition of bisabolol in the ratio of 1/0.5. The antipeptic action of bisabolol
|
||||||
|
only occurs in case of direct contact. In case of a previous contact with the substrate, the
|
||||||
|
inhibiting effect is lost.
|
||||||
|
</AbstractText>
|
||||||
</Abstract>
|
</Abstract>
|
||||||
<AuthorList CompleteYN="Y">
|
<AuthorList CompleteYN="Y">
|
||||||
<Author ValidYN="Y">
|
<Author ValidYN="Y">
|
||||||
|
@ -4626,7 +4654,9 @@
|
||||||
<PublicationType UI="D004740">English Abstract</PublicationType>
|
<PublicationType UI="D004740">English Abstract</PublicationType>
|
||||||
<PublicationType UI="D016428">Journal Article</PublicationType>
|
<PublicationType UI="D016428">Journal Article</PublicationType>
|
||||||
</PublicationTypeList>
|
</PublicationTypeList>
|
||||||
<VernacularTitle>Biochemische Untersuchungen von Kamilleninhaltsstoffen. III. In-vitro-Versuche über die antipeptische Wirkung des (-)-alpha-Bisabolols</VernacularTitle>
|
<VernacularTitle>Biochemische Untersuchungen von Kamilleninhaltsstoffen. III. In-vitro-Versuche über die
|
||||||
|
antipeptische Wirkung des (-)-alpha-Bisabolols
|
||||||
|
</VernacularTitle>
|
||||||
</Article>
|
</Article>
|
||||||
<MedlineJournalInfo>
|
<MedlineJournalInfo>
|
||||||
<Country>Germany</Country>
|
<Country>Germany</Country>
|
||||||
|
@ -4753,12 +4783,37 @@
|
||||||
<Title>Arzneimittel-Forschung</Title>
|
<Title>Arzneimittel-Forschung</Title>
|
||||||
<ISOAbbreviation>Arzneimittelforschung</ISOAbbreviation>
|
<ISOAbbreviation>Arzneimittelforschung</ISOAbbreviation>
|
||||||
</Journal>
|
</Journal>
|
||||||
<ArticleTitle>[Demonstration of tumor inhibiting properties of a strongly immunostimulating low-molecular weight substance. Comparative studies with ifosfamide on the immuno-labile DS carcinosarcoma. Stimulation of the autoimmune activity for approx. 20 days by BA 1, a N-(2-cyanoethylene)-urea. Novel prophylactic possibilities].</ArticleTitle>
|
<ArticleTitle>[Demonstration of tumor inhibiting properties of a strongly immunostimulating
|
||||||
|
low-molecular weight substance. Comparative studies with ifosfamide on the immuno-labile DS
|
||||||
|
carcinosarcoma. Stimulation of the autoimmune activity for approx. 20 days by BA 1, a
|
||||||
|
N-(2-cyanoethylene)-urea. Novel prophylactic possibilities].
</ArticleTitle>
<Pagination>
<MedlinePgn>1369-79</MedlinePgn>
</Pagination>
<Abstract>
<AbstractText>A report is given on the recent discovery of outstanding immunological properties in BA 1 [N-(2-cyanoethylene)-urea] having a (low) molecular mass M = 111.104. Experiments in 214 DS carcinosarcoma bearing Wistar rats have shown that BA 1, at a dosage of only about 12 percent LD50 (150 mg kg) and negligible lethality (1.7 percent), results in a recovery rate of 40 percent without hyperglycemia and, in one test, of 80 percent with hyperglycemia. Under otherwise unchanged conditions the reference substance ifosfamide (IF) -- a further development of cyclophosphamide -- applied without hyperglycemia in its most efficient dosage of 47 percent LD50 (150 mg kg) brought about a recovery rate of 25 percent at a lethality of 18 percent. (Contrary to BA 1, 250-min hyperglycemia caused no further improvement of the recovery rate.) However this comparison is characterized by the fact that both substances exhibit two quite different (complementary) mechanisms of action. Leucocyte counts made after application of the said cancerostatics and dosages have shown a pronounced stimulation with BA 1 and with ifosfamide, the known suppression in the post-therapeutic interval usually found with standard cancerostatics. In combination with the cited plaque test for BA 1, blood pictures then allow conclusions on the immunity status. Since IF can be taken as one of the most efficient cancerostatics--there is no other chemotherapeutic known up to now that has a more significant effect on the DS carcinosarcoma in rats -- these findings are of special importance. Finally, the total amount of leucocytes and lymphocytes as well as their time behaviour was determined from the blood picture of tumour-free rats after i.v. application of BA 1. The thus obtained numerical values clearly show that further research work on the prophylactic use of this substance seems to be necessary and very promising.</AbstractText>
</Abstract>
<AuthorList CompleteYN="Y">
<Author ValidYN="Y">
@@ -4778,7 +4833,11 @@
<PublicationType UI="D004740">English Abstract</PublicationType>
<PublicationType UI="D016428">Journal Article</PublicationType>
</PublicationTypeList>
<VernacularTitle>Nachweis krebshemmender Eigenschaften einer stark immunstimulierenden Verbindung kleiner Molekülmasse. Versuche am immunlabilen DS-Karzinosarkom im Vergleich mit Ifosfamid. Stimulierung der körpereigenen Abwehr über etwa 20 Tage durch BA 1, einen N-(2-Cyanthylen)-harnstoff. Neue prophylaktische Möglichkeiten</VernacularTitle>
</Article>
<MedlineJournalInfo>
<Country>Germany</Country>
@@ -5016,7 +5075,20 @@
<MedlinePgn>1400-3</MedlinePgn>
</Pagination>
<Abstract>
<AbstractText>The distribution of blood flow to the subendocardial, medium and subepicardial layers of the left ventricular free wall was studied in anaesthetized dogs under normoxic (A), hypoxic (B) conditions and under pharmacologically induced (etafenone) coronary vasodilation (C). Regional myocardial blood flow was determined by means of the particle distribution method. In normoxia a transmural gradient of flow was observed, with the subendocardial layers receiving a significantly higher flow rate compared with the subepicardial layers. In hypoxia induced vasodilation this transmural gradient of flow was persistent. In contrast a marked redistribution of regional flow was observed under pharmacologically induced vasodilation. The transmural gradient decreased. In contrast to some findings these experiments demonstrate that a considerable vasodilatory capacity exists in all layers of the myocardium and can be utilized by drugs. The differences observed for the intramural distribution pattern of flow under hypoxia and drug induced vasodilation support the hypothesis that this pattern reflects corresponding gradients of regional myocardial metabolism.</AbstractText>
</Abstract>
<AuthorList CompleteYN="Y">
<Author ValidYN="Y">
@@ -5185,4 +5257,151 @@
</ReferenceList>
</PubmedData>
</PubmedArticle>
<PubmedArticle>
<MedlineCitation Status="MEDLINE" Owner="NLM">
<PMID Version="1">4917185</PMID>
<DateCompleted>
<Year>1970</Year>
<Month>10</Month>
<Day>27</Day>
</DateCompleted>
<DateRevised>
<Year>2018</Year>
<Month>11</Month>
<Day>13</Day>
</DateRevised>
<Article PubModel="Print">
<Journal>
<ISSN IssnType="Print">0003-6919</ISSN>
<JournalIssue CitedMedium="Print">
<Volume>19</Volume>
<Issue>6</Issue>
<PubDate>
<Year>1970</Year>
<Month>Jun</Month>
</PubDate>
</JournalIssue>
<Title>Applied microbiology</Title>
<ISOAbbreviation>Appl Microbiol</ISOAbbreviation>
</Journal>
<ArticleTitle>Bactericidal activity of a broad-spectrum illumination source.</ArticleTitle>
<Pagination>
<MedlinePgn>1013-4</MedlinePgn>
</Pagination>
<Abstract>
<AbstractText>Several hours of exposure to Vita-Lite lamps, which have a unique spectral distribution, give significant killing of cells of Staphylococcus aureus.</AbstractText>
</Abstract>
<AuthorList CompleteYN="Y">
<Author ValidYN="Y">
<LastName>Himmelfarb</LastName>
<ForeName>P</ForeName>
<Initials>P</Initials>
</Author>
<Author ValidYN="Y">
<LastName>Scott</LastName>
<ForeName>A</ForeName>
<Initials>A</Initials>
</Author>
<Author ValidYN="Y">
<LastName>Thayer</LastName>
<ForeName>P S</ForeName>
<Initials>PS</Initials>
</Author>
</AuthorList>
<Language>eng</Language>
<PublicationTypeList>
<PublicationType UI="D016428">Journal Article</PublicationType>
</PublicationTypeList>
</Article>
<MedlineJournalInfo>
<Country>United States</Country>
<MedlineTA>Appl Microbiol</MedlineTA>
<NlmUniqueID>7605802</NlmUniqueID>
<ISSNLinking>0003-6919</ISSNLinking>
</MedlineJournalInfo>
<CitationSubset>IM</CitationSubset>
<MeshHeadingList>
<MeshHeading>
<DescriptorName UI="D001431" MajorTopicYN="N">Bacteriological Techniques</DescriptorName>
<QualifierName UI="Q000295" MajorTopicYN="Y">instrumentation</QualifierName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D008027" MajorTopicYN="Y">Light</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D011830" MajorTopicYN="N">Radiation Effects</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D012706" MajorTopicYN="N">Serratia marcescens</DescriptorName>
<QualifierName UI="Q000254" MajorTopicYN="N">growth &amp; development</QualifierName>
<QualifierName UI="Q000528" MajorTopicYN="Y">radiation effects</QualifierName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D013210" MajorTopicYN="N">Staphylococcus</DescriptorName>
<QualifierName UI="Q000254" MajorTopicYN="N">growth &amp; development</QualifierName>
<QualifierName UI="Q000528" MajorTopicYN="Y">radiation effects</QualifierName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D013242" MajorTopicYN="N">Sterilization</DescriptorName>
</MeshHeading>
</MeshHeadingList>
</MedlineCitation>
<PubmedData>
<History>
<PubMedPubDate PubStatus="pubmed">
<Year>1970</Year>
<Month>6</Month>
<Day>1</Day>
</PubMedPubDate>
<PubMedPubDate PubStatus="medline">
<Year>1970</Year>
<Month>6</Month>
<Day>1</Day>
<Hour>0</Hour>
<Minute>1</Minute>
</PubMedPubDate>
<PubMedPubDate PubStatus="entrez">
<Year>1970</Year>
<Month>6</Month>
<Day>1</Day>
<Hour>0</Hour>
<Minute>0</Minute>
</PubMedPubDate>
</History>
<PublicationStatus>ppublish</PublicationStatus>
<ArticleIdList>
<ArticleId IdType="pubmed">4917185</ArticleId>
<ArticleId IdType="pmc">PMC376844</ArticleId>
</ArticleIdList>
<ReferenceList>
<Reference>
<Citation>Photochem Photobiol. 1969 Jan;9(1):99-102</Citation>
<ArticleIdList>
<ArticleId IdType="pubmed">4889809</ArticleId>
</ArticleIdList>
</Reference>
<Reference>
<Citation>Endocrinology. 1969 Dec;85(6):1218-21</Citation>
<ArticleIdList>
<ArticleId IdType="pubmed">5347623</ArticleId>
</ArticleIdList>
</Reference>
<Reference>
<Citation>Arch Mikrobiol. 1956;24(1):60-79</Citation>
<ArticleIdList>
<ArticleId IdType="pubmed">13327987</ArticleId>
</ArticleIdList>
</Reference>
<Reference>
<Citation>J Bacteriol. 1941 Sep;42(3):353-66</Citation>
<ArticleIdList>
<ArticleId IdType="pubmed">16560457</ArticleId>
</ArticleIdList>
</Reference>
</ReferenceList>
</PubmedData>
</PubmedArticle>
</PubmedArticleSet>
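A quick well-formedness probe of the enlarged fixture (an illustrative JAXP snippet, not the project's own parser; the fixture path is a placeholder):

    // Sanity check: the newly added PubmedArticle parses and exposes its ids.
    import javax.xml.parsers.DocumentBuilderFactory;
    import javax.xml.xpath.XPath;
    import javax.xml.xpath.XPathFactory;
    import org.w3c.dom.Document;

    public class PubmedFixtureCheck {
        public static void main(String[] args) throws Exception {
            Document doc = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder()
                .parse(new java.io.File("pubmed.xml")); // placeholder path to the fixture above
            XPath xp = XPathFactory.newInstance().newXPath();
            String pmid = xp.evaluate("//PubmedArticle[last()]//PMID", doc);
            String pmcid = xp.evaluate("//PubmedArticle[last()]//ArticleId[@IdType='pmc']", doc);
            System.out.println(pmid + " / " + pmcid); // expected: 4917185 / PMC376844
        }
    }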
@@ -0,0 +1,214 @@
<xsl:stylesheet
    xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
    xmlns:oaire="http://namespace.openaire.eu/schema/oaire/"
    xmlns:vocabulary="http://eu/dnetlib/transform/clean"
    xmlns:dateCleaner="http://eu/dnetlib/transform/dateISO"
    xmlns:oaf="http://namespace.openaire.eu/oaf"
    xmlns:oai="http://www.openarchives.org/OAI/2.0/"
    xmlns:datacite="http://datacite.org/schema/kernel-4"
    xmlns:dri="http://www.driver-repository.eu/namespace/dri"
    xmlns:xs="http://www.w3.org/2001/XMLSchema"
    xmlns:dr="http://www.driver-repository.eu/namespace/dr"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xmlns:dc="http://purl.org/dc/elements/1.1/"
    exclude-result-prefixes="xsl vocabulary dateCleaner" version="2.0">
  <xsl:param name="varOfficialName" />
  <xsl:param name="varDataSourceId" />
  <xsl:param name="varFP7" select="'corda_______::'" />
  <xsl:param name="varH2020" select="'corda__h2020::'" />
  <xsl:param name="repoCode" select="substring-before(//*[local-name() = 'header']/*[local-name()='recordIdentifier'], ':')" />
  <xsl:param name="index" select="0" />
  <xsl:param name="transDate" select="current-dateTime()" />

  <xsl:template match="/">
    <record>
      <xsl:apply-templates select="//*[local-name() = 'header']" />
      <metadata>
        <xsl:call-template name="allElements">
          <xsl:with-param name="sourceElement" select="//dc:title" />
          <xsl:with-param name="targetElement" select="'dc:title'" />
        </xsl:call-template>
        <xsl:call-template name="allElements">
          <xsl:with-param name="sourceElement" select="//dc:creator/replace(., '^(.*)\|.*$', '$1')" />
          <xsl:with-param name="targetElement" select="'dc:creator'" />
        </xsl:call-template>
        <xsl:call-template name="allElements">
          <xsl:with-param name="sourceElement" select="//dc:contributor" />
          <xsl:with-param name="targetElement" select="'dc:contributor'" />
        </xsl:call-template>
        <xsl:call-template name="allElements">
          <xsl:with-param name="sourceElement" select="//dc:description" />
          <xsl:with-param name="targetElement" select="'dc:description'" />
        </xsl:call-template>
        <xsl:call-template name="allElements">
          <xsl:with-param name="sourceElement" select="//dc:subject" />
          <xsl:with-param name="targetElement" select="'dc:subject'" />
        </xsl:call-template>
        <xsl:call-template name="allElements">
          <xsl:with-param name="sourceElement" select="//dc:publisher" />
          <xsl:with-param name="targetElement" select="'dc:publisher'" />
        </xsl:call-template>
        <xsl:call-template name="allElements">
          <xsl:with-param name="sourceElement" select="//dc:format" />
          <xsl:with-param name="targetElement" select="'dc:format'" />
        </xsl:call-template>
        <xsl:call-template name="allElements">
          <xsl:with-param name="sourceElement" select="//dc:type" />
          <xsl:with-param name="targetElement" select="'dc:type'" />
        </xsl:call-template>
        <xsl:call-template name="allElements">
          <xsl:with-param name="sourceElement" select="//dc:source" />
          <xsl:with-param name="targetElement" select="'dc:source'" />
        </xsl:call-template>
        <dc:language>
          <xsl:value-of select="vocabulary:clean( //dc:language, 'dnet:languages')" />
        </dc:language>
        <xsl:call-template name="allElements">
          <xsl:with-param name="sourceElement" select="//dc:rights" />
          <xsl:with-param name="targetElement" select="'dc:rights'" />
        </xsl:call-template>
        <xsl:call-template name="allElements">
          <xsl:with-param name="sourceElement" select="//dc:relation[not(starts-with(.,'info:cnr-pdr'))]" />
          <xsl:with-param name="targetElement" select="'dc:relation'" />
        </xsl:call-template>

        <xsl:call-template name="allElements">
          <xsl:with-param name="sourceElement" select="//dc:identifier[starts-with(., 'http')]" />
          <xsl:with-param name="targetElement" select="'dc:identifier'" />
        </xsl:call-template>
        <xsl:for-each select="//dc:relation">
          <xsl:if test="matches(normalize-space(.), '(info:eu-repo/grantagreement/ec/fp7/)(\d\d\d\d\d\d)(.*)', 'i')">
            <oaf:projectid>
              <xsl:value-of select="concat($varFP7, replace(normalize-space(.), '(info:eu-repo/grantagreement/ec/fp7/)(\d\d\d\d\d\d)(.*)', '$2', 'i'))" />
            </oaf:projectid>
          </xsl:if>
          <xsl:if test="matches(normalize-space(.), '(info:eu-repo/grantagreement/ec/h2020/)(\d\d\d\d\d\d)(.*)', 'i')">
            <oaf:projectid>
              <xsl:value-of select="concat($varH2020, replace(normalize-space(.), '(info:eu-repo/grantagreement/ec/h2020/)(\d\d\d\d\d\d)(.*)', '$2', 'i'))" />
            </oaf:projectid>
          </xsl:if>
        </xsl:for-each>

        <oaf:accessrights>
          <xsl:value-of select="vocabulary:clean( //dc:rights, 'dnet:access_modes')" />
        </oaf:accessrights>

        <xsl:variable name="varCobjCategory" select="vocabulary:clean( //dc:type, 'dnet:publication_resource')" />
        <xsl:variable name="varSuperType" select="vocabulary:clean( $varCobjCategory, 'dnet:result_typologies')" />
        <dr:CobjCategory type="{$varSuperType}"><xsl:value-of select="$varCobjCategory" /></dr:CobjCategory>

        <xsl:variable name="varRefereedConvt" select="for $i in (//dc:type, //dc:description, //oai:setSpec) return vocabulary:clean( normalize-space($i), 'dnet:review_levels')" />
        <xsl:variable name="varRefereedIdntf" select="(//*[string(node-name(.)) = 'dc:identifier' and matches(lower-case(.), '(^|.*[\.\-_/\s\(\)%\d#])pre[\.\-_/\s\(\)%\d#]?prints?([\.\-_/\s\(\)%\d#].*)?$')][count(//dc:identifier) = 1]/'0002', //*[string(node-name(.)) = 'dc:identifier' and matches(lower-case(.), '(^|.*[\.\-_/\s\(\)%\d#])refereed([\.\-_/\s\(\)\d%\d#].*)?$')]/'0001', //*[string(node-name(.)) = 'dc:identifier' and matches(lower-case(.), '.*-peer-reviewed-(fulltext-)?article-.*')]/'0001')" />
        <xsl:variable name="varRefereedSourc" select="//*[string(node-name(.)) = ('dc:source', 'dc:publisher') and matches(lower-case(.), '^(.*\s)?pre[\s\-_]*prints?([\s\.,].*)?$')]/'0002'" />
        <xsl:variable name="varRefereedDescr" select="(//dc:description[matches(lower-case(.), '.*(this\s*book|this\s*volume|it)\s*constitutes\s*the\s*(thoroughly\s*)?refereed') or matches(lower-case(.), '.*peer[\.\-_/\s\(\)]?review\s*under\s*responsibility\s*of.*') or matches(lower-case(.), '(this|a)\s*(article|preprint)\s*(has\s*been\s*)?(peer[\-\s]*)?reviewed\s*and\s*recommended\s*by\s*peer[\-\s]*community')]/'0001', //dc:description[matches(., '^version\s*(préliminaire.*|preliminary.*|0$)')]/'0002')" />
        <xsl:variable name="varRefereedTitle" select="(//dc:title[matches(lower-case(.), '.*\[.*peer[\s\-\._]*review\s*:.*\]\s*$')]/'0001', //dc:title[matches(lower-case(.), '.*\(\s*pre[\s\-\._]*prints?\s*\)\s*$')]/'0002')" />
        <xsl:variable name="varRefereedSubjt" select="(//dc:subject[matches(lower-case(.), '^\s*refereed\s*$')][//oaf:datasourceprefix = 'narcis______']/'0001', //dc:subject[matches(lower-case(.), '^\s*no[nt].{0,3}refereed\s*$')][//oaf:datasourceprefix = 'narcis______']/'0002')" />
        <xsl:variable name="varRefereed" select="($varRefereedConvt, $varRefereedIdntf, $varRefereedSourc, $varRefereedDescr, $varRefereedTitle, $varRefereedSubjt)" />
        <xsl:choose>
          <xsl:when test="count($varRefereed[. = '0001']) > 0">
            <oaf:refereed>
              <xsl:value-of select="'0001'" />
            </oaf:refereed>
          </xsl:when>
          <xsl:when test="count($varRefereed[. = '0002']) > 0">
            <oaf:refereed>
              <xsl:value-of select="'0002'" />
            </oaf:refereed>
          </xsl:when>
        </xsl:choose>

        <oaf:dateAccepted>
          <xsl:value-of select="dateCleaner:dateISO( //dc:date[1] )" />
        </oaf:dateAccepted>

        <xsl:if test="//dc:relation[starts-with(., 'http')] and //dc:rights[.='info:eu-repo/semantics/openAccess']">
          <oaf:fulltext>
            <xsl:value-of select="//dc:relation[starts-with(., 'http')]" />
          </oaf:fulltext>
        </xsl:if>

        <oaf:hostedBy name="{$varOfficialName}" id="{$varDataSourceId}" />
        <oaf:collectedFrom name="{$varOfficialName}" id="{$varDataSourceId}" />

        <xsl:variable name="varKnownFileEndings" select="('.bmp', '.doc', '.docx', '.epub', '.flv', '.jpeg', '.jpg', '.m4v', '.mp4', '.mpg', '.odp', '.pdf', '.png', '.ppt', '.tiv', '.txt', '.xls', '.xlsx', '.zip')" />
        <xsl:variable name="varIdDoi" select="distinct-values((//dc:identifier[starts-with(., '10.')][matches(., '(10[.][0-9]{4,}[^\s/&gt;]*/[^\s&gt;]+)')], //dc:identifier[starts-with(., 'http') and (contains(., '://dx.doi.org/10.') or contains(., '://doi.org/10.'))]/substring-after(., 'doi.org/'), //dc:identifier[starts-with(lower-case(.), 'doi:10.')]/substring-after(lower-case(.), 'doi:')))" />
        <xsl:for-each select="$varIdDoi">
          <oaf:identifier identifierType="doi">
            <xsl:value-of select="." />
          </oaf:identifier>
        </xsl:for-each>

        <xsl:variable name="varIdHdl" select="distinct-values(//dc:identifier[starts-with(., 'http') and contains(., '://hdl.handle.net/')]/substring-after(., 'hdl.handle.net/'))" />
        <xsl:for-each select="$varIdHdl">
          <oaf:identifier identifierType="handle">
            <xsl:value-of select="." />
          </oaf:identifier>
        </xsl:for-each>

        <xsl:variable name="varIdUrn" select="distinct-values(//dc:identifier[starts-with(., 'urn:nbn:nl:') or starts-with(., 'URN:NBN:NL:')])" />
        <xsl:for-each select="$varIdUrn">
          <oaf:identifier identifierType="urn">
            <xsl:value-of select="." />
          </oaf:identifier>
        </xsl:for-each>

        <xsl:variable name="varOrigBaseUrl" select="//*[local-name() = 'about']/*[local-name() = 'provenance']//*[local-name() = 'originDescription' and not(./*[local-name() = 'originDescription'])]/*[local-name() = 'baseURL']" />
        <xsl:variable name="varIdLdpg" select="distinct-values(//dc:identifier[(contains(substring-after(., '://'), '/') and contains($varOrigBaseUrl, substring-before(substring-after(., '://'), '/'))) or (contains(substring-after(., '://'), ':') and contains($varOrigBaseUrl, substring-before(substring-after(., '://'), ':')))][not(replace(lower-case(.), '.*(\.[a-z]*)$', '$1') = $varKnownFileEndings)])" />
        <xsl:for-each select="$varIdLdpg">
          <oaf:identifier identifierType="landingPage">
            <xsl:value-of select="." />
          </oaf:identifier>
        </xsl:for-each>

        <xsl:variable name="varIdUrl" select="distinct-values(//dc:identifier[starts-with(., 'http')][not(contains(., '://dx.doi.org/') or contains(., '://doi.org/') or contains(., '://hdl.handle.net/'))][count(index-of($varIdLdpg, .)) = 0])" />
        <xsl:for-each select="$varIdUrl">
          <oaf:identifier identifierType="url">
            <xsl:value-of select="." />
          </oaf:identifier>
        </xsl:for-each>

        <xsl:for-each select="//oai:setSpec">
          <xsl:variable name="rorDsId" select="vocabulary:clean(., 'cnr:institutes')" />
          <xsl:if test="contains($rorDsId, '/ror.org/')">
            <oaf:relation relType="resultOrganization" subRelType="affiliation" relClass="hasAuthorInstitution">
              <xsl:value-of select="concat('ror_________::', $rorDsId)" />
            </oaf:relation>
          </xsl:if>
        </xsl:for-each>

      </metadata>

      <xsl:copy-of select="//*[local-name() = 'about']" />
    </record>
  </xsl:template>

  <xsl:template name="allElements">
    <xsl:param name="sourceElement" />
    <xsl:param name="targetElement" />
    <xsl:for-each select="$sourceElement">
      <xsl:element name="{$targetElement}">
        <xsl:value-of select="normalize-space(.)" />
      </xsl:element>
    </xsl:for-each>
  </xsl:template>

  <xsl:template match="//*[local-name() = 'header']">
    <xsl:copy>
      <xsl:apply-templates select="node()|@*" />
      <xsl:element name="dr:dateOfTransformation">
        <xsl:value-of select="$transDate" />
      </xsl:element>
    </xsl:copy>
  </xsl:template>

  <xsl:template match="node()|@*">
    <xsl:copy>
      <xsl:apply-templates select="node()|@*" />
    </xsl:copy>
  </xsl:template>

</xsl:stylesheet>
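For local experimentation, a minimal sketch of how this stylesheet could be run with Saxon's s9api outside the D-Net transformation engine. The vocabulary:clean and dateCleaner:dateISO extension functions are bound by the platform at runtime, so the sketch registers echo stubs for them purely so the XSLT compiles; the file names, parameter values, and stub behaviour are illustrative assumptions, not part of this change:

    import java.io.File;
    import javax.xml.transform.stream.StreamSource;
    import net.sf.saxon.s9api.*;

    public class OaiToOafSketch {

        // Builds a stub extension function of the given name and arity that simply
        // echoes its first argument as a string (the real implementations live in
        // the transformation engine and consult the vocabularies shown below).
        private static ExtensionFunction echoStub(String ns, String local, int arity) {
            return new ExtensionFunction() {
                @Override
                public QName getName() {
                    return new QName(ns, local);
                }

                @Override
                public SequenceType getResultType() {
                    return SequenceType.makeSequenceType(ItemType.STRING, OccurrenceIndicator.ZERO_OR_ONE);
                }

                @Override
                public SequenceType[] getArgumentTypes() {
                    SequenceType[] types = new SequenceType[arity];
                    java.util.Arrays.fill(
                        types, SequenceType.makeSequenceType(ItemType.ANY_ITEM, OccurrenceIndicator.ZERO_OR_MORE));
                    return types;
                }

                @Override
                public XdmValue call(XdmValue[] args) {
                    return args[0].size() == 0
                        ? XdmEmptySequence.getInstance()
                        : new XdmAtomicValue(args[0].itemAt(0).getStringValue());
                }
            };
        }

        public static void main(String[] args) throws SaxonApiException {
            Processor processor = new Processor(false);
            processor.registerExtensionFunction(echoStub("http://eu/dnetlib/transform/clean", "clean", 2));
            processor.registerExtensionFunction(echoStub("http://eu/dnetlib/transform/dateISO", "dateISO", 1));

            XsltExecutable executable = processor.newXsltCompiler()
                .compile(new StreamSource(new File("oai_cnr.xsl"))); // hypothetical file names
            XsltTransformer transformer = executable.load();
            transformer.setParameter(new QName("varOfficialName"), new XdmAtomicValue("CNR ExploRA"));
            transformer.setParameter(new QName("varDataSourceId"), new XdmAtomicValue("dsId________"));
            transformer.setSource(new StreamSource(new File("cnr_record.xml")));

            Serializer serializer = processor.newSerializer(System.out);
            serializer.setOutputProperty(Serializer.Property.INDENT, "yes");
            transformer.setDestination(serializer);
            transformer.transform();
        }
    }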
@@ -0,0 +1,57 @@
<?xml version="1.0" encoding="UTF-8"?>
<record xmlns="http://www.openarchives.org/OAI/2.0/">
  <header>
    <identifier><![CDATA[oai:it.cnr:prodotti:433382]]></identifier>
    <datestamp><![CDATA[2020-11-30T15:32:03Z]]></datestamp>
    <setSpec><![CDATA[openaire]]></setSpec>
    <setSpec><![CDATA[CDS027]]></setSpec>
    <setSpec><![CDATA[CDS080]]></setSpec>
  </header>
  <metadata>
    <oai_dc:dc xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/oai_dc/ http://www.openarchives.org/OAI/2.0/oai_dc.xsd">
      <dc:type><![CDATA[info:eu-repo/semantics/conferenceObject]]></dc:type>
      <dc:type><![CDATA[Presentazione]]></dc:type>
      <dc:title><![CDATA[A multiscale observing approach for understanding acidification process in a marginal sea (northern Adriatic)]]></dc:title>
      <dc:creator><![CDATA[Cantoni C.]]></dc:creator>
      <dc:creator><![CDATA[Barba L.]]></dc:creator>
      <dc:creator><![CDATA[Bastianini M.]]></dc:creator>
      <dc:creator><![CDATA[Bortoluzzi G.]]></dc:creator>
      <dc:creator><![CDATA[Celio M.]]></dc:creator>
      <dc:creator><![CDATA[Chiggiato J.]]></dc:creator>
      <dc:creator><![CDATA[Cozzi S.]]></dc:creator>
      <dc:creator><![CDATA[Luchetta A.]]></dc:creator>
      <dc:creator><![CDATA[Ravaioli M.]]></dc:creator>
      <dc:creator><![CDATA[Sparnocchia S.]]></dc:creator>
      <dc:language><![CDATA[eng]]></dc:language>
      <dc:description><![CDATA[The Northern Adriatic is a shallow, semi-enclosed industrialized sub-basin of the Mediterranean affected by significant ecosystem changes, which are studied through several research activities including the long-term monitoring of ILTER international network. Changes of pHT (-0.06) and TA (+74 µmol/kg) in dense winter waters over the last 25 years already showed that this area is prone to acidification process under a complex inorganic carbon chemistry variability. To understand these changes, monthly sampling of the main biogeochemical and biological parameters has been carrying out since 2008 by a fixed station (PALOMA, Gulf of Trieste). In 2013 the site has been implemented with continuous pCO2 measurements at 3 m depth and has been regularly visiting during basin wide surveys for the last two years. The combination of automated in situ measurements, monthly samplings and basin scale oceanographic cruises is used to better understand the processes controlling air-sea CO2 fluxes and inorganic carbon chemistry under three different scenarios: an extreme event of dense water formation, the phytoplankton blooms associated with riverine inputs and the late-summer marked oxygen under saturation in the deeper waters.]]></dc:description>
      <dc:source><![CDATA[ASLO - Aquatic Sciences Meeting, Granada, Spagna, 22-27/02/2015]]></dc:source>
      <dc:source><![CDATA[info:cnr-pdr/source/autori:Cantoni C., Barba L., Bastianini M., Bortoluzzi G., Celio M., Chiggiato J., Cozzi S., Luchetta A., Ravaioli M., Sparnocchia S./congresso_nome:ASLO - Aquatic Sciences Meeting/congresso_luogo:Granada, Spagna/congresso_data:22-27%2F02%2F2015/anno:2015/pagina_da:/pagina_a:/intervallo_pagine:]]></dc:source>
      <dc:date><![CDATA[2015]]></dc:date>
      <dc:identifier><![CDATA[http://www.cnr.it/prodotto/i/433382]]></dc:identifier>
      <dc:identifier><![CDATA[https://publications.cnr.it/doc/433382]]></dc:identifier>
      <dc:identifier><![CDATA[http://sgmeet.com/aslo/granada2015/]]></dc:identifier>
      <dc:relation><![CDATA[info:eu-repo/grantAgreement/EC/FP7/211574//Integrated Carbon Observation System/ICOS]]></dc:relation>
      <dc:relation><![CDATA[info:cnr-pdr/author/matricola:502/SPARNOCCHIA/STEFANIA]]></dc:relation>
      <dc:relation><![CDATA[info:cnr-pdr/author/matricola:990/BASTIANINI/MAURO]]></dc:relation>
      <dc:relation><![CDATA[info:cnr-pdr/author/matricola:5185/BARBA/LUISA]]></dc:relation>
      <dc:relation><![CDATA[info:cnr-pdr/author/matricola:5453/COZZI/STEFANO]]></dc:relation>
      <dc:relation><![CDATA[info:cnr-pdr/author/matricola:12491/LUCHETTA/ANNA]]></dc:relation>
      <dc:relation><![CDATA[info:cnr-pdr/author/matricola:12837/CANTONI/CAROLINA]]></dc:relation>
      <dc:relation><![CDATA[info:cnr-pdr/author/matricola:18161/RAVAIOLI/MARIANGELA]]></dc:relation>
      <dc:rights><![CDATA[info:eu-repo/semantics/openAccess]]></dc:rights>
      <dc:subject><![CDATA[ocean acidification]]></dc:subject>
      <dc:subject><![CDATA[Northern Adriatic Sea]]></dc:subject>
      <dc:subject><![CDATA[inorganic carbon system]]></dc:subject>
      <dc:subject><![CDATA[PALOMA]]></dc:subject>
    </oai_dc:dc>
  </metadata>
</record>
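For orientation, feeding this record to the stylesheet above should yield, among other fields (expected values derived by hand from the templates and from the cnr:institutes vocabulary in the next hunk):

    <oaf:projectid>corda_______::211574</oaf:projectid>
    <oaf:relation relType="resultOrganization" subRelType="affiliation" relClass="hasAuthorInstitution">ror_________::https://ror.org/05wba8r86</oaf:relation>
    <oaf:relation relType="resultOrganization" subRelType="affiliation" relClass="hasAuthorInstitution">ror_________::https://ror.org/02hdf6119</oaf:relation>

The projectid comes from the dc:relation grant-agreement string: the FP7 pattern is matched case-insensitively and group 2 captures the six-digit grant number 211574, prefixed with $varFP7. The two affiliation relations come from the CDS027 and CDS080 setSpec values, which clean to ROR ids and therefore pass the contains($rorDsId, '/ror.org/') guard; the 'openaire' setSpec has no such mapping and is expected to be filtered out.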
@@ -1233,3 +1233,267 @@ dnet:review_levels @=@ 0001 @=@ 印刷物/電子媒体-紀要論文(査読有
dnet:review_levels @=@ 0001 @=@ 印刷物/電子媒体-雑誌記事(査読有り)
dnet:review_levels @=@ 0001 @=@ 原著論文(査読有り)
dnet:review_levels @=@ 0001 @=@ 査読論文
cnr:institutes @=@ https://ror.org/00brf2d87 @=@ CDS001
cnr:institutes @=@ https://ror.org/00brf2d87 @=@ IDASC - Istituto di Acustica e Sensoristica \"Orso Mario Corbino\"
cnr:institutes @=@ https://ror.org/006qkqr45 @=@ CDS002
cnr:institutes @=@ https://ror.org/006qkqr45 @=@ IAMC - Istituto per l'ambiente marino costiero
cnr:institutes @=@ https://ror.org/054ye0e45 @=@ CDS003
cnr:institutes @=@ https://ror.org/054ye0e45 @=@ IASI - Istituto di analisi dei sistemi ed informatica \"Antonio Ruberti\"
cnr:institutes @=@ https://ror.org/00ygy3d85 @=@ CDS004
cnr:institutes @=@ https://ror.org/00ygy3d85 @=@ IAC - Istituto per le applicazioni del calcolo \"Mauro Picone\"
cnr:institutes @=@ https://ror.org/000sy1f36 @=@ CDS005
cnr:institutes @=@ https://ror.org/000sy1f36 @=@ IASF - Istituto di astrofisica spaziale e fisica cosmica
cnr:institutes @=@ https://ror.org/00x5wpm25 @=@ CDS006
cnr:institutes @=@ https://ror.org/00x5wpm25 @=@ IBAM - Istituto per i beni archeologici e monumentali
cnr:institutes @=@ https://ror.org/03eqeqg74 @=@ CDS007
cnr:institutes @=@ https://ror.org/03eqeqg74 @=@ IBP - Istituto di biochimica delle proteine
cnr:institutes @=@ https://ror.org/041xzk838 @=@ CDS008
cnr:institutes @=@ https://ror.org/041xzk838 @=@ IBF - Istituto di biofisica
cnr:institutes @=@ https://ror.org/00s2j5046 @=@ CDS009
cnr:institutes @=@ https://ror.org/00s2j5046 @=@ IBFM - Istituto di bioimmagini e fisiologia molecolare
cnr:institutes @=@ __CDS010__ @=@ CDS010
cnr:institutes @=@ __CDS010__ @=@ IBAF - Istituto di biologia agro-ambientale e forestale
cnr:institutes @=@ __CDS011__ @=@ CDS011
cnr:institutes @=@ __CDS011__ @=@ IBC - Istituto di biologia cellulare
cnr:institutes @=@ https://ror.org/02e5sbe24 @=@ CDS012
cnr:institutes @=@ https://ror.org/02e5sbe24 @=@ IBBA - Istituto di biologia e biotecnologia agraria
cnr:institutes @=@ https://ror.org/01nyatq71 @=@ CDS013
cnr:institutes @=@ https://ror.org/01nyatq71 @=@ IBPM - Istituto di biologia e patologia molecolari
cnr:institutes @=@ https://ror.org/01dy2q607 @=@ CDS014
cnr:institutes @=@ https://ror.org/01dy2q607 @=@ IBIM - Istituto di biomedicina e di immunologia molecolare \"Alberto Monroy\"
cnr:institutes @=@ https://ror.org/05nzf7q96 @=@ CDS015
cnr:institutes @=@ https://ror.org/05nzf7q96 @=@ IBIOM - Istituto di Biomembrane, Bioenergetica e Biotecnologie Molecolari
cnr:institutes @=@ https://ror.org/05m1yqp60 @=@ CDS016
cnr:institutes @=@ https://ror.org/05m1yqp60 @=@ IBIMET - Istituto di biometeorologia
cnr:institutes @=@ https://ror.org/03rqtqb02 @=@ CDS017
cnr:institutes @=@ https://ror.org/03rqtqb02 @=@ IBB - Istituto di biostrutture e bioimmagini
cnr:institutes @=@ https://ror.org/04r5fge26 @=@ CDS018
cnr:institutes @=@ https://ror.org/04r5fge26 @=@ ICAR - Istituto di calcolo e reti ad alte prestazioni
cnr:institutes @=@ https://ror.org/03wyf0g15 @=@ CDS019
cnr:institutes @=@ https://ror.org/03wyf0g15 @=@ ICB - Istituto di chimica biomolecolare
cnr:institutes @=@ https://ror.org/02fkw1114 @=@ CDS020
cnr:institutes @=@ https://ror.org/02fkw1114 @=@ ICCOM - Istituto di chimica dei composti organo metallici
cnr:institutes @=@ https://ror.org/0141vn777 @=@ CDS021
cnr:institutes @=@ https://ror.org/0141vn777 @=@ ICRM - Istituto di chimica del riconoscimento molecolare
cnr:institutes @=@ __CDS022__ @=@ CDS022
cnr:institutes @=@ __CDS022__ @=@ ICTP - Istituto di chimica e tecnologia dei polimeri
cnr:institutes @=@ __CDS023__ @=@ CDS023
cnr:institutes @=@ __CDS023__ @=@ ICIS - Istituto di chimica inorganica e delle superfici
cnr:institutes @=@ https://ror.org/00be3zh53 @=@ CDS024
cnr:institutes @=@ https://ror.org/00be3zh53 @=@ ISASI - Istituto di Scienze Applicate e Sistemi Intelligenti \"Eduardo Caianiello\"
cnr:institutes @=@ __CDS025__ @=@ CDS025
cnr:institutes @=@ __CDS025__ @=@ ICEVO - Istituto di Studi sulle Civiltà dell'Egeo e del Vicino Oriente
cnr:institutes @=@ https://ror.org/02ynrme92 @=@ CDS026
cnr:institutes @=@ https://ror.org/02ynrme92 @=@ ICVBC - Istituto per la conservazione e valorizzazione dei beni culturali
cnr:institutes @=@ https://ror.org/05wba8r86 @=@ CDS027
cnr:institutes @=@ https://ror.org/05wba8r86 @=@ IC - Istituto di cristallografia
cnr:institutes @=@ https://ror.org/03z58xd74 @=@ CDS028
cnr:institutes @=@ https://ror.org/03z58xd74 @=@ IDPA - Istituto per la dinamica dei processi ambientali
cnr:institutes @=@ https://ror.org/00n4jbh84 @=@ CDS029
cnr:institutes @=@ https://ror.org/00n4jbh84 @=@ IEIIT - Istituto di elettronica e di ingegneria dell'informazione e delle telecomunicazioni
cnr:institutes @=@ https://ror.org/04sn06036 @=@ CDS030
cnr:institutes @=@ https://ror.org/04sn06036 @=@ IEOS - Istituto per l'endocrinologia e l'oncologia \"Gaetano Salvatore\"
cnr:institutes @=@ https://ror.org/01rg40y89 @=@ CDS031
cnr:institutes @=@ https://ror.org/01rg40y89 @=@ ICMATE - Istituto di Chimica della Materia Condensata e di Tecnologie per l'Energia
cnr:institutes @=@ https://ror.org/00dqega85 @=@ CDS032
cnr:institutes @=@ https://ror.org/00dqega85 @=@ IFAC - Istituto di fisica applicata \"Nello Carrara\"
cnr:institutes @=@ https://ror.org/02n2bgz18 @=@ CDS033
cnr:institutes @=@ https://ror.org/02n2bgz18 @=@ IFP - Istituto di fisica del plasma \"Piero Caldirola\"
cnr:institutes @=@ __CDS034__ @=@ CDS034
cnr:institutes @=@ __CDS034__ @=@ IFSI - Istituto di fisica dello spazio interplanetario
cnr:institutes @=@ https://ror.org/01kdj2848 @=@ CDS035
cnr:institutes @=@ https://ror.org/01kdj2848 @=@ IFC - Istituto di fisiologia clinica
cnr:institutes @=@ https://ror.org/049ebw417 @=@ CDS036
cnr:institutes @=@ https://ror.org/049ebw417 @=@ IFN - Istituto di fotonica e nanotecnologie
cnr:institutes @=@ https://ror.org/01f5tnx94 @=@ CDS037
cnr:institutes @=@ https://ror.org/01f5tnx94 @=@ IGI - Istituto gas ionizzati
cnr:institutes @=@ __CDS038__ @=@ CDS038
cnr:institutes @=@ __CDS038__ @=@ IGP - Istituto di genetica delle popolazioni
cnr:institutes @=@ https://ror.org/04hadk112 @=@ CDS039
cnr:institutes @=@ https://ror.org/04hadk112 @=@ IGB - Istituto di genetica e biofisica \"Adriano Buzzati Traverso\"
cnr:institutes @=@ https://ror.org/03qpd8w66 @=@ CDS040
cnr:institutes @=@ https://ror.org/03qpd8w66 @=@ IGM - Istituto di genetica molecolare \"Luigi Luca Cavalli Sforza\"
cnr:institutes @=@ https://ror.org/01gtsa866 @=@ CDS041
cnr:institutes @=@ https://ror.org/01gtsa866 @=@ IBBR - Istituto di Bioscienze e Biorisorse
cnr:institutes @=@ https://ror.org/00ytw6m58 @=@ CDS042
cnr:institutes @=@ https://ror.org/00ytw6m58 @=@ IGAG - Istituto di geologia ambientale e geoingegneria
cnr:institutes @=@ https://ror.org/015bmra78 @=@ CDS043
cnr:institutes @=@ https://ror.org/015bmra78 @=@ IGG - Istituto di geoscienze e georisorse
cnr:institutes @=@ https://ror.org/02gdcn153 @=@ CDS044
cnr:institutes @=@ https://ror.org/02gdcn153 @=@ IIT - Istituto di informatica e telematica
cnr:institutes @=@ __CDS045__ @=@ CDS045
cnr:institutes @=@ __CDS045__ @=@ ISIB - Istituto di ingegneria biomedica
cnr:institutes @=@ https://ror.org/05hky6p02 @=@ CDS046
cnr:institutes @=@ https://ror.org/05hky6p02 @=@ IIA - Istituto sull'inquinamento atmosferico
cnr:institutes @=@ https://ror.org/011n2hw53 @=@ CDS047
cnr:institutes @=@ https://ror.org/011n2hw53 @=@ ILIESI - Istituto per il lessico intellettuale europeo e storia delle idee
cnr:institutes @=@ https://ror.org/028g3pe33 @=@ CDS048
cnr:institutes @=@ https://ror.org/028g3pe33 @=@ ILC - Istituto di linguistica computazionale \"Antonio Zampolli\"
cnr:institutes @=@ __CDS049__ @=@ CDS049
cnr:institutes @=@ __CDS049__ @=@ IMAMOTER - Istituto per le macchine agricole e movimento terra
cnr:institutes @=@ https://ror.org/03m0n3c07 @=@ CDS050
cnr:institutes @=@ https://ror.org/03m0n3c07 @=@ IMATI - Istituto di matematica applicata e tecnologie informatiche \"Enrico Magenes\"
cnr:institutes @=@ __CDS051__ @=@ CDS051
cnr:institutes @=@ __CDS051__ @=@ IMCB - Istituto per i materiali compositi e biomedici
cnr:institutes @=@ https://ror.org/00z8ws214 @=@ CDS052
cnr:institutes @=@ https://ror.org/00z8ws214 @=@ IMEM - Istituto dei materiali per l'elettronica ed il magnetismo
cnr:institutes @=@ __CDS053__ @=@ CDS053
cnr:institutes @=@ __CDS053__ @=@ ISB - Istituto per i Sistemi Biologici
cnr:institutes @=@ https://ror.org/00bc51d88 @=@ CDS054
cnr:institutes @=@ https://ror.org/00bc51d88 @=@ NANOTEC - Istituto di Nanotecnologia
cnr:institutes @=@ https://ror.org/024ye7w89 @=@ CDS055
cnr:institutes @=@ https://ror.org/024ye7w89 @=@ IMAA - Istituto di metodologie per l'analisi ambientale
cnr:institutes @=@ __CDS056__ @=@ CDS056
cnr:institutes @=@ __CDS056__ @=@ IMGC - Istituto di metrologia \"Gustavo Colonnetti\"
cnr:institutes @=@ https://ror.org/05vk2g845 @=@ CDS057
cnr:institutes @=@ https://ror.org/05vk2g845 @=@ IMM - Istituto per la microelettronica e microsistemi
cnr:institutes @=@ https://ror.org/02qwy8e97 @=@ CDS058
cnr:institutes @=@ https://ror.org/02qwy8e97 @=@ IM - Istituto motori
cnr:institutes @=@ __CDS059__ @=@ CDS059
cnr:institutes @=@ __CDS059__ @=@ INMM - Istituto di neurobiologia e medicina molecolare
cnr:institutes @=@ https://ror.org/02dr63s31 @=@ CDS060
cnr:institutes @=@ https://ror.org/02dr63s31 @=@ IRGB - Istituto di Ricerca Genetica e Biomedica
cnr:institutes @=@ https://ror.org/0240rwx68 @=@ CDS061
cnr:institutes @=@ https://ror.org/0240rwx68 @=@ IN - Istituto di neuroscienze
cnr:institutes @=@ https://ror.org/02rzxrg25 @=@ CDS062
cnr:institutes @=@ https://ror.org/02rzxrg25 @=@ OVI - Istituto opera del vocabolario italiano
cnr:institutes @=@ https://ror.org/05patmk97 @=@ CDS063
cnr:institutes @=@ https://ror.org/05patmk97 @=@ IPCF - Istituto per i processi chimico-fisici
cnr:institutes @=@ __CDS064__ @=@ CDS064
cnr:institutes @=@ __CDS064__ @=@ IPP - Istituto per la protezione delle piante
cnr:institutes @=@ https://ror.org/029st3z03 @=@ CDS065
cnr:institutes @=@ https://ror.org/029st3z03 @=@ IRA - Istituto di radioastronomia
cnr:institutes @=@ https://ror.org/0040zx077 @=@ CDS066
cnr:institutes @=@ https://ror.org/0040zx077 @=@ IRPI - Istituto di ricerca per la protezione idrogeologica
cnr:institutes @=@ https://ror.org/044bfsy89 @=@ CDS067
cnr:institutes @=@ https://ror.org/044bfsy89 @=@ IRCRES - Istituto di Ricerca sulla Crescita Economica Sostenibile
cnr:institutes @=@ https://ror.org/01n1ayq61 @=@ CDS068
cnr:institutes @=@ https://ror.org/01n1ayq61 @=@ IRPPS - Istituto di ricerche sulla popolazione e le politiche sociali
cnr:institutes @=@ https://ror.org/02db0kh50 @=@ CDS069
cnr:institutes @=@ https://ror.org/02db0kh50 @=@ IRSA - Istituto di ricerca sulle acque
cnr:institutes @=@ https://ror.org/05813wx75 @=@ CDS070
cnr:institutes @=@ https://ror.org/05813wx75 @=@ IRC - Istituto di ricerche sulla combustione
cnr:institutes @=@ https://ror.org/04vnwke91 @=@ CDS071
cnr:institutes @=@ https://ror.org/04vnwke91 @=@ IRISS - Istituto di Ricerca su Innovazione e Servizi per lo Sviluppo
cnr:institutes @=@ https://ror.org/02wxw4x45 @=@ CDS072
cnr:institutes @=@ https://ror.org/02wxw4x45 @=@ IREA - Istituto per il rilevamento elettromagnetico dell'ambiente
cnr:institutes @=@ https://ror.org/01j6drw72 @=@ CDS073
cnr:institutes @=@ https://ror.org/01j6drw72 @=@ ISTEC - Istituto di scienza e tecnologia dei materiali ceramici
cnr:institutes @=@ https://ror.org/05kacka20 @=@ CDS074
cnr:institutes @=@ https://ror.org/05kacka20 @=@ ISTI - Istituto di scienza e tecnologie dell'informazione \"Alessandro Faedo\"
cnr:institutes @=@ https://ror.org/00n8ttd98 @=@ CDS075
cnr:institutes @=@ https://ror.org/00n8ttd98 @=@ ISAC - Istituto di scienze dell'atmosfera e del clima
cnr:institutes @=@ https://ror.org/0013zhk30 @=@ CDS076
cnr:institutes @=@ https://ror.org/0013zhk30 @=@ ISA - Istituto di Scienze dell'Alimentazione
cnr:institutes @=@ https://ror.org/03x7xkr71 @=@ CDS077
cnr:institutes @=@ https://ror.org/03x7xkr71 @=@ ISPA - Istituto di scienze delle produzioni alimentari
cnr:institutes @=@ https://ror.org/05w9g2j85 @=@ CDS078
cnr:institutes @=@ https://ror.org/05w9g2j85 @=@ ISTC - Istituto di scienze e tecnologie della cognizione
cnr:institutes @=@ https://ror.org/032tyv240 @=@ CDS079
cnr:institutes @=@ https://ror.org/032tyv240 @=@ ISTM - Istituto di scienze e tecnologie molecolari
cnr:institutes @=@ https://ror.org/02hdf6119 @=@ CDS080
cnr:institutes @=@ https://ror.org/02hdf6119 @=@ ISMAR - Istituto di scienze marine
cnr:institutes @=@ https://ror.org/01yg57d71 @=@ CDS081
cnr:institutes @=@ https://ror.org/01yg57d71 @=@ ISN - Istituto di scienze neurologiche
cnr:institutes @=@ https://ror.org/021z1mz76 @=@ CDS082
cnr:institutes @=@ https://ror.org/021z1mz76 @=@ ISOF - Istituto per la sintesi organica e la fotoreattività
cnr:institutes @=@ https://ror.org/01wqae691 @=@ CDS083
cnr:institutes @=@ https://ror.org/01wqae691 @=@ ISPAAM - Istituto per il sistema produzione animale in ambiente Mediterraneo
cnr:institutes @=@ __CDS084__ @=@ CDS084
cnr:institutes @=@ __CDS084__ @=@ ISAFoM - Istituto per i sistemi agricoli e forestali del mediterraneo
cnr:institutes @=@ https://ror.org/00awwz417 @=@ CDS085
cnr:institutes @=@ https://ror.org/00awwz417 @=@ ISPF - Istituto per la storia del pensiero filosofico e scientifico moderno
cnr:institutes @=@ https://ror.org/03a111314 @=@ CDS086
cnr:institutes @=@ https://ror.org/03a111314 @=@ ISEM - Istituto di storia dell'Europa mediterranea
cnr:institutes @=@ https://ror.org/01zz9wh30 @=@ CDS087
cnr:institutes @=@ https://ror.org/01zz9wh30 @=@ ISM - Istituto di struttura della materia
cnr:institutes @=@ https://ror.org/035y5td47 @=@ CDS088
cnr:institutes @=@ https://ror.org/035y5td47 @=@ ISGI - Istituto di studi giuridici internazionali
cnr:institutes @=@ __CDS089__ @=@ CDS089
cnr:institutes @=@ __CDS089__ @=@ ISPRI - Istituto sperimentale di studi socio - economici sull'innovazione e le politiche della ricerca
cnr:institutes @=@ https://ror.org/051t1q308 @=@ CDS090
cnr:institutes @=@ https://ror.org/051t1q308 @=@ ISSIA - Istituto di studi sui sistemi intelligenti per l'automazione
cnr:institutes @=@ https://ror.org/05k3cs357 @=@ CDS091
cnr:institutes @=@ https://ror.org/05k3cs357 @=@ ISSIRFA - Istituto di studi sui sistemi regionali federali e sulle autonomie \"Massimo Severo Giannini\"
cnr:institutes @=@ https://ror.org/02gcxw165 @=@ CDS092
cnr:institutes @=@ https://ror.org/02gcxw165 @=@ ISMA - Istituto di Studi sul Mediterraneo Antico
cnr:institutes @=@ https://ror.org/05db0es39 @=@ CDS093
cnr:institutes @=@ https://ror.org/05db0es39 @=@ ISMed - Istituto di studi sul Mediterraneo
cnr:institutes @=@ https://ror.org/029k6t707 @=@ CDS094
cnr:institutes @=@ https://ror.org/029k6t707 @=@ ISE - Istituto per lo studio degli ecosistemi
cnr:institutes @=@ https://ror.org/00w6r1881 @=@ CDS095
cnr:institutes @=@ https://ror.org/00w6r1881 @=@ ISMN - Istituto per lo studio dei materiali nanostrutturati
cnr:institutes @=@ https://ror.org/01mfmr054 @=@ CDS096
cnr:institutes @=@ https://ror.org/01mfmr054 @=@ ISMAC - Istituto per lo studio delle macromolecole
cnr:institutes @=@ https://ror.org/058nrs650 @=@ CDS097
cnr:institutes @=@ https://ror.org/058nrs650 @=@ ITM - Istituto per la tecnologia delle membrane
cnr:institutes @=@ https://ror.org/0331xj092 @=@ CDS098
cnr:institutes @=@ https://ror.org/0331xj092 @=@ ITABC - Istituto per le tecnologie applicate ai beni culturali
cnr:institutes @=@ https://ror.org/052q58629 @=@ CDS099
cnr:institutes @=@ https://ror.org/052q58629 @=@ ITAE - Istituto di tecnologie avanzate per l'energia \"Nicola Giordano\"
cnr:institutes @=@ https://ror.org/04ehykb85 @=@ CDS100
cnr:institutes @=@ https://ror.org/04ehykb85 @=@ ITB - Istituto di tecnologie biomediche
cnr:institutes @=@ https://ror.org/0221agg28 @=@ CDS101
cnr:institutes @=@ https://ror.org/0221agg28 @=@ ITC - Istituto per le tecnologie della costruzione
cnr:institutes @=@ https://ror.org/02xz4xc25 @=@ CDS102
cnr:institutes @=@ https://ror.org/02xz4xc25 @=@ ITD - Istituto per le tecnologie didattiche
cnr:institutes @=@ __CDS103__ @=@ CDS103
cnr:institutes @=@ __CDS103__ @=@ STIIMA - Istituto di Sistemi e Tecnologie Industriali Intelligenti per il Manifatturiero Avanzato
cnr:institutes @=@ https://ror.org/01as2bh37 @=@ CDS104
cnr:institutes @=@ https://ror.org/01as2bh37 @=@ ITTIG - Istituto di teoria e tecniche dell'informazione giuridica
cnr:institutes @=@ https://ror.org/01y5w6t76 @=@ CDS105
cnr:institutes @=@ https://ror.org/01y5w6t76 @=@ ITOI - Istituto per i trapianti d'organo e immunocitologia
cnr:institutes @=@ https://ror.org/04xy2mq71 @=@ CDS106
cnr:institutes @=@ https://ror.org/04xy2mq71 @=@ IVALSA - Istituto per la valorizzazione del legno e delle specie arboree
cnr:institutes @=@ __CDS107__ @=@ CDS107
cnr:institutes @=@ __CDS107__ @=@ IVV - Istituto di virologia vegetale
cnr:institutes @=@ https://ror.org/013nxtf56 @=@ CDS108
cnr:institutes @=@ https://ror.org/013nxtf56 @=@ IRSIG - Istituto di ricerca sui sistemi giudiziari
cnr:institutes @=@ https://ror.org/05rcgef49 @=@ CDS109
cnr:institutes @=@ https://ror.org/05rcgef49 @=@ ISC - Istituto dei sistemi complessi
cnr:institutes @=@ __CDS110__ @=@ CDS110
cnr:institutes @=@ __CDS110__ @=@ INFM - Centro di responsabilità scientifica INFM
cnr:institutes @=@ https://ror.org/02dp3a879 @=@ CDS111
cnr:institutes @=@ https://ror.org/02dp3a879 @=@ INO - Istituto nazionale di ottica
cnr:institutes @=@ __CDS112__ @=@ CDS112
cnr:institutes @=@ __CDS112__ @=@ IDAIC - Centro di responsabilità di attività scientifica IDAIC
cnr:institutes @=@ https://ror.org/00p03yg71 @=@ CDS113
cnr:institutes @=@ https://ror.org/00p03yg71 @=@ SPIN - Istituto superconduttori, materiali innovativi e dispositivi
cnr:institutes @=@ https://ror.org/00yfw2296 @=@ CDS114
cnr:institutes @=@ https://ror.org/00yfw2296 @=@ IOM - Istituto officina dei materiali
cnr:institutes @=@ https://ror.org/0042e5975 @=@ CDS115
cnr:institutes @=@ https://ror.org/0042e5975 @=@ NANO - Istituto Nanoscienze
cnr:institutes @=@ https://ror.org/03ta8pf33 @=@ CDS116
cnr:institutes @=@ https://ror.org/03ta8pf33 @=@ IFT - Istituto di Farmacologia Traslazionale
cnr:institutes @=@ https://ror.org/040xhth73 @=@ CDS117
cnr:institutes @=@ https://ror.org/040xhth73 @=@ IBCN - Istituto di Biologia Cellulare e Neurobiologia
cnr:institutes @=@ https://ror.org/02qnx8e75 @=@ CDS118
cnr:institutes @=@ https://ror.org/02qnx8e75 @=@ INM - Istituto di iNgegneria del Mare
cnr:institutes @=@ https://ror.org/05nr7xa08 @=@ CDS119
cnr:institutes @=@ https://ror.org/05nr7xa08 @=@ IPCB - Istituto per i Polimeri, Compositi e Biomateriali
cnr:institutes @=@ https://ror.org/008fjbg42 @=@ CDS121
cnr:institutes @=@ https://ror.org/008fjbg42 @=@ IPSP - Istituto per la Protezione Sostenibile delle Piante
cnr:institutes @=@ __CDS122__ @=@ CDS122
cnr:institutes @=@ __CDS122__ @=@ IRBIM - Istituto per le Risorse Biologiche e le Biotecnologie Marine
cnr:institutes @=@ __CDS123__ @=@ CDS123
cnr:institutes @=@ __CDS123__ @=@ ISPC - Istituto di Scienze del Patrimonio Culturale
cnr:institutes @=@ __CDS124__ @=@ CDS124
cnr:institutes @=@ __CDS124__ @=@ IAS - Istituto per lo studio degli impatti Antropici e Sostenibilità in ambiente marino
cnr:institutes @=@ __CDS125__ @=@ CDS125
cnr:institutes @=@ __CDS125__ @=@ IRET - Istituto di Ricerca sugli Ecosistemi Terrestri
cnr:institutes @=@ https://ror.org/03a0vt050 @=@ CDS126
cnr:institutes @=@ https://ror.org/03a0vt050 @=@ ISTP - Istituto per la Scienza e Tecnologia dei Plasmi
cnr:institutes @=@ __CDS127__ @=@ CDS127
cnr:institutes @=@ __CDS127__ @=@ ISP - Istituto di Scienze Polari
cnr:institutes @=@ https://ror.org/03byxpq91 @=@ CDS128
cnr:institutes @=@ https://ror.org/03byxpq91 @=@ IRIB - Istituto per la Ricerca e l'Innovazione Biomedica
cnr:institutes @=@ __CDS129__ @=@ CDS129
cnr:institutes @=@ __CDS129__ @=@ IGSG - Istituto di Informatica Giuridica e Sistemi Giudiziari
cnr:institutes @=@ __CDS130__ @=@ CDS130
cnr:institutes @=@ __CDS130__ @=@ IBBC - Istituto di Biochimica e Biologia Cellulare
cnr:institutes @=@ __CDS131__ @=@ CDS131
cnr:institutes @=@ __CDS131__ @=@ IBE - Istituto per la BioEconomia
cnr:institutes @=@ https://ror.org/0263zy895 @=@ CDS132
cnr:institutes @=@ https://ror.org/0263zy895 @=@ SCITEC - Istituto di Scienze e Tecnologie Chimiche \"Giulio Natta\"
cnr:institutes @=@ __CDS133__ @=@ CDS133
cnr:institutes @=@ __CDS133__ @=@ STEMS - Istituto di Scienze e Tecnologie per l'Energia e la Mobilità Sostenibili
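Each entry here maps a synonym (a CDSxxx set code or an institute's name) to its canonical term, a ROR URL where one exists; institutes without a ROR id carry a __CDSxxx__ placeholder, which the stylesheet's contains($rorDsId, '/ror.org/') guard later excludes from the affiliation relations. A minimal sketch of how such lines could back a vocabulary:clean-style lookup (the file name is a placeholder; the real engine has its own vocabulary loader):

    // Illustrative only: parse "vocabulary @=@ term @=@ synonym" lines into a lookup table.
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.stream.Stream;

    public class CnrInstitutesLookup {
        public static void main(String[] args) throws Exception {
            Map<String, String> synonymToTerm = new HashMap<>();
            try (Stream<String> lines = Files.lines(Paths.get("synonyms.txt"))) { // placeholder file
                lines.map(l -> l.split(" @=@ "))
                    .filter(f -> f.length == 3 && f[0].equals("cnr:institutes"))
                    .forEach(f -> synonymToTerm.put(f[2], f[1])); // synonym -> term (ROR id or __CDSxxx__)
            }
            System.out.println(synonymToTerm.get("CDS027")); // expected: https://ror.org/05wba8r86
            System.out.println(synonymToTerm.get("CDS010")); // expected: __CDS010__ (no ROR id yet)
        }
    }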
@@ -1078,3 +1078,136 @@ dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/AUTHOR/ORCID @=@ An Ope
dnet:review_levels @=@ dnet:review_levels @=@ 0000 @=@ Unknown
dnet:review_levels @=@ dnet:review_levels @=@ 0002 @=@ nonPeerReviewed
dnet:review_levels @=@ dnet:review_levels @=@ 0001 @=@ peerReviewed
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00brf2d87 @=@ https://ror.org/00brf2d87
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/006qkqr45 @=@ https://ror.org/006qkqr45
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/054ye0e45 @=@ https://ror.org/054ye0e45
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00ygy3d85 @=@ https://ror.org/00ygy3d85
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/000sy1f36 @=@ https://ror.org/000sy1f36
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00x5wpm25 @=@ https://ror.org/00x5wpm25
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03eqeqg74 @=@ https://ror.org/03eqeqg74
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/041xzk838 @=@ https://ror.org/041xzk838
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00s2j5046 @=@ https://ror.org/00s2j5046
cnr:institutes @=@ cnr:institutes @=@ __CDS010__ @=@ __CDS010__
cnr:institutes @=@ cnr:institutes @=@ __CDS011__ @=@ __CDS011__
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02e5sbe24 @=@ https://ror.org/02e5sbe24
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01nyatq71 @=@ https://ror.org/01nyatq71
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01dy2q607 @=@ https://ror.org/01dy2q607
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05nzf7q96 @=@ https://ror.org/05nzf7q96
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05m1yqp60 @=@ https://ror.org/05m1yqp60
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03rqtqb02 @=@ https://ror.org/03rqtqb02
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/04r5fge26 @=@ https://ror.org/04r5fge26
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03wyf0g15 @=@ https://ror.org/03wyf0g15
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02fkw1114 @=@ https://ror.org/02fkw1114
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/0141vn777 @=@ https://ror.org/0141vn777
cnr:institutes @=@ cnr:institutes @=@ __CDS022__ @=@ __CDS022__
cnr:institutes @=@ cnr:institutes @=@ __CDS023__ @=@ __CDS023__
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00be3zh53 @=@ https://ror.org/00be3zh53
cnr:institutes @=@ cnr:institutes @=@ __CDS025__ @=@ __CDS025__
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02ynrme92 @=@ https://ror.org/02ynrme92
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05wba8r86 @=@ https://ror.org/05wba8r86
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03z58xd74 @=@ https://ror.org/03z58xd74
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00n4jbh84 @=@ https://ror.org/00n4jbh84
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/04sn06036 @=@ https://ror.org/04sn06036
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01rg40y89 @=@ https://ror.org/01rg40y89
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00dqega85 @=@ https://ror.org/00dqega85
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02n2bgz18 @=@ https://ror.org/02n2bgz18
cnr:institutes @=@ cnr:institutes @=@ __CDS034__ @=@ __CDS034__
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01kdj2848 @=@ https://ror.org/01kdj2848
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/049ebw417 @=@ https://ror.org/049ebw417
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01f5tnx94 @=@ https://ror.org/01f5tnx94
cnr:institutes @=@ cnr:institutes @=@ __CDS038__ @=@ __CDS038__
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/04hadk112 @=@ https://ror.org/04hadk112
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03qpd8w66 @=@ https://ror.org/03qpd8w66
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01gtsa866 @=@ https://ror.org/01gtsa866
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00ytw6m58 @=@ https://ror.org/00ytw6m58
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/015bmra78 @=@ https://ror.org/015bmra78
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02gdcn153 @=@ https://ror.org/02gdcn153
cnr:institutes @=@ cnr:institutes @=@ __CDS045__ @=@ __CDS045__
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05hky6p02 @=@ https://ror.org/05hky6p02
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/011n2hw53 @=@ https://ror.org/011n2hw53
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/028g3pe33 @=@ https://ror.org/028g3pe33
cnr:institutes @=@ cnr:institutes @=@ __CDS049__ @=@ __CDS049__
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03m0n3c07 @=@ https://ror.org/03m0n3c07
cnr:institutes @=@ cnr:institutes @=@ __CDS051__ @=@ __CDS051__
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00z8ws214 @=@ https://ror.org/00z8ws214
cnr:institutes @=@ cnr:institutes @=@ __CDS053__ @=@ __CDS053__
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00bc51d88 @=@ https://ror.org/00bc51d88
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/024ye7w89 @=@ https://ror.org/024ye7w89
cnr:institutes @=@ cnr:institutes @=@ __CDS056__ @=@ __CDS056__
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05vk2g845 @=@ https://ror.org/05vk2g845
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02qwy8e97 @=@ https://ror.org/02qwy8e97
cnr:institutes @=@ cnr:institutes @=@ __CDS059__ @=@ __CDS059__
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02dr63s31 @=@ https://ror.org/02dr63s31
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/0240rwx68 @=@ https://ror.org/0240rwx68
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02rzxrg25 @=@ https://ror.org/02rzxrg25
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05patmk97 @=@ https://ror.org/05patmk97
cnr:institutes @=@ cnr:institutes @=@ __CDS064__ @=@ __CDS064__
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/029st3z03 @=@ https://ror.org/029st3z03
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/0040zx077 @=@ https://ror.org/0040zx077
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/044bfsy89 @=@ https://ror.org/044bfsy89
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01n1ayq61 @=@ https://ror.org/01n1ayq61
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02db0kh50 @=@ https://ror.org/02db0kh50
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05813wx75 @=@ https://ror.org/05813wx75
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/04vnwke91 @=@ https://ror.org/04vnwke91
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02wxw4x45 @=@ https://ror.org/02wxw4x45
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01j6drw72 @=@ https://ror.org/01j6drw72
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05kacka20 @=@ https://ror.org/05kacka20
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00n8ttd98 @=@ https://ror.org/00n8ttd98
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/0013zhk30 @=@ https://ror.org/0013zhk30
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03x7xkr71 @=@ https://ror.org/03x7xkr71
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05w9g2j85 @=@ https://ror.org/05w9g2j85
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/032tyv240 @=@ https://ror.org/032tyv240
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02hdf6119 @=@ https://ror.org/02hdf6119
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01yg57d71 @=@ https://ror.org/01yg57d71
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/021z1mz76 @=@ https://ror.org/021z1mz76
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01wqae691 @=@ https://ror.org/01wqae691
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ __CDS084__ @=@ __CDS084__
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00awwz417 @=@ https://ror.org/00awwz417
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03a111314 @=@ https://ror.org/03a111314
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01zz9wh30 @=@ https://ror.org/01zz9wh30
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/035y5td47 @=@ https://ror.org/035y5td47
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ __CDS089__ @=@ __CDS089__
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/051t1q308 @=@ https://ror.org/051t1q308
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05k3cs357 @=@ https://ror.org/05k3cs357
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02gcxw165 @=@ https://ror.org/02gcxw165
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05db0es39 @=@ https://ror.org/05db0es39
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/029k6t707 @=@ https://ror.org/029k6t707
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00w6r1881 @=@ https://ror.org/00w6r1881
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01mfmr054 @=@ https://ror.org/01mfmr054
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/058nrs650 @=@ https://ror.org/058nrs650
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/0331xj092 @=@ https://ror.org/0331xj092
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/052q58629 @=@ https://ror.org/052q58629
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/04ehykb85 @=@ https://ror.org/04ehykb85
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/0221agg28 @=@ https://ror.org/0221agg28
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02xz4xc25 @=@ https://ror.org/02xz4xc25
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ __CDS103__ @=@ __CDS103__
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01as2bh37 @=@ https://ror.org/01as2bh37
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01y5w6t76 @=@ https://ror.org/01y5w6t76
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/04xy2mq71 @=@ https://ror.org/04xy2mq71
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ __CDS107__ @=@ __CDS107__
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/013nxtf56 @=@ https://ror.org/013nxtf56
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05rcgef49 @=@ https://ror.org/05rcgef49
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ __CDS110__ @=@ __CDS110__
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02dp3a879 @=@ https://ror.org/02dp3a879
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ __CDS112__ @=@ __CDS112__
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00p03yg71 @=@ https://ror.org/00p03yg71
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00yfw2296 @=@ https://ror.org/00yfw2296
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/0042e5975 @=@ https://ror.org/0042e5975
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03ta8pf33 @=@ https://ror.org/03ta8pf33
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/040xhth73 @=@ https://ror.org/040xhth73
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02qnx8e75 @=@ https://ror.org/02qnx8e75
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05nr7xa08 @=@ https://ror.org/05nr7xa08
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/008fjbg42 @=@ https://ror.org/008fjbg42
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ __CDS122__ @=@ __CDS122__
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ __CDS123__ @=@ __CDS123__
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ __CDS124__ @=@ __CDS124__
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ __CDS125__ @=@ __CDS125__
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03a0vt050 @=@ https://ror.org/03a0vt050
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ __CDS127__ @=@ __CDS127__
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03byxpq91 @=@ https://ror.org/03byxpq91
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ __CDS129__ @=@ __CDS129__
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ __CDS130__ @=@ __CDS130__
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ __CDS131__ @=@ __CDS131__
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/0263zy895 @=@ https://ror.org/0263zy895
|
||||||
|
cnr:institutes @=@ cnr:institutes @=@ __CDS133__ @=@ __CDS133__
|
||||||
|
|
||||||
|
|
|
@@ -2,11 +2,14 @@ package eu.dnetlib.dhp.datacite

 import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature}
 import eu.dnetlib.dhp.aggregation.AbstractVocabularyTest
-import eu.dnetlib.dhp.schema.oaf.Oaf
+import eu.dnetlib.dhp.schema.oaf.{Dataset => OafDataset, _}
 import org.apache.commons.io.FileUtils
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.functions.{col, count}
 import org.apache.spark.sql.{Dataset, Encoder, Encoders, SparkSession}
+import org.json4s.DefaultFormats
+import org.json4s.JsonAST.{JField, JObject, JString}
+import org.json4s.jackson.JsonMethods.parse
 import org.junit.jupiter.api.Assertions._
 import org.junit.jupiter.api.extension.ExtendWith
 import org.junit.jupiter.api.{AfterEach, BeforeEach, Test}

@@ -45,6 +48,9 @@ class DataciteToOAFTest extends AbstractVocabularyTest {

   }

+
+
+
   @Test
   def testConvert(): Unit = {

@@ -70,17 +76,18 @@ class DataciteToOAFTest extends AbstractVocabularyTest {

     assertEquals(100, nativeSize)

-    spark.read.load(targetPath).printSchema();
+    val result: Dataset[String] = spark.read.text(targetPath).as[String].map(DataciteUtilityTest.convertToOAF)(Encoders.STRING)

-    val result: Dataset[Oaf] = spark.read.load(targetPath).as[Oaf]

     result
-      .map(s => s.getClass.getSimpleName)
       .groupBy(col("value").alias("class"))
       .agg(count("value").alias("Total"))
       .show(false)

-    val t = spark.read.load(targetPath).count()
+    val t = spark.read.text(targetPath).as[String].count()

     assertTrue(t > 0)
@@ -0,0 +1,31 @@
+package eu.dnetlib.dhp.datacite
+
+import org.json4s.DefaultFormats
+import org.json4s.JsonAST.{JField, JObject, JString}
+import org.json4s.jackson.JsonMethods.parse
+
+object DataciteUtilityTest {
+
+  def convertToOAF(input: String): String = {
+    implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats
+    lazy val json = parse(input)
+
+    val isRelation: String = (json \\ "source").extractOrElse("NULL")
+
+    if (isRelation != "NULL") {
+      return "Relation"
+    }
+
+    val iType: List[String] = for {
+      JObject(instance) <- json \\ "instance"
+      JField("instancetype", JObject(instancetype)) <- instance
+      JField("classname", JString(classname)) <- instancetype
+    } yield classname
+
+    val l: String = iType.head.toLowerCase()
+    l
+  }
+
+}
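[Editor's note] A quick way to sanity-check the classifier above from Java, via the Scala object's static forwarder. This is a minimal sketch, not part of the commit; both inline JSON records are fabricated and only exercise the two branches of convertToOAF:

package eu.dnetlib.dhp.datacite;

// Hypothetical smoke test for DataciteUtilityTest.convertToOAF (illustrative records only)
public class ConvertToOafSketch {
	public static void main(String[] args) {
		// a record carrying a "source" field anywhere in the tree is classified as a Relation
		System.out.println(DataciteUtilityTest.convertToOAF("{\"source\":\"id1\",\"target\":\"id2\"}"));
		// otherwise the first instancetype classname is returned, lowercased
		System.out.println(DataciteUtilityTest.convertToOAF(
			"{\"instance\":[{\"instancetype\":{\"classname\":\"Dataset\"}}]}"));
		// expected output: "Relation", then "dataset"
	}
}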
@@ -2,9 +2,10 @@ package eu.dnetlib.dhp.sx.bio

 import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper, SerializationFeature}
 import eu.dnetlib.dhp.aggregation.AbstractVocabularyTest
-import eu.dnetlib.dhp.schema.oaf.{Oaf, Relation, Result}
+import eu.dnetlib.dhp.schema.oaf.utils.PidType
+import eu.dnetlib.dhp.schema.oaf.{Oaf, Publication, Relation, Result}
 import eu.dnetlib.dhp.sx.bio.BioDBToOAF.ScholixResolved
-import eu.dnetlib.dhp.sx.bio.pubmed.{PMArticle, PMParser, PubMedToOaf}
+import eu.dnetlib.dhp.sx.bio.pubmed.{PMArticle, PMParser, PMSubject, PubMedToOaf}
 import org.json4s.DefaultFormats
 import org.json4s.JsonAST.{JField, JObject, JString}
 import org.json4s.jackson.JsonMethods.parse

@@ -16,6 +17,7 @@ import org.mockito.junit.jupiter.MockitoExtension

 import java.io.{BufferedReader, InputStream, InputStreamReader}
 import java.util.zip.GZIPInputStream
 import scala.collection.JavaConverters._
+import scala.collection.mutable.ListBuffer
 import scala.io.Source
 import scala.xml.pull.XMLEventReader
@@ -72,6 +74,102 @@ class BioScholixTest extends AbstractVocabularyTest {
       )
     println(mapper.writeValueAsString(r.head))
+
+  }
+
+  private def checkPMArticle(article: PMArticle): Unit = {
+    assertNotNull(article.getPmid)
+    assertNotNull(article.getTitle)
+    assertNotNull(article.getAuthors)
+    article.getAuthors.asScala.foreach { a =>
+      assertNotNull(a)
+      assertNotNull(a.getFullName)
+    }
+  }
+
+  @Test
+  def testParsingPubmedXML(): Unit = {
+    val xml = new XMLEventReader(
+      Source.fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed.xml")))
+    val parser = new PMParser(xml)
+    parser.foreach(checkPMArticle)
+  }
+
+  private def checkPubmedPublication(o: Oaf): Unit = {
+    assertTrue(o.isInstanceOf[Publication])
+    val p: Publication = o.asInstanceOf[Publication]
+    assertNotNull(p.getId)
+    assertNotNull(p.getTitle)
+    p.getTitle.asScala.foreach(t => assertNotNull(t.getValue))
+    p.getAuthor.asScala.foreach(a => assertNotNull(a.getFullname))
+    assertNotNull(p.getInstance())
+    p.getInstance().asScala.foreach { i =>
+      assertNotNull(i.getCollectedfrom)
+      assertNotNull(i.getPid)
+      assertNotNull(i.getInstancetype)
+    }
+    assertNotNull(p.getOriginalId)
+    p.getOriginalId.asScala.foreach(oId => assertNotNull(oId))
+
+    val hasPMC = p
+      .getInstance()
+      .asScala
+      .exists(i => i.getPid.asScala.exists(pid => pid.getQualifier.getClassid.equalsIgnoreCase(PidType.pmc.toString)))
+
+    if (hasPMC) {
+      assertTrue(p.getOriginalId.asScala.exists(oId => oId.startsWith("od_______267::")))
+    }
+  }
+
+  @Test
+  def testPubmedOriginalID(): Unit = {
+    val article: PMArticle = new PMArticle
+
+    article.setPmid("1234")
+    article.setTitle("a Title")
+
+    // VERIFY THE PUBLICATION IS NOT NULL
+    article.getPublicationTypes.add(new PMSubject("article", null, null))
+    var publication = PubMedToOaf.convert(article, vocabularies).asInstanceOf[Publication]
+    assertNotNull(publication)
+    assertEquals("50|pmid________::81dc9bdb52d04dc20036dbd8313ed055", publication.getId)
+
+    // VERIFY THE PUBLICATION ID DOES NOT CHANGE WHEN THE PMC IDENTIFIER IS ALSO SET
+    article.setPmcId("PMC1517292")
+    publication = PubMedToOaf.convert(article, vocabularies).asInstanceOf[Publication]
+    assertNotNull(publication)
+    assertEquals("50|pmid________::81dc9bdb52d04dc20036dbd8313ed055", publication.getId)
+
+    // VERIFY THE ORIGINAL ID GENERATED THE OLD WAY, FROM THE PMC IDENTIFIER, STILL EXISTS
+    val oldOpenaireID = "od_______267::0000072375bc0e68fa09d4e6b7658248"
+    val hasOldOpenAIREID = publication.getOriginalId.asScala.exists(o => o.equalsIgnoreCase(oldOpenaireID))
+    assertTrue(hasOldOpenAIREID)
+  }
+
+  @Test
+  def testPubmedMapping(): Unit = {
+    val xml = new XMLEventReader(
+      Source.fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed.xml")))
+    val parser = new PMParser(xml)
+    val results = ListBuffer[Oaf]()
+    parser.foreach(x => results += PubMedToOaf.convert(x, vocabularies))
+
+    results.foreach(checkPubmedPublication)
+
   }

   @Test
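[Editor's note] The expected identifier in testPubmedOriginalID is deterministic: the "50|pmid________::" prefix followed by the MD5 of the pmid. A standalone sketch reproducing that value, assuming plain MD5 over the UTF-8 bytes (which matches the expected string for "1234"):

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

public class PmidIdSketch {
	public static void main(String[] args) throws Exception {
		// MD5("1234") == 81dc9bdb52d04dc20036dbd8313ed055
		MessageDigest md = MessageDigest.getInstance("MD5");
		byte[] digest = md.digest("1234".getBytes(StandardCharsets.UTF_8));
		StringBuilder hex = new StringBuilder();
		for (byte b : digest)
			hex.append(String.format("%02x", b));
		// prints 50|pmid________::81dc9bdb52d04dc20036dbd8313ed055
		System.out.println("50|pmid________::" + hex);
	}
}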
@@ -0,0 +1,368 @@
+{
+  "indexed": {"date-parts": [[2022, 4, 22]], "date-time": "2022-04-22T15:30:54Z", "timestamp": 1650641454218},
+  "reference-count": 31,
+  "publisher": "Cambridge University Press (CUP)",
+  "issue": "2",
+  "license": [
+    {
+      "start": {"date-parts": [[2017, 8, 22]], "date-time": "2017-08-22T00:00:00Z", "timestamp": 1503360000000},
+      "content-version": "unspecified",
+      "delay-in-days": 21,
+      "URL": "https://www.cambridge.org/core/terms"
+    }
+  ],
+  "content-domain": {"domain": [], "crossmark-restriction": false},
+  "short-container-title": ["Dance Res. J."],
+  "published-print": {"date-parts": [[2017, 8]]},
+  "abstract": "<jats:p>Gaga, a practice developed by Israeli choreographer Ohad Naharin, is one of the most popular training methods on the global dance market. Structured as a metatechnique, or a system for negotiating techniques within one's body, Gaga teaches students to both draw on and reject multiple movement techniques to create their own movement. I consider how the paradigms of choreography, technique, and improvisation are blurred together in the pedagogical model of a metatechnique and how training dancers to shift between choreographer, dancer, and improviser has significant ramifications for understanding their agency. The metatechnique model of Gaga falls in line with neoliberal values of efficiency and a wide range of skills and knowledge; this analysis provides an understanding of recent trends in dance training in relation to contemporary political and socioeconomic structures.</jats:p>",
+  "DOI": "10.1017/s0149767717000183",
+  "type": "journal-article",
+  "created": {"date-parts": [[2017, 8, 22]], "date-time": "2017-08-22T13:32:38Z", "timestamp": 1503408758000},
+  "page": "26-43",
+  "source": "Crossref",
+  "is-referenced-by-count": 10,
+  "title": ["Gaga as Metatechnique: Negotiating Choreography, Improvisation, and Technique in a Neoliberal Dance Market"],
+  "prefix": "10.1017",
+  "volume": "49",
+  "author": [{"given": "Meghan", "family": "Quinlan", "sequence": "first", "affiliation": []}],
+  "member": "56",
+  "published-online": {"date-parts": [[2017, 8, 22]]},
+  "reference": [
+    {"key": "S0149767717000183_ref22", "first-page": "38", "article-title": "‘I Don't Want to do African … What About My Technique?’: Transforming Dancing Places into Spaces in the Academy", "volume": "4", "author": "Monroe", "year": "2011", "journal-title": "The Journal of Pan African Studies"},
+    {"key": "S0149767717000183_ref8", "volume-title": "Discipline and Punish: The Birth of the Prison", "author": "Foucault", "year": "1995"},
+    {"key": "S0149767717000183_ref24", "volume-title": "Sharing the Dance: Contact Improvisation and American Culture", "author": "Novack", "year": "1990"},
+    {"key": "S0149767717000183_ref26", "doi-asserted-by": "publisher", "DOI": "10.1215/9780822387879"},
+    {"key": "S0149767717000183_ref10", "doi-asserted-by": "publisher", "DOI": "10.1080/01472526.2015.1085759"},
+    {"key": "S0149767717000183_ref4", "doi-asserted-by": "crossref", "first-page": "235", "volume-title": "Meaning in Motion", "author": "Foster", "year": "1997", "DOI": "10.1215/9780822397281-013"},
+    {"key": "S0149767717000183_ref17", "doi-asserted-by": "publisher", "DOI": "10.1093/acprof:oso/9780199360369.001.0001"},
+    {"key": "S0149767717000183_ref30", "volume-title": "The Precariat: The New Dangerous Class", "author": "Standing", "year": "2011"},
+    {"key": "S0149767717000183_ref7", "volume-title": "Choreographing Empathy: Kinesthesia in Performance", "author": "Foster", "year": "2011"},
+    {"key": "S0149767717000183_ref14", "volume-title": "A Brief History of Neoliberalism", "author": "Harvey", "year": "2007"},
+    {"key": "S0149767717000183_ref27", "unstructured": "Quinlan Meghan . 2016. “Gaga as Politics: A Case Study of Contemporary Dance Training.” PhD diss., University of California, Riverside."},
+    {"key": "S0149767717000183_ref11", "first-page": "xiii", "volume-title": "Taken by Surprise: A Dance Improvisation Reader", "author": "Gere", "year": "2003"},
+    {"key": "S0149767717000183_ref13", "volume-title": "Declaration", "author": "Hardt", "year": "2012"},
+    {"key": "S0149767717000183_ref29", "doi-asserted-by": "publisher", "DOI": "10.1057/9780230236844_9"},
+    {"key": "S0149767717000183_ref5", "doi-asserted-by": "publisher", "DOI": "10.1057/9780230236844_6"},
+    {"key": "S0149767717000183_ref28", "doi-asserted-by": "publisher", "DOI": "10.1017/S0149767700000528"},
+    {"key": "S0149767717000183_ref12", "doi-asserted-by": "publisher", "DOI": "10.3998/mpub.287881"},
+    {"key": "S0149767717000183_ref18", "first-page": "135", "volume-title": "Taken by Surprise: A Dance Improvisation Reader", "author": "Marks", "year": "2003"},
+    {"key": "S0149767717000183_ref3", "doi-asserted-by": "crossref", "volume-title": "Bodies That Matter: On the Discursive Limits of Sex", "author": "Butler", "year": "2011", "DOI": "10.4324/9780203828274"},
+    {"key": "S0149767717000183_ref32", "volume-title": "Choreographing Difference: The Body and Identity in Contemporary Dance", "author": "Albright", "year": "1997"},
+    {"key": "S0149767717000183_ref16", "doi-asserted-by": "publisher", "DOI": "10.1017/S0149767714000163"},
+    {"key": "S0149767717000183_ref15", "doi-asserted-by": "publisher", "DOI": "10.2307/1477803"},
+    {"key": "S0149767717000183_ref21", "first-page": "455", "volume-title": "Incorporations", "author": "Mauss", "year": "1992"},
+    {"key": "S0149767717000183_ref1", "volume-title": "The Body Eclectic: Evolving Practices in Dance Training", "author": "Bales", "year": "2008"},
+    {"key": "S0149767717000183_ref9", "unstructured": "Gaga Movement Ltd. 2016. Gaga People. Dancers. Accessed June 28, 2016. http://gagapeople.com/english/."},
+    {"key": "S0149767717000183_ref19", "volume-title": "Critical Moves: Dance Studies in Theory and Politics", "author": "Martin", "year": "1998"},
+    {"key": "S0149767717000183_ref2", "volume-title": "Undoing the Demos: Neoliberalism's Stealth Revolution", "author": "Brown", "year": "2015"},
+    {"key": "S0149767717000183_ref20", "doi-asserted-by": "publisher", "DOI": "10.1162/DRAM_a_00214"},
+    {"key": "S0149767717000183_ref6", "doi-asserted-by": "publisher", "DOI": "10.1215/01610775-2009-016"},
+    {"key": "S0149767717000183_ref23", "doi-asserted-by": "publisher", "DOI": "10.1093/acprof:oso/9780190201661.001.0001"},
+    {"key": "S0149767717000183_ref25", "volume-title": "Flexible Citizenship: The Cultural Logics of Transnationality", "author": "Ong", "year": "1999"}
+  ],
+  "container-title": ["Dance Research Journal"],
+  "original-title": [],
+  "language": "en",
+  "link": [
+    {
+      "URL": "https://www.cambridge.org/core/services/aop-cambridge-core/content/view/S0149767717000183",
+      "content-type": "unspecified",
+      "content-version": "vor",
+      "intended-application": "similarity-checking"
+    }
+  ],
+  "deposited": {"date-parts": [[2020, 10, 16]], "date-time": "2020-10-16T01:00:32Z", "timestamp": 1602810032000},
+  "score": 1,
+  "resource": {"primary": {"URL": "https://www.cambridge.org/core/product/identifier/S0149767717000183/type/journal_article"}},
+  "subtitle": [],
+  "short-title": [],
+  "issued": {"date-parts": [[2017, 8]]},
+  "references-count": 31,
+  "journal-issue": {"issue": "2", "published-print": {"date-parts": [[2017, 8]]}},
+  "alternative-id": ["S0149767717000183"],
+  "URL": "http://dx.doi.org/10.1017/s0149767717000183",
+  "relation": {},
+  "ISSN": ["0149-7677", "1940-509X"],
+  "issn-type": [{"value": "0149-7677", "type": "print"}, {"value": "1940-509X", "type": "electronic"}],
+  "subject": ["Visual Arts and Performing Arts"],
+  "published": {"date-parts": [[2017, 8]]}
+}
@@ -475,6 +475,31 @@ class CrossrefMappingTest {

   }

+  @Test
+  def testConvertFromCrossRef2OafIssue(): Unit = {
+    val json = Source
+      .fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/doiboost/crossref/article_nojournal.json"))
+      .mkString
+    assertNotNull(json)
+    assertFalse(json.isEmpty)
+
+    val resultList: List[Oaf] = Crossref2Oaf.convert(json)
+    assertTrue(resultList.nonEmpty)
+
+    val items = resultList.filter(p => p.isInstanceOf[Publication])
+    assert(items.nonEmpty)
+    assert(items.size == 1)
+    val pub: Publication = items.head.asInstanceOf[Publication]
+
+    assertNotNull(pub.getJournal.getIssnPrinted)
+    assertNotNull(pub.getJournal.getIssnOnline)
+    assertNotNull(pub.getJournal.getName)
+  }
+
   @Test
   def testSetDateOfAcceptanceCrossRef2Oaf(): Unit = {
@@ -5,6 +5,7 @@ import static eu.dnetlib.dhp.PropagationConstant.removeOutputDir;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

 import java.util.ArrayList;
+import java.util.Objects;
 import java.util.Optional;

 import org.apache.commons.io.IOUtils;

@@ -102,6 +103,7 @@ public class SparkBulkTagJob {
 		ResultTagger resultTagger = new ResultTagger();
 		readPath(spark, inputPath, resultClazz)
 			.map(patchResult(), Encoders.bean(resultClazz))
+			.filter(Objects::nonNull)
 			.map(
 				(MapFunction<R, R>) value -> resultTagger
 					.enrichContextCriteria(
@@ -0,0 +1,29 @@
+
+package eu.dnetlib.dhp.bulktag.eosc;
+
+import java.io.Serializable;
+
+/**
+ * @author miriam.baglioni
+ * @Date 21/07/22
+ */
+public class DatasourceMaster implements Serializable {
+	private String datasource;
+	private String master;
+
+	public String getDatasource() {
+		return datasource;
+	}
+
+	public void setDatasource(String datasource) {
+		this.datasource = datasource;
+	}
+
+	public String getMaster() {
+		return master;
+	}
+
+	public void setMaster(String master) {
+		this.master = master;
+	}
+}
@@ -0,0 +1,136 @@
+
+package eu.dnetlib.dhp.bulktag.eosc;
+
+import java.io.BufferedWriter;
+import java.io.Closeable;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.function.Consumer;
+import java.util.function.Function;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.common.DbClient;
+import eu.dnetlib.dhp.schema.common.ModelSupport;
+import eu.dnetlib.dhp.schema.common.RelationInverse;
+import eu.dnetlib.dhp.schema.oaf.Relation;
+import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
+
+/**
+ * @author miriam.baglioni
+ * @Date 21/07/22
+ */
+public class ReadMasterDatasourceFromDB implements Closeable {
+
+	private final DbClient dbClient;
+	private static final Log log = LogFactory.getLog(ReadMasterDatasourceFromDB.class);
+
+	private final BufferedWriter writer;
+	private final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
+	// matches each EOSC service against its deduplicated (master) datasource record
+	private static final String QUERY = "SELECT dso.id datasource, d.id master FROM " +
+		"(SELECT id FROM dsm_services WHERE id like 'eosc%') dso " +
+		"FULL JOIN " +
+		"(SELECT id, duplicate FROM dsm_dedup_services WHERE duplicate like 'eosc%') d " +
+		"ON dso.id = d.duplicate";
+
+	public static void main(final String[] args) throws Exception {
+		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
+			IOUtils
+				.toString(
+					ReadMasterDatasourceFromDB.class
+						.getResourceAsStream("/eu/dnetlib/dhp/bulktag/datasourcemaster_parameters.json")));
+
+		parser.parseArgument(args);
+
+		final String dbUrl = parser.get("postgresUrl");
+		final String dbUser = parser.get("postgresUser");
+		final String dbPassword = parser.get("postgresPassword");
+		final String hdfsPath = parser.get("hdfsPath");
+		final String hdfsNameNode = parser.get("hdfsNameNode");
+
+		try (
+			final ReadMasterDatasourceFromDB rmd = new ReadMasterDatasourceFromDB(
+				hdfsPath, hdfsNameNode, dbUrl, dbUser, dbPassword)) {
+
+			log.info("Processing datasources...");
+			rmd.execute(QUERY, rmd::datasourceMasterMap);
+		}
+	}
+
+	public void execute(final String sql, final Function<ResultSet, DatasourceMaster> producer) {
+		dbClient.processResults(sql, rs -> writeMap(producer.apply(rs)));
+	}
+
+	public DatasourceMaster datasourceMasterMap(ResultSet rs) {
+		try {
+			DatasourceMaster dm = new DatasourceMaster();
+			String datasource = rs.getString("datasource");
+			dm.setDatasource(datasource);
+			String master = rs.getString("master");
+			if (StringUtils.isNotBlank(master))
+				dm.setMaster(OafMapperUtils.createOpenaireId(10, master, true));
+			else
+				dm.setMaster(OafMapperUtils.createOpenaireId(10, datasource, true));
+			return dm;
+		} catch (final SQLException e) {
+			throw new RuntimeException(e);
+		}
+	}
+
+	@Override
+	public void close() throws IOException {
+		dbClient.close();
+		writer.close();
+	}
+
+	public ReadMasterDatasourceFromDB(
+		final String hdfsPath, String hdfsNameNode, final String dbUrl, final String dbUser, final String dbPassword)
+		throws IOException {
+
+		this.dbClient = new DbClient(dbUrl, dbUser, dbPassword);
+
+		Configuration conf = new Configuration();
+		conf.set("fs.defaultFS", hdfsNameNode);
+
+		FileSystem fileSystem = FileSystem.get(conf);
+		Path hdfsWritePath = new Path(hdfsPath);
+		FSDataOutputStream fsDataOutputStream = null;
+		if (fileSystem.exists(hdfsWritePath)) {
+			fsDataOutputStream = fileSystem.append(hdfsWritePath);
+		} else {
+			fsDataOutputStream = fileSystem.create(hdfsWritePath);
+		}
+
+		this.writer = new BufferedWriter(new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8));
+	}
+
+	protected void writeMap(final DatasourceMaster dm) {
+		try {
+			writer.write(OBJECT_MAPPER.writeValueAsString(dm));
+			writer.newLine();
+		} catch (final IOException e) {
+			throw new RuntimeException(e);
+		}
+	}
+
+}
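[Editor's note] For orientation, writeMap serializes one DatasourceMaster per line on HDFS. A minimal sketch of the record shape; both identifier values below are fabricated for illustration:

package eu.dnetlib.dhp.bulktag.eosc;

import com.fasterxml.jackson.databind.ObjectMapper;

// Illustrative only: shows the JSON-lines shape written by writeMap, with made-up ids
public class DatasourceMasterLineSketch {
	public static void main(String[] args) throws Exception {
		DatasourceMaster dm = new DatasourceMaster();
		dm.setDatasource("eosc________::b2find"); // raw EOSC service id (fabricated)
		dm.setMaster("10|fake________::0123456789abcdef0123456789abcdef"); // OpenAIRE-style master id (fabricated)
		// prints e.g. {"datasource":"eosc________::b2find","master":"10|fake________::0123456789abcdef0123456789abcdef"}
		System.out.println(new ObjectMapper().writeValueAsString(dm));
	}
}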
@@ -0,0 +1,170 @@
+
+package eu.dnetlib.dhp.bulktag.eosc;
+
+import static eu.dnetlib.dhp.PropagationConstant.readPath;
+import static eu.dnetlib.dhp.PropagationConstant.removeOutputDir;
+import static eu.dnetlib.dhp.bulktag.community.TaggingConstants.*;
+import static eu.dnetlib.dhp.bulktag.community.TaggingConstants.TAGGING_TRUST;
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+import static eu.dnetlib.dhp.schema.common.ModelConstants.DNET_PROVENANCE_ACTIONS;
+
+import java.io.Serializable;
+import java.util.*;
+import java.util.stream.Collectors;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.ForeachFunction;
+import org.apache.spark.api.java.function.MapFunction;
+import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.Encoders;
+import org.apache.spark.sql.SaveMode;
+import org.apache.spark.sql.SparkSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.gson.Gson;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.bulktag.SparkBulkTagJob;
+import eu.dnetlib.dhp.bulktag.community.*;
+import eu.dnetlib.dhp.schema.common.ModelSupport;
+import eu.dnetlib.dhp.schema.oaf.*;
+import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
+
+/**
+ * @author miriam.baglioni
+ * @Date 21/07/22
+ */
+public class SparkEoscBulkTag implements Serializable {
+
+	private static final Logger log = LoggerFactory.getLogger(SparkEoscBulkTag.class);
+	public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
+	public static void main(String[] args) throws Exception {
+		String jsonConfiguration = IOUtils
+			.toString(
+				SparkEoscBulkTag.class
+					.getResourceAsStream("/eu/dnetlib/dhp/bulktag/input_eosc_bulkTag_parameters.json"));
+
+		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
+		parser.parseArgument(args);
+
+		Boolean isSparkSessionManaged = Optional
+			.ofNullable(parser.get("isSparkSessionManaged"))
+			.map(Boolean::valueOf)
+			.orElse(Boolean.TRUE);
+		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
+
+		final String inputPath = parser.get("sourcePath");
+		log.info("inputPath: {}", inputPath);
+
+		final String workingPath = parser.get("workingPath");
+		log.info("workingPath: {}", workingPath);
+
+		String datasourceMapPath = parser.get("datasourceMapPath");
+		log.info("datasourceMapPath: {}", datasourceMapPath);
+
+		final String resultClassName = parser.get("resultTableName");
+		log.info("resultTableName: {}", resultClassName);
+
+		Class<? extends Result> resultClazz = (Class<? extends Result>) Class.forName(resultClassName);
+
+		SparkConf conf = new SparkConf();
+		CommunityConfiguration cc;
+
+		runWithSparkSession(
+			conf,
+			isSparkSessionManaged,
+			spark -> {
+				removeOutputDir(spark, workingPath);
+				execBulkTag(spark, inputPath, workingPath, datasourceMapPath, resultClazz);
+			});
+	}
+
+	private static <R extends Result> void execBulkTag(
+		SparkSession spark,
+		String inputPath,
+		String workingPath,
+		String datasourceMapPath,
+		Class<R> resultClazz) {
+
+		// master ids of the datasources hosting EOSC content, as prepared by ReadMasterDatasourceFromDB
+		List<String> hostedByList = readPath(spark, datasourceMapPath, DatasourceMaster.class)
+			.map((MapFunction<DatasourceMaster, String>) dm -> dm.getMaster(), Encoders.STRING())
+			.collectAsList();
+
+		readPath(spark, inputPath, resultClazz)
+			.map(patchResult(), Encoders.bean(resultClazz))
+			.filter(Objects::nonNull)
+			.map(
+				(MapFunction<R, R>) value -> enrich(value, hostedByList),
+				Encoders.bean(resultClazz))
+			.write()
+			.mode(SaveMode.Overwrite)
+			.option("compression", "gzip")
+			.json(workingPath);
+
+		readPath(spark, workingPath, resultClazz)
+			.write()
+			.mode(SaveMode.Overwrite)
+			.option("compression", "gzip")
+			.json(inputPath);
+	}
+
+	private static <R extends Result> R enrich(R value, List<String> hostedByList) {
+		if (value
+			.getInstance()
+			.stream()
+			.anyMatch(
+				i -> (hostedByList.contains(i.getHostedby().getKey())) ||
+					(value.getEoscifguidelines() != null && value.getEoscifguidelines().size() > 0))
+			&&
+			!value.getContext().stream().anyMatch(c -> c.getId().equals("eosc"))) {
+			Context context = new Context();
+			context.setId("eosc");
+			context
+				.setDataInfo(
+					Arrays
+						.asList(
+							OafMapperUtils
+								.dataInfo(
+									false, BULKTAG_DATA_INFO_TYPE, true, false,
+									OafMapperUtils
+										.qualifier(
+											CLASS_ID_DATASOURCE, CLASS_NAME_BULKTAG_DATASOURCE,
+											DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS),
+									TAGGING_TRUST)));
+			value.getContext().add(context);
+		}
+		return value;
+	}
+
+	public static <R> Dataset<R> readPath(
+		SparkSession spark, String inputPath, Class<R> clazz) {
+		return spark
+			.read()
+			.textFile(inputPath)
+			.map((MapFunction<String, R>) value -> OBJECT_MAPPER.readValue(value, clazz), Encoders.bean(clazz));
+	}
+
+	// TODO remove this hack as soon as the values fixed by this method will be provided as NON null
+	private static <R extends Result> MapFunction<R, R> patchResult() {
+		return r -> {
+			if (r.getDataInfo().getDeletedbyinference() == null) {
+				r.getDataInfo().setDeletedbyinference(false);
+			}
+			if (r.getContext() == null) {
+				r.setContext(new ArrayList<>());
+			}
+			return r;
+		};
+	}
+
+}
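[Editor's note] A sketch of how the job above can be launched. The argument long names come from input_eosc_bulkTag_parameters.json further below; all paths are illustrative, and the double-dash flag syntax is assumed to match how ArgumentApplicationParser arguments are passed elsewhere in this codebase:

package eu.dnetlib.dhp.bulktag.eosc;

// Illustrative driver: runs the EOSC bulk tagging on one result table (paths are fabricated)
public class SparkEoscBulkTagRunnerSketch {
	public static void main(String[] args) throws Exception {
		SparkEoscBulkTag.main(new String[] {
			"--isSparkSessionManaged", "true", // let the job create and close its own SparkSession
			"--sourcePath", "/tmp/graph/publication", // input result table, rewritten in place
			"--workingPath", "/tmp/eosc_bulktag/publication", // temporary output
			"--datasourceMapPath", "/tmp/eosc_bulktag/datasourcemaster", // output of ReadMasterDatasourceFromDB
			"--resultTableName", "eu.dnetlib.dhp.schema.oaf.Publication"
		});
	}
}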
@@ -1,13 +1,10 @@

-package eu.dnetlib.dhp.bulktag;
+package eu.dnetlib.dhp.bulktag.eosc;

 import static eu.dnetlib.dhp.PropagationConstant.readPath;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

 import java.util.*;
-import java.util.function.Function;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;

 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;

@@ -21,35 +18,11 @@ import org.slf4j.LoggerFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;

 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
-import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.oaf.*;
-import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;

 public class SparkEoscTag {
 	private static final Logger log = LoggerFactory.getLogger(SparkEoscTag.class);
 	public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
-	public static final Qualifier EOSC_QUALIFIER = OafMapperUtils
-		.qualifier(
-			"EOSC",
-			"European Open Science Cloud",
-			ModelConstants.DNET_SUBJECT_TYPOLOGIES, ModelConstants.DNET_SUBJECT_TYPOLOGIES);
-	public static final DataInfo EOSC_DATAINFO = OafMapperUtils
-		.dataInfo(
-			false, "propagation", true, false,
-			OafMapperUtils
-				.qualifier(
-					"propagation:subject", "Inferred by OpenAIRE",
-					ModelConstants.DNET_PROVENANCE_ACTIONS, ModelConstants.DNET_PROVENANCE_ACTIONS),
-			"0.9");
-	public final static StructuredProperty EOSC_NOTEBOOK = OafMapperUtils
-		.structuredProperty(
-			"EOSC::Jupyter Notebook", EOSC_QUALIFIER, EOSC_DATAINFO);
-	public final static StructuredProperty EOSC_GALAXY = OafMapperUtils
-		.structuredProperty(
-			"EOSC::Galaxy Workflow", EOSC_QUALIFIER, EOSC_DATAINFO);
-	public final static StructuredProperty EOSC_TWITTER = OafMapperUtils
-		.structuredProperty(
-			"EOSC::Twitter Data", EOSC_QUALIFIER, EOSC_DATAINFO);

 	public static void main(String[] args) throws Exception {
 		String jsonConfiguration = IOUtils

@@ -84,29 +57,35 @@ public class SparkEoscTag {
 			});
 	}

+	public static EoscIfGuidelines newInstance(String code, String label, String url, String semantics) {
+		EoscIfGuidelines eig = new EoscIfGuidelines();
+		eig.setCode(code);
+		eig.setLabel(label);
+		eig.setUrl(url);
+		eig.setSemanticRelation(semantics);
+		return eig;
+	}
+
 	private static void execEoscTag(SparkSession spark, String inputPath, String workingPath) {

 		readPath(spark, inputPath + "/software", Software.class)
 			.map((MapFunction<Software, Software>) s -> {
-				List<StructuredProperty> sbject;
-				if (!Optional.ofNullable(s.getSubject()).isPresent())
-					s.setSubject(new ArrayList<>());
-				sbject = s.getSubject();
-
 				if (containsCriteriaNotebook(s)) {
-					sbject.add(EOSC_NOTEBOOK);
-					if (sbject.stream().anyMatch(sb -> sb.getValue().equals("EOSC Jupyter Notebook"))) {
-						sbject = sbject.stream().map(sb -> {
-							if (sb.getValue().equals("EOSC Jupyter Notebook")) {
-								return null;
-							}
-							return sb;
-						}).filter(Objects::nonNull).collect(Collectors.toList());
-						s.setSubject(sbject);
-					}
+					if (!Optional.ofNullable(s.getEoscifguidelines()).isPresent())
+						s.setEoscifguidelines(new ArrayList<>());
+					addEIG(
+						s.getEoscifguidelines(), "EOSC::Jupyter Notebook", "EOSC::Jupyter Notebook", "",
+						"compliesWith");
 				}
 				if (containsCriteriaGalaxy(s)) {
-					sbject.add(EOSC_GALAXY);
+					if (!Optional.ofNullable(s.getEoscifguidelines()).isPresent())
+						s.setEoscifguidelines(new ArrayList<>());
+
+					addEIG(
+						s.getEoscifguidelines(), "EOSC::Galaxy Workflow", "EOSC::Galaxy Workflow", "", "compliesWith");
 				}
 				return s;
 			}, Encoders.bean(Software.class))

@@ -123,15 +102,17 @@ public class SparkEoscTag {

 		readPath(spark, inputPath + "/otherresearchproduct", OtherResearchProduct.class)
 			.map((MapFunction<OtherResearchProduct, OtherResearchProduct>) orp -> {
-				List<StructuredProperty> sbject;
-				if (!Optional.ofNullable(orp.getSubject()).isPresent())
-					orp.setSubject(new ArrayList<>());
-				sbject = orp.getSubject();
+				if (!Optional.ofNullable(orp.getEoscifguidelines()).isPresent())
+					orp.setEoscifguidelines(new ArrayList<>());
 				if (containsCriteriaGalaxy(orp)) {
-					sbject.add(EOSC_GALAXY);
+					addEIG(
+						orp.getEoscifguidelines(), "EOSC::Galaxy Workflow", "EOSC::Galaxy Workflow", "",
+						"compliesWith");
 				}
 				if (containscriteriaTwitter(orp)) {
-					sbject.add(EOSC_TWITTER);
+					addEIG(orp.getEoscifguidelines(), "EOSC::Twitter Data", "EOSC::Twitter Data", "", "compliesWith");
 				}
 				return orp;
 			}, Encoders.bean(OtherResearchProduct.class))

@@ -148,12 +129,11 @@ public class SparkEoscTag {

 		readPath(spark, inputPath + "/dataset", Dataset.class)
 			.map((MapFunction<Dataset, Dataset>) d -> {
-				List<StructuredProperty> sbject;
-				if (!Optional.ofNullable(d.getSubject()).isPresent())
-					d.setSubject(new ArrayList<>());
-				sbject = d.getSubject();
+				if (!Optional.ofNullable(d.getEoscifguidelines()).isPresent())
+					d.setEoscifguidelines(new ArrayList<>());
 				if (containscriteriaTwitter(d)) {
-					sbject.add(EOSC_TWITTER);
+					addEIG(d.getEoscifguidelines(), "EOSC::Twitter Data", "EOSC::Twitter Data", "", "compliesWith");
 				}
 				return d;
 			}, Encoders.bean(Dataset.class))

@@ -169,6 +149,12 @@ public class SparkEoscTag {
 				.json(inputPath + "/dataset");
 	}

+	private static void addEIG(List<EoscIfGuidelines> eoscifguidelines, String code, String label, String url,
+		String sem) {
+		if (!eoscifguidelines.stream().anyMatch(eig -> eig.getCode().equals(code)))
+			eoscifguidelines.add(newInstance(code, label, url, sem));
+	}
+
 	private static boolean containscriteriaTwitter(Result r) {
 		Set<String> words = getWordsSP(r.getTitle());
 		words.addAll(getWordsF(r.getDescription()));

@@ -212,13 +198,6 @@ public class SparkEoscTag {
 		return false;
 	}

-	private static Set<String> getSubjects(List<StructuredProperty> s) {
-		Set<String> subjects = new HashSet<>();
-		s.stream().forEach(sbj -> subjects.addAll(Arrays.asList(sbj.getValue().toLowerCase().split(" "))));
-		s.stream().forEach(sbj -> subjects.add(sbj.getValue().toLowerCase()));
-		return subjects;
-	}
-
 	private static Set<String> getWordsSP(List<StructuredProperty> elem) {
 		Set<String> words = new HashSet<>();
 		Optional

@@ -242,9 +221,7 @@ public class SparkEoscTag {
 					t -> words
 						.addAll(
 							Arrays.asList(t.getValue().toLowerCase().replaceAll("[^a-zA-Z ]", "").split(" ")))));
-		// elem
-		//	.forEach(
-		//		t -> words.addAll(Arrays.asList(t.getValue().toLowerCase().replaceAll("[^a-zA-Z ]", "").split(" "))));
 		return words;

 	}
@@ -6,6 +6,7 @@ import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.util.Arrays;
import java.util.List;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
@@ -87,6 +88,8 @@ public class PrepareDatasourceCountryAssociation {
Dataset<Datasource> datasource = readPath(spark, inputPath + "/datasource", Datasource.class)
.filter(
(FilterFunction<Datasource>) ds -> !ds.getDataInfo().getDeletedbyinference() &&
Optional.ofNullable(ds.getDatasourcetype()).isPresent() &&
Optional.ofNullable(ds.getDatasourcetype().getClassid()).isPresent() &&
(allowedtypes.contains(ds.getDatasourcetype().getClassid()) ||
whitelist.contains(ds.getId())));
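The two added Optional guards are what keep this filter from dereferencing a null datasourcetype. A minimal illustration of the failure mode they prevent, built on a hypothetical record rather than real data:

// Hypothetical record: datasourcetype was never set.
Datasource ds = new Datasource();
ds.setId("10|example____::0000000000000000000000000000dead");
// Without the guards the filter evaluates
//   allowedtypes.contains(ds.getDatasourcetype().getClassid())
// and throws a NullPointerException, failing the whole Spark stage;
// with them, such a record is simply filtered out.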
@@ -0,0 +1,32 @@
[
{
"paramName": "p",
"paramLongName": "hdfsPath",
"paramDescription": "the path where storing the sequential file",
"paramRequired": true
},
{
"paramName": "nn",
"paramLongName": "hdfsNameNode",
"paramDescription": "the name node on hdfs",
"paramRequired": true
},
{
"paramName": "pgurl",
"paramLongName": "postgresUrl",
"paramDescription": "postgres url, example: jdbc:postgresql://localhost:5432/testdb",
"paramRequired": true
},
{
"paramName": "pguser",
"paramLongName": "postgresUser",
"paramDescription": "postgres user",
"paramRequired": false
},
{
"paramName": "pgpasswd",
"paramLongName": "postgresPassword",
"paramDescription": "postgres password",
"paramRequired": false
}
]
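The five parameters above fully describe the DB-dump step. A hedged sketch of invoking it directly, mirroring the workflow's <java> action further down; every path and credential here is a placeholder:

// Sketch with placeholder values; in production the Oozie action below supplies the arguments.
ReadMasterDatasourceFromDB
    .main(
        new String[] {
            "--hdfsPath", "/tmp/datasourcemaster",
            "--hdfsNameNode", "hdfs://nameservice1",
            "--postgresUrl", "jdbc:postgresql://localhost:5432/testdb",
            "--postgresUser", "dnet",
            "--postgresPassword", "secret"
        });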
@@ -0,0 +1,34 @@
[

{
"paramName": "s",
"paramLongName": "sourcePath",
"paramDescription": "the path of the sequential file to read",
"paramRequired": true
},
{
"paramName": "dmp",
"paramLongName": "datasourceMapPath",
"paramDescription": "the path where the association datasource master has been stored",
"paramRequired": true
},
{
"paramName": "tn",
"paramLongName": "resultTableName",
"paramDescription": "the name of the result table we are currently working on",
"paramRequired": true
},
{
"paramName": "wp",
"paramLongName": "workingPath",
"paramDescription": "the path used to store temporary output files",
"paramRequired": true
},
{
"paramName": "ssm",
"paramLongName": "isSparkSessionManaged",
"paramDescription": "true if the spark session is managed, false otherwise",
"paramRequired": false
}

]
@@ -16,6 +16,21 @@
<name>outputPath</name>
<description>the output path</description>
</property>

<property>
<name>postgresURL</name>
<description>the url of the postgres server to query</description>
</property>
<property>
<name>postgresUser</name>
<description>the username to access the postgres db</description>
</property>
<property>
<name>postgresPassword</name>
<description>the postgres password</description>
</property>

</parameters>

<global>
@@ -211,7 +226,7 @@
<master>yarn-cluster</master>
<mode>cluster</mode>
<name>EOSC_tagging</name>
<class>eu.dnetlib.dhp.bulktag.SparkEoscTag</class>
<class>eu.dnetlib.dhp.bulktag.eosc.SparkEoscTag</class>
<jar>dhp-enrichment-${projectVersion}.jar</jar>
<spark-opts>
--num-executors=${sparkExecutorNumber}
@@ -226,10 +241,132 @@
<arg>--sourcePath</arg><arg>${outputPath}</arg>
<arg>--workingPath</arg><arg>${workingDir}/eoscTag</arg>
</spark>
<ok to="End"/>
<ok to="eosc_get_datasource_master"/>
<error to="Kill"/>
</action>

<action name="eosc_get_datasource_master">
<java>
<main-class>eu.dnetlib.dhp.bulktag.eosc.ReadMasterDatasourceFromDB</main-class>
<arg>--hdfsPath</arg><arg>${workingDir}/datasourcemaster</arg>
<arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
<arg>--postgresUrl</arg><arg>${postgresURL}</arg>
<arg>--postgresUser</arg><arg>${postgresUser}</arg>
<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
</java>
<ok to="fork_eosc_context_tag"/>
<error to="Kill"/>
</action>

<fork name="fork_eosc_context_tag">
<path start="eosc_context_tag_publication"/>
<path start="eosc_context_tag_dataset"/>
<path start="eosc_context_tag_otherresearchproduct"/>
<path start="eosc_context_tag_software"/>
</fork>

<action name="eosc_context_tag_publication">
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn-cluster</master>
<mode>cluster</mode>
<name>EOSC_tagging</name>
<class>eu.dnetlib.dhp.bulktag.eosc.SparkEoscBulkTag</class>
<jar>dhp-enrichment-${projectVersion}.jar</jar>
<spark-opts>
--num-executors=${sparkExecutorNumber}
--executor-memory=${sparkExecutorMemory}
--executor-cores=${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts>
<arg>--sourcePath</arg><arg>${outputPath}/publication</arg>
<arg>--workingPath</arg><arg>${workingDir}/eoscContextTag/publication</arg>
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
<arg>--datasourceMapPath</arg><arg>${workingDir}/datasourcemaster</arg>
</spark>
<ok to="wait_eosc_context_tag"/>
<error to="Kill"/>
</action>

<action name="eosc_context_tag_dataset">
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn-cluster</master>
<mode>cluster</mode>
<name>EOSC_tagging</name>
<class>eu.dnetlib.dhp.bulktag.eosc.SparkEoscBulkTag</class>
<jar>dhp-enrichment-${projectVersion}.jar</jar>
<spark-opts>
--num-executors=${sparkExecutorNumber}
--executor-memory=${sparkExecutorMemory}
--executor-cores=${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts>
<arg>--sourcePath</arg><arg>${outputPath}/dataset</arg>
<arg>--workingPath</arg><arg>${workingDir}/eoscContextTag/dataset</arg>
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
<arg>--datasourceMapPath</arg><arg>${workingDir}/datasourcemaster</arg>
</spark>
<ok to="wait_eosc_context_tag"/>
<error to="Kill"/>
</action>
<action name="eosc_context_tag_software">
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn-cluster</master>
<mode>cluster</mode>
<name>EOSC_tagging</name>
<class>eu.dnetlib.dhp.bulktag.eosc.SparkEoscBulkTag</class>
<jar>dhp-enrichment-${projectVersion}.jar</jar>
<spark-opts>
--num-executors=${sparkExecutorNumber}
--executor-memory=${sparkExecutorMemory}
--executor-cores=${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts>
<arg>--sourcePath</arg><arg>${outputPath}/software</arg>
<arg>--workingPath</arg><arg>${workingDir}/eoscContextTag/software</arg>
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
<arg>--datasourceMapPath</arg><arg>${workingDir}/datasourcemaster</arg>
</spark>
<ok to="wait_eosc_context_tag"/>
<error to="Kill"/>
</action>
<action name="eosc_context_tag_otherresearchproduct">
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn-cluster</master>
<mode>cluster</mode>
<name>EOSC_tagging</name>
<class>eu.dnetlib.dhp.bulktag.eosc.SparkEoscBulkTag</class>
<jar>dhp-enrichment-${projectVersion}.jar</jar>
<spark-opts>
--num-executors=${sparkExecutorNumber}
--executor-memory=${sparkExecutorMemory}
--executor-cores=${sparkExecutorCores}
--driver-memory=${sparkDriverMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts>
<arg>--sourcePath</arg><arg>${outputPath}/otherresearchproduct</arg>
<arg>--workingPath</arg><arg>${workingDir}/eoscContextTag/otherresearchproduct</arg>
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
<arg>--datasourceMapPath</arg><arg>${workingDir}/datasourcemaster</arg>
</spark>
<ok to="wait_eosc_context_tag"/>
<error to="Kill"/>
</action>
<join name="wait_eosc_context_tag" to="End"/>
<end name="End"/>

</workflow-app>
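The fork above fans the same SparkEoscBulkTag job out over the four result tables and joins on wait_eosc_context_tag before End. The job's internals are not part of this diff, but its inputs suggest the shape of the tagging step. A hedged sketch, assuming a DatasourceMaster bean for the map file (see the bean sketch after the datasource list at the end of this diff) and the usual KeyValue hostedby field on Instance:

// Hedged sketch only; the real SparkEoscBulkTag implementation is not shown in this diff.
Set<String> eoscDatasourceIds = new HashSet<>(
    spark
        .read()
        .textFile(datasourceMapPath)
        .javaRDD()
        .map(line -> new ObjectMapper().readValue(line, DatasourceMaster.class).getMaster())
        .collect());

// A result would then receive the "eosc" context when at least one of its
// instances is hosted by a datasource whose master id is in the map.
boolean hostedByEosc = result
    .getInstance()
    .stream()
    .anyMatch(i -> i.getHostedby() != null && eoscDatasourceIds.contains(i.getHostedby().getKey()));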
@@ -0,0 +1,162 @@

package eu.dnetlib.dhp.bulktag;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* @author miriam.baglioni
* @Date 22/07/22
*/
import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.bulktag.eosc.SparkEoscBulkTag;
import eu.dnetlib.dhp.schema.oaf.Dataset;
import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct;
import eu.dnetlib.dhp.schema.oaf.Software;
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;

//"50|475c1990cbb2::0fecfb874d9395aa69d2f4d7cd1acbea" has instance hostedby eosc
//"50|475c1990cbb2::3185cd5d8a2b0a06bb9b23ef11748eb1" has instance hostedby eosc
//"50|475c1990cbb2::449f28eefccf9f70c04ad70d61e041c7" has two instance one hostedby eosc
//"50|475c1990cbb2::3894c94123e96df8a21249957cf160cb" has EoscTag

public class EOSCContextTaggingTest {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

private static SparkSession spark;

private static Path workingDir;

private static final Logger log = LoggerFactory.getLogger(EOSCContextTaggingTest.class);

@BeforeAll
public static void beforeAll() throws IOException {
workingDir = Files.createTempDirectory(EOSCContextTaggingTest.class.getSimpleName());
log.info("using work dir {}", workingDir);

SparkConf conf = new SparkConf();
conf.setAppName(EOSCContextTaggingTest.class.getSimpleName());

conf.setMaster("local[*]");
conf.set("spark.driver.host", "localhost");
conf.set("hive.metastore.local", "true");
conf.set("spark.ui.enabled", "false");
conf.set("spark.sql.warehouse.dir", workingDir.toString());
conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

spark = SparkSession
.builder()
.appName(EOSCTagJobTest.class.getSimpleName())
.config(conf)
.getOrCreate();
}

@AfterAll
public static void afterAll() throws IOException {
FileUtils.deleteDirectory(workingDir.toFile());
spark.stop();
}

@Test
void EoscContextTagTest() throws Exception {

spark
.read()
.textFile(getClass().getResource("/eu/dnetlib/dhp/bulktag/eosc/dataset/dataset_10.json").getPath())
.map(
(MapFunction<String, Dataset>) value -> OBJECT_MAPPER.readValue(value, Dataset.class),
Encoders.bean(Dataset.class))
.write()
.mode(SaveMode.Overwrite)
.option("compression", "gzip")
.json(workingDir.toString() + "/input/dataset");

SparkEoscBulkTag
.main(
new String[] {
"-isSparkSessionManaged", Boolean.FALSE.toString(),
"-sourcePath",
workingDir.toString() + "/input/dataset",
"-workingPath", workingDir.toString() + "/working/dataset",
"-datasourceMapPath",
getClass()
.getResource("/eu/dnetlib/dhp/bulktag/eosc/datasourceMasterAssociation/datasourceMaster")
.getPath(),
"-resultTableName", "eu.dnetlib.dhp.schema.oaf.Dataset"
});

final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

JavaRDD<Dataset> tmp = sc
.textFile(workingDir.toString() + "/input/dataset")
.map(item -> OBJECT_MAPPER.readValue(item, Dataset.class));

Assertions.assertEquals(10, tmp.count());

Assertions
.assertEquals(
4,
tmp
.filter(
s -> s.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
.count());

Assertions
.assertEquals(
1,
tmp
.filter(
d -> d.getId().equals("50|475c1990cbb2::0fecfb874d9395aa69d2f4d7cd1acbea")
&&
d.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
.count());
Assertions
.assertEquals(
1,
tmp
.filter(
d -> d.getId().equals("50|475c1990cbb2::3185cd5d8a2b0a06bb9b23ef11748eb1")
&&
d.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
.count());

Assertions
.assertEquals(
1,
tmp
.filter(
d -> d.getId().equals("50|475c1990cbb2::3894c94123e96df8a21249957cf160cb")
&&
d.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
.count());

Assertions
.assertEquals(
1,
tmp
.filter(
d -> d.getId().equals("50|475c1990cbb2::3894c94123e96df8a21249957cf160cb")
&&
d.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
.count());
}

}
@@ -1,21 +1,18 @@

package eu.dnetlib.dhp.bulktag;

import static eu.dnetlib.dhp.bulktag.community.TaggingConstants.ZENODO_COMMUNITY_INDICATOR;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

import eu.dnetlib.dhp.bulktag.eosc.SparkEoscTag;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
@@ -126,10 +123,23 @@ public class EOSCTagJobTest {
.assertEquals(
4,
tmp
.filter(s -> s.getEoscifguidelines() != null)
.filter(
s -> s.getSubject().stream().anyMatch(sbj -> sbj.getValue().equals("EOSC::Jupyter Notebook")))
s -> s
.getEoscifguidelines()
.stream()
.anyMatch(eig -> eig.getCode().equals("EOSC::Jupyter Notebook")))
.count());

Assertions
.assertEquals(
1, tmp
.filter(sw -> sw.getId().equals("50|od______1582::4132f5ec9496f0d6adc7b00a50a56ff4"))
.collect()
.get(0)
.getEoscifguidelines()
.size());

Assertions
.assertEquals(
1, tmp
@@ -140,6 +150,16 @@ public class EOSCTagJobTest {
.size());
Assertions
.assertTrue(
tmp
.filter(sw -> sw.getId().equals("50|od______1582::4132f5ec9496f0d6adc7b00a50a56ff4"))
.collect()
.get(0)
.getEoscifguidelines()
.stream()
.anyMatch(s -> s.getCode().equals("EOSC::Jupyter Notebook")));

Assertions
.assertFalse(
tmp
.filter(sw -> sw.getId().equals("50|od______1582::4132f5ec9496f0d6adc7b00a50a56ff4"))
.collect()
@@ -166,16 +186,24 @@ public class EOSCTagJobTest {
.stream()
.anyMatch(s -> s.getValue().equals("EOSC::Jupyter Notebook")));

Assertions
.assertTrue(
tmp
.filter(sw -> sw.getId().equals("50|od______1582::501b25d420f808c8eddcd9b16e917f11"))
.collect()
.get(0)
.getEoscifguidelines() == null);

Assertions
.assertEquals(
9, tmp
8, tmp
.filter(sw -> sw.getId().equals("50|od______1582::581621232a561b7e8b4952b18b8b0e56"))
.collect()
.get(0)
.getSubject()
.size());
Assertions
.assertTrue(
.assertFalse(
tmp
.filter(sw -> sw.getId().equals("50|od______1582::581621232a561b7e8b4952b18b8b0e56"))
.collect()
@@ -183,6 +211,23 @@ public class EOSCTagJobTest {
.getSubject()
.stream()
.anyMatch(s -> s.getValue().equals("EOSC::Jupyter Notebook")));
Assertions
.assertEquals(
1, tmp
.filter(sw -> sw.getId().equals("50|od______1582::581621232a561b7e8b4952b18b8b0e56"))
.collect()
.get(0)
.getEoscifguidelines()
.size());
Assertions
.assertTrue(
tmp
.filter(sw -> sw.getId().equals("50|od______1582::581621232a561b7e8b4952b18b8b0e56"))
.collect()
.get(0)
.getEoscifguidelines()
.stream()
.anyMatch(s -> s.getCode().equals("EOSC::Jupyter Notebook")));

Assertions
.assertEquals(
@@ -201,17 +246,24 @@ public class EOSCTagJobTest {
.getSubject()
.stream()
.anyMatch(s -> s.getValue().equals("EOSC::Jupyter Notebook")));
Assertions
.assertTrue(
tmp
.filter(sw -> sw.getId().equals("50|od______1582::5aec1186054301b66c0c5dc35972a589"))
.collect()
.get(0)
.getEoscifguidelines() == null);

Assertions
.assertEquals(
9, tmp
8, tmp
.filter(sw -> sw.getId().equals("50|od______1582::639909adfad9d708308f2aedb733e4a0"))
.collect()
.get(0)
.getSubject()
.size());
Assertions
.assertTrue(
.assertFalse(
tmp
.filter(sw -> sw.getId().equals("50|od______1582::639909adfad9d708308f2aedb733e4a0"))
.collect()
@@ -219,14 +271,31 @@ public class EOSCTagJobTest {
.getSubject()
.stream()
.anyMatch(s -> s.getValue().equals("EOSC::Jupyter Notebook")));
Assertions
.assertEquals(
1,
tmp
.filter(sw -> sw.getId().equals("50|od______1582::639909adfad9d708308f2aedb733e4a0"))
.collect()
.get(0)
.getEoscifguidelines()
.size());
Assertions
.assertTrue(
tmp
.filter(sw -> sw.getId().equals("50|od______1582::639909adfad9d708308f2aedb733e4a0"))
.collect()
.get(0)
.getEoscifguidelines()
.stream()
.anyMatch(s -> s.getCode().equals("EOSC::Jupyter Notebook")));

List<StructuredProperty> subjects = tmp
.filter(sw -> sw.getId().equals("50|od______1582::6e7a9b21a2feef45673890432af34244"))
.collect()
.get(0)
.getSubject();
Assertions.assertEquals(8, subjects.size());
Assertions.assertEquals(7, subjects.size());
Assertions.assertTrue(subjects.stream().anyMatch(s -> s.getValue().equals("EOSC::Jupyter Notebook")));
Assertions.assertTrue(subjects.stream().anyMatch(s -> s.getValue().equals("jupyter")));
Assertions.assertTrue(subjects.stream().anyMatch(s -> s.getValue().equals("Modeling and Simulation")));
Assertions.assertTrue(subjects.stream().anyMatch(s -> s.getValue().equals("structure granulaire")));
@@ -250,6 +319,17 @@ public class EOSCTagJobTest {
.filter(
ds -> ds.getSubject().stream().anyMatch(sbj -> sbj.getValue().equals("EOSC::Jupyter Notebook")))
.count());
Assertions
.assertEquals(
0, sc
.textFile(workingDir.toString() + "/input/dataset")
.map(item -> OBJECT_MAPPER.readValue(item, Dataset.class))
.filter(
ds -> ds
.getEoscifguidelines()
.stream()
.anyMatch(eig -> eig.getCode().equals("EOSC::Jupyter Notebook")))
.count());

Assertions
.assertEquals(
@@ -264,7 +344,22 @@ public class EOSCTagJobTest {
.textFile(workingDir.toString() + "/input/otherresearchproduct")
.map(item -> OBJECT_MAPPER.readValue(item, OtherResearchProduct.class))
.filter(
ds -> ds.getSubject().stream().anyMatch(sbj -> sbj.getValue().equals("EOSC::Jupyter Notebook")))
orp -> orp
.getSubject()
.stream()
.anyMatch(sbj -> sbj.getValue().equals("EOSC::Jupyter Notebook")))
.count());

Assertions
.assertEquals(
0, sc
.textFile(workingDir.toString() + "/input/otherresearchproduct")
.map(item -> OBJECT_MAPPER.readValue(item, OtherResearchProduct.class))
.filter(
orp -> orp
.getSubject()
.stream()
.anyMatch(eig -> eig.getValue().equals("EOSC::Jupyter Notebook")))
.count());

// spark.stop();
@@ -326,22 +421,41 @@ public class EOSCTagJobTest {

Assertions
.assertEquals(
1,
0,
tmp
.filter(
s -> s.getSubject().stream().anyMatch(sbj -> sbj.getValue().equals("EOSC::Galaxy Workflow")))
.count());
Assertions
.assertEquals(
1,
tmp
.filter(
s -> s.getEoscifguidelines() != null)
.count());
Assertions
.assertEquals(
1,
tmp
.filter(
s -> s.getEoscifguidelines() != null)
.filter(
s -> s
.getEoscifguidelines()
.stream()
.anyMatch(eig -> eig.getCode().equals("EOSC::Galaxy Workflow")))
.count());

Assertions
.assertEquals(
2, tmp
1, tmp
.filter(sw -> sw.getId().equals("50|od______1582::4132f5ec9496f0d6adc7b00a50a56ff4"))
.collect()
.get(0)
.getSubject()
.size());
Assertions
.assertTrue(
.assertFalse(
tmp
.filter(sw -> sw.getId().equals("50|od______1582::4132f5ec9496f0d6adc7b00a50a56ff4"))
.collect()
@@ -350,6 +464,24 @@ public class EOSCTagJobTest {
.stream()
.anyMatch(s -> s.getValue().equals("EOSC::Galaxy Workflow")));

Assertions
.assertEquals(
1, tmp
.filter(sw -> sw.getId().equals("50|od______1582::4132f5ec9496f0d6adc7b00a50a56ff4"))
.collect()
.get(0)
.getEoscifguidelines()
.size());
Assertions
.assertTrue(
tmp
.filter(sw -> sw.getId().equals("50|od______1582::4132f5ec9496f0d6adc7b00a50a56ff4"))
.collect()
.get(0)
.getEoscifguidelines()
.stream()
.anyMatch(eig -> eig.getCode().equals("EOSC::Galaxy Workflow")));

Assertions
.assertEquals(
5, tmp
@@ -385,22 +517,34 @@ public class EOSCTagJobTest {

Assertions
.assertEquals(
1,
0,
orp
.filter(
s -> s.getSubject().stream().anyMatch(sbj -> sbj.getValue().equals("EOSC::Galaxy Workflow")))
.count());
orp.foreach(o -> System.out.println(OBJECT_MAPPER.writeValueAsString(o)));

Assertions
.assertEquals(
3, orp
1, orp
.filter(o -> o.getEoscifguidelines() != null)
.filter(
o -> o
.getEoscifguidelines()
.stream()
.anyMatch(eig -> eig.getCode().equals("EOSC::Galaxy Workflow")))
.count());

Assertions
.assertEquals(
2, orp
.filter(sw -> sw.getId().equals("50|od______2017::0750a4d0782265873d669520f5e33c07"))
.collect()
.get(0)
.getSubject()
.size());
Assertions
.assertTrue(
.assertFalse(
orp
.filter(sw -> sw.getId().equals("50|od______2017::0750a4d0782265873d669520f5e33c07"))
.collect()
@@ -408,6 +552,23 @@ public class EOSCTagJobTest {
.getSubject()
.stream()
.anyMatch(s -> s.getValue().equals("EOSC::Galaxy Workflow")));
Assertions
.assertEquals(
1, orp
.filter(sw -> sw.getId().equals("50|od______2017::0750a4d0782265873d669520f5e33c07"))
.collect()
.get(0)
.getEoscifguidelines()
.size());
Assertions
.assertTrue(
orp
.filter(sw -> sw.getId().equals("50|od______2017::0750a4d0782265873d669520f5e33c07"))
.collect()
.get(0)
.getEoscifguidelines()
.stream()
.anyMatch(s -> s.getCode().equals("EOSC::Galaxy Workflow")));

Assertions
.assertEquals(
@@ -516,10 +677,20 @@ public class EOSCTagJobTest {

Assertions
.assertEquals(
3,
0,
orp
.filter(s -> s.getSubject().stream().anyMatch(sbj -> sbj.getValue().equals("EOSC::Twitter Data")))
.count());
Assertions
.assertEquals(
3,
orp
.filter(
s -> s
.getEoscifguidelines()
.stream()
.anyMatch(eig -> eig.getCode().equals("EOSC::Twitter Data")))
.count());

JavaRDD<Dataset> dats = sc
.textFile(workingDir.toString() + "/input/dataset")
@@ -531,7 +702,11 @@ public class EOSCTagJobTest {
.assertEquals(
3,
dats
.filter(s -> s.getSubject().stream().anyMatch(sbj -> sbj.getValue().equals("EOSC::Twitter Data")))
.filter(
s -> s
.getEoscifguidelines()
.stream()
.anyMatch(eig -> eig.getCode().equals("EOSC::Twitter Data")))
.count());

}

File diff suppressed because one or more lines are too long
@@ -0,0 +1,318 @@
{"datasource":"eosc________::100percentit::100percentit.100_percent_it_trusted_cloud","master":"10|eosc________::7ef2576047f040612b983a27347471fc"}
{"datasource":"eosc________::altec::altec.space-vis_adn_service","master":"10|eosc________::2946c48bbcc514ad76bbbf727d5d8fbc"}
{"datasource":"eosc________::astron::astron.","master":"10|eosc________::acb262d4bfdeb6aa9b463a4a6d0d662a"}
{"datasource":"eosc________::athena::athena.atmo-flud","master":"10|eosc________::ac448975e1d7f8b0266c8bb3b3992029"}
{"datasource":"eosc________::athena::athena.uw-map","master":"10|eosc________::5f2a401cf8ce9dc22a3776cea519b594"}
{"datasource":"eosc________::athena::athena.verbal_aggression_analyser_va_analyser","master":"10|eosc________::8b26233e89a50e3754972b1341130494"}
{"datasource":"eosc________::authenix::authenix.authenix","master":"10|eosc________::3cd84764da5728473593a580efb29a40"}
{"datasource":"eosc________::bineo::bineo.cos4bio","master":"10|eosc________::903e0526a6e56eeaf0e4561aa862ecb8"}
{"datasource":"eosc________::blue-cloud::blue-cloud.phytoplankton_eovs","master":"10|eosc________::c2438d79b48baf817956f3856877b3b8"}
{"datasource":"eosc________::bsc-es::bsc-es.bdrc_-_barcelona_dust_regional_center","master":"10|eosc________::756664ca614118315840eb8e985e4377"}
{"datasource":"eosc________::bsc-es::bsc-es.openebench","master":"10|eosc________::69ed72b873b803feed5ba6ae47548419"}
{"datasource":"eosc________::capsh::capsh.dissemin","master":"10|eosc________::e81587742e4107ce83723df17c27cb40"}
{"datasource":"eosc________::carlzeissm::carlzeissm.aper","master":"10|eosc________::f3beb9ee5ee293b723e2edd6f990fde3"}
{"datasource":"eosc________::ccsd::ccsd.episciences","master":"10|eosc________::e1e9de0dbf4bce79c49338d7cf9327e2"}
{"datasource":"eosc________::cds::cds.simbad_simbad_astronomical_database_provides_basic_data_cross-identifications_bibliography_and_measurements_for_astronomical_objects_outside_the_solar_system","master":"10|eosc________::a1e41e71453ac32161f4ac3f5c0e0421"}
{"datasource":"eosc________::centerdata::centerdata.surveycodingsorg","master":"10|eosc________::72db73ab253727c889905da50f506d10"}
{"datasource":"eosc________::cesga::cesga.finisterrae","master":"10|eosc________::6af4303d93f72744cc4c3c815ed2c9a0"}
{"datasource":"eosc________::cesnet::cesnet.metacentrum_cloud","master":"10|eosc________::cebfaa2d0b93502d56a8fbeb6b66cfbe"}
{"datasource":"eosc________::cesnet::cesnet.object_based_storage","master":"10|eosc________::1c5b55339bb86ff997a256d42d7be4b0"}
{"datasource":"eosc________::cesnet::cesnet.umsa_-_untargeted_mass_spectrometry_data_analysis","master":"10|eosc________::d928868211759352cb1604713e0347ec"}
{"datasource":"eosc________::cessda-eric::cessda-eric.cessda_data_catalogue","master":"10|fairsharing_::936824c0191953647ec609b4f49bc964"}
{"datasource":"eosc________::cessda-eric::cessda-eric.data_management_expert_guide_dmeg","master":"10|eosc________::22c14aaf31fc64424fa97adffe6380b9"}
{"datasource":"eosc________::cessda-eric::cessda-eric.elsst__european_language_social_science_thesaurus","master":"10|eosc________::5b30e057381cf0200dc2cdc7b562f570"}
{"datasource":"eosc________::cines::cines.etdr","master":"10|eosc________::3b7f7d6aafb0154025330183d59ce670"}
{"datasource":"eosc________::clarin-eric::clarin-eric.language_resource_switchboard","master":"10|eosc________::3531aa80dbe2b1018133b510a933de40"}
{"datasource":"eosc________::clarin-eric::clarin-eric.virtual_collection_registry","master":"10|eosc________::454e4f7f9f53d9dacf9dc3ba27902f16"}
{"datasource":"eosc________::clarin-eric::clarin-eric.virtual_language_observatory","master":"10|eosc________::4db0c877190783461728c6714cb66cbc"}
{"datasource":"eosc________::cloudferro::cloudferro.data_collections_catalog","master":"10|eosc________::eba1540eb9e87231fdf366eb23d16c3a"}
{"datasource":"eosc________::cloudferro::cloudferro.data_related_services_-_eo_browser","master":"10|eosc________::c24ebda20485c08293b72561ee3c634b"}
{"datasource":"eosc________::cloudferro::cloudferro.data_related_services_-_eo_finder","master":"10|eosc________::3d68186239b6c0f0d677ff55d9b549d1"}
{"datasource":"eosc________::cloudferro::cloudferro.infrastructure","master":"10|eosc________::ac7e3c0151fa3f11d3a7739dddaa3416"}
{"datasource":"eosc________::cmcc::cmcc.enes_data_space","master":"10|eosc________::2925e4df4147819e5b5d2f886f40e3a2"}
{"datasource":"eosc________::cnb-csic::cnb-csic.3dbionotes-ws_web_application_to_annotate_biochemical_and_biomedical_information_onto_structural_models","master":"10|eosc________::77fe0a66415f2440ab60d47dcee678a5"}
{"datasource":"eosc________::cnb-csic::cnb-csic.scipioncloud","master":"10|eosc________::7f09b7fee99363813f24aca9ebdecf61"}
{"datasource":"eosc________::cnr-iia::cnr-iia.geo_dab","master":"10|eosc________::108b0148352c15ee1ce935699e09add3"}
{"datasource":"eosc________::collabwith::collabwith.collabwith_marketplace","master":"10|eosc________::894a0ffa7768b228c1b46793670c85e6"}
{"datasource":"eosc________::coronis_computing_sl::coronis_computing_sl.uw-mos","master":"10|eosc________::9cbf0a75d817e291771b8bce6440f5f4"}
{"datasource":"eosc________::coronis_computing_sl::coronis_computing_sl.vd-maps","master":"10|eosc________::b5af1514b39d8e021554a73076a694d9"}
{"datasource":"eosc________::creaf::creaf.nimmbus_geospatial_user_feedback","master":"10|eosc________::86c325db16448760b3390dda7e46631a"}
{"datasource":"eosc________::creatis::creatis.virtual_imaging_platform","master":"10|eosc________::01a45ac2677f89414af91e651735846d"}
{"datasource":"eosc________::cs_group::cs_group.ai4geo_engine","master":"10|eosc________::c61211295d27e5e08f4c64f3e3098294"}
{"datasource":"eosc________::csc-fi::csc-fi.chipster","master":"10|eosc________::61549f785a2c93939be011b0453a6981"}
{"datasource":"eosc________::csc-fi::csc-fi.cpouta","master":"10|eosc________::d71c843b4e00eff17db07bf9d10769f9"}
{"datasource":"eosc________::csc-fi::csc-fi.csc_epouta","master":"10|eosc________::4493bd6a93e5b8465fda8cf7ab2dfdea"}
{"datasource":"eosc________::csc-fi::csc-fi.rahti_container_cloud","master":"10|eosc________::cc60eb9fc76f9598ee581eff0792573b"}
{"datasource":"eosc________::cscs::cscs.object_storage","master":"10|eosc________::3da6a817fe85ef43f7d97ef07e467d45"}
{"datasource":"eosc________::csi_piemonte::csi_piemonte.nivola2","master":"10|eosc________::ac6483be3e556c8652b8595680795983"}
{"datasource":"eosc________::csic::csic.csic_cloud_infrastructure","master":"10|eosc________::05ea2eb193382e22f32b32fbe9a4d961"}
{"datasource":"eosc________::cyberbotics::cyberbotics.robotbenchmark","master":"10|eosc________::27ee094c68b7a758ca2915aca6215a1d"}
{"datasource":"eosc________::d4science::d4science.alien_and_invasive_species_vre","master":"10|eosc________::b5cff6d55dcf6c20e78a0f1f847b3005"}
{"datasource":"eosc________::d4science::d4science.rprototypinglab_virtual_research_environment","master":"10|eosc________::8073ab0dbb22dc3b9f17627a7b25903f"}
{"datasource":"eosc________::d4science::d4science.visual_media_service_vre","master":"10|eosc________::eabf459f53c2bfe6247f006fcc0f4db7"}
{"datasource":"eosc________::dariah_eric::dariah_eric.dariah-campus","master":"10|eosc________::9c63075d6642a2d269776c2b90c2f976"}
{"datasource":"eosc________::dariah_eric::dariah_eric.ssh_open_marketplace","master":"10|eosc________::91fe494a3c21805febb03353152f1212"}
{"datasource":"eosc________::datacite::datacite.datacite_doi_registration_service","master":"10|eosc________::c146a470f01ee7ded3b55acda9362e7f"}
{"datasource":"eosc________::dcc-uk::dcc-uk.dmponline","master":"10|eosc________::fe480090e0739dab86b24a11177eeffd"}
{"datasource":"eosc________::denbi::denbi.cloud","master":"10|eosc________::59399e560967488c0ae0329e0d37f5b4"}
{"datasource":"eosc________::desy::desy.pan_data","master":"10|eosc________::52008fe404bf2e939140109162f9233f"}
{"datasource":"eosc________::desy::desy.pan_faas","master":"10|eosc________::026939c4b12d7d71e2b05bc5acde804e"}
{"datasource":"eosc________::desy::desy.pan_gitlab","master":"10|eosc________::f13cefc9f3207cb82f3285b05f190f78"}
{"datasource":"eosc________::desy::desy.pan_notebook","master":"10|eosc________::500fe61cce6562797cd43797aab12be5"}
{"datasource":"eosc________::digitalglobe::digitalglobe.earthwatch","master":"10|eosc________::020d905260267066c1926f526bb86f30"}
{"datasource":"eosc________::dkrz::dkrz.enes_climate_analytics_service","master":"10|eosc________::1d7a1fea6694d15d9e67f08e1e77082b"}
{"datasource":"eosc________::doabf::doabf.operas_certification","master":"10|eosc________::79b9748edeffb872a28660a9d238dcec"}
{"datasource":"eosc________::ds-wizard::ds-wizard.data_stewardship_wizard","master":"10|eosc________::fc6bad963e15e218efc62c7befd122af"}
{"datasource":"eosc________::egi-fed::egi-fed.check-in","master":"10|eosc________::baa3c497b9499b3d8c87ea8d2b37a44f"}
{"datasource":"eosc________::egi-fed::egi-fed.cloud_compute","master":"10|eosc________::b1179384a336d409fc909fe3711d3d1f"}
{"datasource":"eosc________::egi-fed::egi-fed.cloud_container_compute","master":"10|eosc________::a66bb1ac56a3bcf2c24b0ef85ed2bdfc"}
{"datasource":"eosc________::egi-fed::egi-fed.data_transfer","master":"10|eosc________::6c0bf38e885c42161b88093517f6cd3e"}
{"datasource":"eosc________::egi-fed::egi-fed.egi_datahub","master":"10|eosc________::5a260dae80795584ac08df133adb1fad"}
{"datasource":"eosc________::egi-fed::egi-fed.fitsm_training","master":"10|eosc________::927b4455c0a21692d2a9f634bccd8309"}
{"datasource":"eosc________::egi-fed::egi-fed.high-throughput_compute","master":"10|eosc________::e27ec11ac7b7d6ffbbce668b7d1f81d5"}
{"datasource":"eosc________::egi-fed::egi-fed.iso_27001_training","master":"10|eosc________::98a6655b6421166c5c29baa2f5815de3"}
{"datasource":"eosc________::egi-fed::egi-fed.notebook","master":"10|eosc________::1d37909a6a31147a09ee9f2e579a6706"}
{"datasource":"eosc________::egi-fed::egi-fed.online_storage","master":"10|eosc________::d8b94284582d3e2185a782ae2ba42186"}
{"datasource":"eosc________::egi-fed::egi-fed.training_infrastructure","master":"10|eosc________::38cdb8e44638f2e561c466f0dd26cf96"}
{"datasource":"eosc________::egi-fed::egi-fed.workload_manager","master":"10|eosc________::ff515071cd88afb40599edcb6637f47e"}
{"datasource":"eosc________::ehri::ehri.begrenzte_flucht","master":"10|eosc________::01d1445605fc1d25e6a7f21ba995d724"}
{"datasource":"eosc________::ehri::ehri.diplomatic_reports","master":"10|eosc________::11714353d2ed069ca30b177d4b4d9e0f"}
{"datasource":"eosc________::ehri::ehri.early_holocaust_testimony","master":"10|eosc________::0a4974b0bb295b98f88cb7c793f91c17"}
{"datasource":"eosc________::ehri::ehri.ehri_document_blog","master":"10|eosc________::fb9291f8dac099986eafe957b169ed97"}
{"datasource":"eosc________::ehri::ehri.international_research_portal_for_records_related_to_nazi-era_cultural_property","master":"10|eosc________::01c5b10e57f9cbb4f3125f427375487e"}
{"datasource":"eosc________::ehri::ehri.the_ehri_portal","master":"10|eosc________::6ad4d5352fd192b5fecd76bbd7a7e8b7"}
{"datasource":"eosc________::eiscat::eiscat.eiscat_data_access_portal","master":"10|eosc________::0f06a55c8333ae4d197c1d263b2be6ba"}
{"datasource":"eosc________::elixir-italy::elixir-italy.laniakea_recas","master":"10|eosc________::01e84abe377339ea57ed521ac39130e9"}
{"datasource":"eosc________::elixir-uk::elixir-uk.cyverse_uk","master":"10|eosc________::6a6a05847befec6587bef7673112f5e5"}
{"datasource":"eosc________::elixir-uk::elixir-uk.workflowhub","master":"10|fairsharing_::c8cd63e1bf13c5016881652983fb615a"}
{"datasource":"eosc________::elsevier::elsevier.digital_commons","master":"10|eosc________::67d38b6a1f43184676b113369554676b"}
{"datasource":"eosc________::embl-ebi::embl-ebi.embassy_cloud","master":"10|eosc________::7f8b24797312b851916ee1be0f836de6"}
{"datasource":"eosc________::embl-ebi::embl-ebi.identifiersorg","master":"10|eosc________::564e9f467aad251143e12e2e6ec19768"}
{"datasource":"eosc________::embl-ebi::embl-ebi.identifiersorg_central_registry","master":"10|eosc________::441caf7eaa4a6602aceae36b2697b924"}
{"datasource":"eosc________::embl-ebi::embl-ebi.identifiersorg_resolution_services","master":"10|eosc________::8df6273a1cb2289dbbe3a4b5fe05aa53"}
{"datasource":"eosc________::emso_eric::emso_eric.emso_eric_data_portal","master":"10|eosc________::94a41630bd9ddea4a88ec0bfba1b9d95"}
{"datasource":"eosc________::enermaps::enermaps.enermaps_data_management_tool","master":"10|eosc________::11496ee8a69b4b955200da7f2c12fe3b"}
{"datasource":"eosc________::enhancer::enhancer.openrdmeu","master":"10|eosc________::04820bece2545235144903dec056bcbd"}
{"datasource":"eosc________::enhancer::enhancer.swiss_escience_grid_certificates","master":"10|eosc________::4968516eb3b1ad6d883e74a84827e963"}
{"datasource":"eosc________::eodc::eodc.data_catalogue_service","master":"10|eosc________::21c44a2b6946e02300dbe36a8edec650"}
{"datasource":"eosc________::eodc::eodc.jupyterhub_for_global_copernicus_data","master":"10|eosc________::f99ccd68bf3de6a0a3b0db3441a41bbd"}
{"datasource":"eosc________::eosc-dih::eosc-dih.piloting_and_co-design_of_the_business_pilots","master":"10|eosc________::178f3e4832afe9e477d761d2f3d95f85"}
{"datasource":"eosc________::eox::eox.edc_eoxhub_workspace","master":"10|eosc________::d71468878e069cf484fc988d276c6d9a"}
{"datasource":"eosc________::esa-int::esa-int.geoss_web_portal","master":"10|eosc________::d7bac1ce234c20e3ab43a74eefa34782"}
{"datasource":"eosc________::esrf::esrf.the_european_synchrotron_radiation_facility_data_portal","master":"10|fairsharing_::2996962656838a97af4c5f926fe6f1b0"}
{"datasource":"eosc________::ess::ess.pan-learning-org","master":"10|eosc________::1298286d3a7cc48fa525b118218c7836"}
{"datasource":"eosc________::ess_eric::ess_eric.european_social_survey_ess_as_a_service","master":"10|eosc________::faa60b95b602690861be9305812a5c07"}
{"datasource":"eosc________::eudat::eudat.b2access","master":"10|eosc________::4dee0695b946b545dc8d52c56598fbbf"}
{"datasource":"eosc________::eudat::eudat.b2drop","master":"10|eosc________::4c6a514f1392ac1d159214e61785849a"}
{"datasource":"eosc________::eudat::eudat.b2find","master":"10|eosc________::6069f46dfcc89ccf8043581c9034558e"}
{"datasource":"eosc________::eudat::eudat.b2handle","master":"10|eosc________::a23be7f6265fd1ad957eed16b5c8bdc4"}
{"datasource":"eosc________::eudat::eudat.b2note","master":"10|eosc________::dfd1d6816b4182e25e84f6cf10d108ed"}
{"datasource":"eosc________::eudat::eudat.b2safe","master":"10|re3data_____::a632666349a0bb9a36096c9e152d34cc"}
{"datasource":"eosc________::eudat::eudat.b2share","master":"10|eosc________::f959324bdb00f052d547b95da205062f"}
{"datasource":"eosc________::eurac::eurac.edp-portal_-_metadata_catalogue_of_eurac_research","master":"10|eosc________::274d73061a925a29d8743b3e1022d0dc"}
{"datasource":"eosc________::europeana::europeana.europeana_apis","master":"10|eosc________::91de8c90ebde3dc1c8d41f339fe3fac7"}
{"datasource":"eosc________::exoscale::exoscale.european_cloud_hosting","master":"10|eosc________::12b7e6fef784084b817a42f2990fe3f2"}
{"datasource":"eosc________::expertai::expertai.document_enrichment_api","master":"10|eosc________::6812b902471f12506c8e6441195aff57"}
{"datasource":"eosc________::expertai::expertai.recommender_api","master":"10|eosc________::c40634543c1217686f0a8f5e8592d100"}
{"datasource":"eosc________::expertai::expertai.search_api","master":"10|eosc________::79440bc8082949f56cbabef796cec7f1"}
{"datasource":"eosc________::fairdi::fairdi.nomad_repository","master":"10|eosc________::b9000c95a6fde9930ae74f4071e14cb2"}
{"datasource":"eosc________::figshare::figshare.figshare","master":"10|eosc________::5e6bd062c6b85e2d176b2e61636b8971"}
{"datasource":"eosc________::forschungsdaten::forschungsdaten.forschungsdateninfo","master":"10|eosc________::c9185fdb68af7d515e56054da546bc94"}
{"datasource":"eosc________::forth::forth.openbioeu","master":"10|eosc________::2db71171816e994877fb960b9fcd89f2"}
{"datasource":"eosc________::fssda::fssda.data_service_portal_aila","master":"10|eosc________::ef1f75ea6d244563bc6cfb0c3d3affa4"}
{"datasource":"eosc________::fssda::fssda.kuha2_metadata_server","master":"10|eosc________::b6af28d7c292dbbe816cd0d6a9a66f16"}
{"datasource":"eosc________::gbif-es::gbif-es.collections_registry","master":"10|eosc________::ac6da0cfbd07f8605c57a799c41dc947"}
{"datasource":"eosc________::gbif-es::gbif-es.e-Learning_platform","master":"10|eosc________::9059ca88ca8292881ffba9ad8d943d04"}
{"datasource":"eosc________::gbif-es::gbif-es.images_portal","master":"10|eosc________::6991e5dd230956156129669934798cd8"}
{"datasource":"eosc________::gbif-es::gbif-es.occurrence_records","master":"10|eosc________::948a9a53e2a9c94d32f99785eccff662"}
{"datasource":"eosc________::gbif-es::gbif-es.regions_module","master":"10|eosc________::11189c308854c8d8113161edc7fbd3de"}
{"datasource":"eosc________::gbif-es::gbif-es.spatial_portal","master":"10|eosc________::665f73f5e4b6a3693fec9426a6ce6ae8"}
{"datasource":"eosc________::gbif-es::gbif-es.species_portal","master":"10|eosc________::9fe2f2ccb3d17452bd6e7424f60340ce"}
{"datasource":"eosc________::gbif::gbif.gbif_species_occurrence_data","master":"10|fairsharing_::6e5025ccc7d638ae4e724da8938450a6"}
{"datasource":"eosc________::gbif_portugal::gbif_portugal.gbif_portugal_occurrence_records","master":"10|eosc________::fcd4f4efdecb4e675fdee043043f69fc"}
{"datasource":"eosc________::gcc_umcg::gcc_umcg.molgenis","master":"10|eosc________::7f255ebbb3715f258e8d7c470209e675"}
{"datasource":"eosc________::geant::geant.clouds_service_infrastructure_as_a_service","master":"10|eosc________::7debc69506a8019515d350707e8c82d7"}
{"datasource":"eosc________::geant::geant.edugain","master":"10|eosc________::3ded12106e7e870242f7ec39345b3b97"}
{"datasource":"eosc________::geant::geant.edumeet_-_webbased_videoconferencing_platform","master":"10|eosc________::dcf8b262f7f61d44eedf409a29d30abc"}
{"datasource":"eosc________::geant::geant.eduroam","master":"10|eosc________::e7fd04aab1f224aaa2b5d3478694748b"}
{"datasource":"eosc________::geant::geant.eduteams","master":"10|eosc________::f3b04fa1e741f17a842fcbea35e04318"}
{"datasource":"eosc________::geant::geant.eduvpn_-_access_your_institutes_network_or_the_internet_using_an_encrypted_connection","master":"10|eosc________::aeb7c573f2742ec5ef8b7332b6b614cb"}
{"datasource":"eosc________::geant::geant.inacademia","master":"10|eosc________::26cb3be539a5bbb25533d3b1bdb9d6aa"}
{"datasource":"eosc________::geant::geant.ip","master":"10|eosc________::59cd8dbce2703f4eea69a54a959aae89"}
{"datasource":"eosc________::geant::geant.l3vpn","master":"10|eosc________::1e70cff61071ce42baffa6dafaf3165e"}
{"datasource":"eosc________::geant::geant.lambda","master":"10|eosc________::20a8114b376bf4c455c034b7b4513805"}
{"datasource":"eosc________::geant::geant.mdvpn","master":"10|eosc________::54fbf0ac4e42a2ce51e400d9783b51ba"}
{"datasource":"eosc________::geant::geant.open","master":"10|eosc________::9ae24d8c63e9ff986fbd20705b334919"}
{"datasource":"eosc________::geant::geant.perfsonar","master":"10|eosc________::1bdda4f743377914fabd0f365a8b6ee2"}
{"datasource":"eosc________::geant::geant.plus","master":"10|eosc________::eef45e860d52aff4932f254599d5b713"}
{"datasource":"eosc________::geant::geant.transits_training","master":"10|eosc________::831e2b596060c60d7d4bc79c200a2254"}
{"datasource":"eosc________::geant::geant.trusted_certificate_service","master":"10|eosc________::30817adfb6c625d7fd36b657e2fabc74"}
{"datasource":"eosc________::geant::geant.wifimon","master":"10|eosc________::6116f3b14f34658593529f6810068c4e"}
{"datasource":"eosc________::genias::genias.e-irg_knowledge_base","master":"10|eosc________::ddc5ab67fed353917716eb2d5c86ce68"}
{"datasource":"eosc________::gesis::gesis.doi_registration_service","master":"10|eosc________::71f37a7ebd8495a59c46e637ee5463da"}
{"datasource":"eosc________::grnet::grnet.agora_resource_portfolio_management_tool","master":"10|eosc________::461aa754c52b7eed605f9e0955470de5"}
{"datasource":"eosc________::grnet::grnet.argo_monitoring_engine","master":"10|eosc________::e91a3b4dfb62113b9b67b0ac97e566b4"}
{"datasource":"eosc________::grnet::grnet.aris","master":"10|eosc________::6b381464ec768e3cf55ccacdb00b5988"}
{"datasource":"eosc________::grnet::grnet.aris_-_archival_service","master":"10|eosc________::32158f91e33cf6fb6c63561cbc7ffd24"}
{"datasource":"eosc________::grnet::grnet.ni4os-europe_login","master":"10|eosc________::aeaa8f7fc2948930bfa4f970cd96837e"}
{"datasource":"eosc________::grnet::grnet.ni4os-europe_repository_service","master":"10|eosc________::d6933cb7acd6fa7a2f7a42562c432fb5"}
{"datasource":"eosc________::grycap::grycap.elastic_cloud_compute_cluster","master":"10|eosc________::c6d3c380ce5499d8d20cc9bbeb3b43ff"}
{"datasource":"eosc________::grycap::grycap.infrastructure_manager","master":"10|eosc________::e8a2eeb06a205c3299af49f5c233ce16"}
{"datasource":"eosc________::grycap::grycap.saps_surface_energy_balance_automated_processing_service","master":"10|eosc________::a7ae875b2487576c35f1bc8e1c857c14"}
{"datasource":"eosc________::hn::hn.isidore","master":"10|re3data_____::fabe5c1aaa2e2d4c847e01647b87bf60"}
{"datasource":"eosc________::hostkey::hostkey.gpu_servers_grant_program","master":"10|eosc________::d45f87107eb536b4be97e112fac15787"}
{"datasource":"eosc________::icos_eric::icos_eric.data_discovery_and_access_portal","master":"10|eosc________::84ada2e91828ce72fa6d02736cdd90f1"}
{"datasource":"eosc________::ifca-csic::ifca-csic.deepaas_training_facility","master":"10|eosc________::5414e2342e67d64b11b835e7fd58869d"}
{"datasource":"eosc________::ifca-csic::ifca-csic.ifca-csic_cloud_infrastructure","master":"10|eosc________::838e5c334e8115e4831d5f21435aa19b"}
{"datasource":"eosc________::ifca-csic::ifca-csic.plant_classification","master":"10|eosc________::32c26f83acaef8d89cc6c7a2f8abd198"}
{"datasource":"eosc________::ifca-csic::ifca-csic.remote_monitoring_and_smart_sensing","master":"10|eosc________::0335d29ec68ef9ebad8326cba79455f2"}
{"datasource":"eosc________::ifin-hh::ifin-hh.cloudifin","master":"10|eosc________::04d791df0b61b0f5060f241c70924991"}
{"datasource":"eosc________::iisas::iisas.dynamic_dns_service","master":"10|eosc________::2381e3b55d048130f2dffd437123d501"}
{"datasource":"eosc________::iisas::iisas.fedcloudclient_egi_fedcloud_client","master":"10|eosc________::3668885b6512a039673b9f4638c88600"}
{"datasource":"eosc________::iisas::iisas.modelling_service_for_water_supply_systems","master":"10|eosc________::b1d6d2cebddf52f6647102a30690fba9"}
{"datasource":"eosc________::ill::ill.ill_data_portal","master":"10|eosc________::714498cf1efec13c2206db4b1e4f1c30"}
{"datasource":"eosc________::ill::ill.panosc_software_catalogue","master":"10|eosc________::bc63c5a78abd38a7d9df043e0853a9ce"}
{"datasource":"eosc________::inaf::inaf.space-ml_caesar_service","master":"10|eosc________::ba42c5e4332ff16c6cd28573012bc2f9"}
{"datasource":"eosc________::inaf::inaf.space-vis_vialactea_service","master":"10|eosc________::ce2ca563bceae686b763326ed53e7b54"}
{"datasource":"eosc________::infn::infn.dynamic_on_demand_analysis_service","master":"10|eosc________::f884894e05c5a54646f0b5715e5495d6"}
{"datasource":"eosc________::infn::infn.fgsg_science_software_on_demand","master":"10|eosc________::452af4e76a64b6ee7e4bdc86527687f7"}
{"datasource":"eosc________::infn::infn.indigo_identity_and_access_management","master":"10|eosc________::d23115c40a4e256725f140330d001861"}
{"datasource":"eosc________::infn::infn.infn-cloud_object_storage_dice","master":"10|eosc________::fe0c28e8657cb84e3b775156106c03d1"}
{"datasource":"eosc________::infn::infn.paas_orchestrator","master":"10|eosc________::146240bb16057a93e11631edee570f76"}
{"datasource":"eosc________::infrafrontier::infrafrontier.training_in_mouse_functional_genomics","master":"10|eosc________::64d6597d10f4e617152f4a612a87eaba"}
{"datasource":"eosc________::inria::inria.software_heritage_archive","master":"10|fairsharing_::2c758933af02c0b301906f2819ae1268"}
{"datasource":"eosc________::jelastic::jelastic.platform-as-a-service","master":"10|eosc________::bfcae4ab00df41a3c43efbb879586e8f"}
|
||||||
|
{"datasource":"eosc________::jelastic::jelastic.platform-as-a-service","master":"10|eosc________::bfcae4ab00df41a3c43efbb879586e8f"}
|
||||||
|
{"datasource":"eosc________::kit::kit.eosc-performance","master":"10|eosc________::e52ab75587c1dd98db80568197f04586"}
|
||||||
|
{"datasource":"eosc________::kit::kit.o3as_ozone_assessment","master":"10|eosc________::aaf27a5f35a790617247abecd84b100f"}
|
||||||
|
{"datasource":"eosc________::komanord::komanord.guardomic","master":"10|eosc________::b1e06c9d2c472e9441ee72e83a934d40"}
|
||||||
|
{"datasource":"eosc________::lago::lago.onedatasim","master":"10|eosc________::2b2163e8b82320fed69a017a3e5fb657"}
|
||||||
|
{"datasource":"eosc________::lifewatch-eric::lifewatch-eric.plants_identification_app","master":"10|eosc________::6fc6ed0894391496d3c4967d45933d1a"}
|
||||||
|
{"datasource":"eosc________::lindatclariah-cz::lindatclariah-cz.elixirfm","master":"10|eosc________::6dd7c323776a028cef0619cb34bdf48c"}
|
||||||
|
{"datasource":"eosc________::lindatclariah-cz::lindatclariah-cz.ker_-_keyword_extractor","master":"10|eosc________::09915f038900aa43cb0c76aa89f10cda"}
|
||||||
|
{"datasource":"eosc________::lindatclariah-cz::lindatclariah-cz.lindatclariah-cz_repository","master":"10|eosc________::3daee6a29fb1d9a0f624cdd5973c33ea"}
|
||||||
|
{"datasource":"eosc________::lindatclariah-cz::lindatclariah-cz.machine_translation","master":"10|eosc________::3ae4551729381cfd03c433fb0de0c971"}
|
||||||
|
{"datasource":"eosc________::lindatclariah-cz::lindatclariah-cz.morphodita","master":"10|eosc________::f2ceebdc1a41d65504ff27f7297c833b"}
|
||||||
|
{"datasource":"eosc________::lindatclariah-cz::lindatclariah-cz.nametag","master":"10|eosc________::71e3226e7a868e2215335ffb29073285"}
|
||||||
|
{"datasource":"eosc________::lindatclariah-cz::lindatclariah-cz.udpipe_tool_for_lemmatization_morphological_analysis_pos_tagging_and_dependency_parsing_in_multiple_languages","master":"10|eosc________::2dfc64c2951d9be3f1e2b576633ea425"}
|
||||||
|
{"datasource":"eosc________::lnec::lnec-pt.opencoasts_portal","master":"10|eosc________::7e99655aeda0b5f06efb3eea424dff54"}
|
||||||
|
{"datasource":"eosc________::lnec::lnec.worsica_-_water_monitoring_sentinel_cloud_platform","master":"10|eosc________::c2f55ab774c3cbbd9a330eebaa74dc36"}
|
||||||
|
{"datasource":"eosc________::materialscloud::materialscloud.aiiDA_lab","master":"10|eosc________::dfd970a812cf2e0298eb28c681bc109f"}
|
||||||
|
{"datasource":"eosc________::materialscloud::materialscloud.materials_cloud_archive","master":"10|fairsharing_::a431d70133ef6cf688bc4f6093922b48"}
|
||||||
|
{"datasource":"eosc________::meeo::meeo.adam_platform","master":"10|eosc________::b17fedb87dd9985b6a5e51db593446d6"}
|
||||||
|
{"datasource":"eosc________::meeo::meeo.adam_space","master":"10|eosc________::24bfbca4cf4fedc5a4a662fe67a30d7e"}
|
||||||
|
{"datasource":"eosc________::mobile_observation_integration_service::mobile_observation_integration_service.dark_sky_meter_datasource","master":"10|eosc________::160638e73224aeb7e4f98fd237672919"}
|
||||||
|
{"datasource":"eosc________::msw::msw.polaris_os","master":"10|eosc________::12348ba5b2c5902fd400cb3f1ab773ee"}
|
||||||
|
{"datasource":"eosc________::obp::obp.thoth","master":"10|eosc________::680198ec3f51a744de8a7603d542a0e1"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.amnesia","master":"10|eosc________::ac57e2dd5b3ee01909d7a592523bb96f"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.argos","master":"10|eosc________::92145beb3257af0510ee61ef10d16870"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.broker","master":"10|eosc________::c8c6e8d211d6df4ee8a187fa1134bd92"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.data_provider_dashboard","master":"10|eosc________::809d4c77a7acf9ac0cc2990d4264ae51"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.digital_humanities_and_cultural_heritage_openaire_community_gateway","master":"10|eosc________::b9110e9735dd467abc969fe8e2f1efa3"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.discovery_portal","master":"10|eosc________::992052173b689c8cea94e8e8d99f0238"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.european_marine_science_openaire_dashboard","master":"10|eosc________::950a99851df85c90ec2e933e1d55e164"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.funder_dashboard","master":"10|eosc________::196eea80ab9d73766cd2e8b6ab85872f"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.graph","master":"10|eosc________::c122caed52a88b57732b814a74141000"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.greek_sustainable_development_solutions_network_sdsn_openaire_dashboard","master":"10|eosc________::8100e41e3a5b18170bc5ede2cc393331"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.inference","master":"10|eosc________::c491811e9a6afa69cdcab0f92fca6f7b"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.neuroinformatics_openaire_dashboard","master":"10|eosc________::6e3adcce4d0d4229a9749584dfd5e7a8"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.open_science_helpdesk","master":"10|eosc________::d66db88d4c6c354fe7ebcd4c3dce334e"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.open_science_observatory","master":"10|eosc________::441ee64860eb79808b7cf0bb08262be6"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.open_science_training","master":"10|eosc________::99847506cdff50afa4945d60a9661ea3"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.openaire_login","master":"10|eosc________::818973a9375c0fa545499e1bb9ad0ab2"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.openapc","master":"10|eosc________::a28cc193bc938573e892b8aad0017702"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.research_community_dashboard","master":"10|eosc________::e1a866322f76407fb161a253dc5b539c"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.scholexplorer","master":"10|eosc________::6b34adede04121175566ef8c70f1e520"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.technical_support_towards_openaire_compliance","master":"10|eosc________::cdb8e94b386f9b6780a47194bd1bc7f7"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.topos_observatory_for_organisations","master":"10|eosc________::a7d2b95257273b5ea3f3a23fd8a60d48"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.usage_statistics","master":"10|eosc________::8aa345dc7321fc97906bf4c193a05a8f"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.validator","master":"10|eosc________::f2c13efbaa2a33af3e4e6a54805ac379"}
|
||||||
|
{"datasource":"eosc________::openaire::openaire.zenodo","master":"10|opendoar____::358aee4cc897452c00244351e4d91f69"}
|
||||||
|
{"datasource":"eosc________::openbiomaps::openbiomaps.openbiomaps","master":"10|eosc________::32edf5a4edbdea0899d6ba588d083efd"}
|
||||||
|
{"datasource":"eosc________::openedition::openedition.operas_research_for_society","master":"10|eosc________::2cdf4f57007b990b7ad7a884796f9b15"}
|
||||||
|
{"datasource":"eosc________::openknowledgemaps::openknowledgemaps.open_knowledge_maps","master":"10|eosc________::f3819d0f8e8bf57d383b23d31a3c0099"}
|
||||||
|
{"datasource":"eosc________::openminted::openminted.builder_of_tdm_applications","master":"10|eosc________::fdd26c19dd490260bc6c48b5813f4ac3"}
|
||||||
|
{"datasource":"eosc________::openminted::openminted.catalogue_of_ancillary_resources","master":"10|eosc________::ab4e37e85a1975b204b66683ed3888a8"}
|
||||||
|
{"datasource":"eosc________::openminted::openminted.catalogue_of_corpora","master":"10|eosc________::2cf744a594ea30fd31e976bffa8f2b71"}
|
||||||
|
{"datasource":"eosc________::openminted::openminted.catalogue_of_tdm_applications","master":"10|eosc________::ef5f343c5cf11fa2d40407ec308bb34a"}
|
||||||
|
{"datasource":"eosc________::openminted::openminted.catalogue_of_tdm_components","master":"10|eosc________::4275243a94677f19a5b74e5afb1f94cf"}
|
||||||
|
{"datasource":"eosc________::openminted::openminted.consulting_on_licences_for_tdm","master":"10|eosc________::522000b4c90b209aa7be961449ca910f"}
|
||||||
|
{"datasource":"eosc________::openminted::openminted.corpus_builder_for_scholarly_works","master":"10|eosc________::c64725d47af63bc2114b4214b684a392"}
|
||||||
|
{"datasource":"eosc________::openminted::openminted.support_and_training","master":"10|eosc________::84501ff99e5e429f5f083ab8ca0be7e4"}
|
||||||
|
{"datasource":"eosc________::openminted::openminted.tdm_applications_executor","master":"10|eosc________::e9ae655ce2ff1eaa19d0b3475ce5e660"}
|
||||||
|
{"datasource":"eosc________::operas::operas.gotriple_discovery_platform","master":"10|eosc________::f687e24dc56aaeeb561c95865a5071cc"}
|
||||||
|
{"datasource":"eosc________::operas::operas.operas_metrics_service","master":"10|eosc________::5960e1289f623625210f720c6173592d"}
|
||||||
|
{"datasource":"eosc________::oslo_university::oslo_university.services_for_sensitive_data_tsd","master":"10|eosc________::743b01351510f88e24be1c700c581f68"}
|
||||||
|
{"datasource":"eosc________::osmooc::osmooc.open_science_mooc","master":"10|eosc________::e101101e8653b6607a3ad9fea3b7d1fe"}
|
||||||
|
{"datasource":"eosc________::oxford_e-research_centre::oxford_e-research_centre.fairsharing","master":"10|openaire____::bf5a61cc330e21ffa90eed3eb1533466"}
|
||||||
|
{"datasource":"eosc________::phenomenal::phenomenal.phenomenal","master":"10|eosc________::79e19b14aeee0d94e9a79110a6e6ad32"}
|
||||||
|
{"datasource":"eosc________::plantnet::plantnet.plntnet_identification_service","master":"10|eosc________::5ce89743eafdd8578591d84150f547e4"}
|
||||||
|
{"datasource":"eosc________::prace::prace.application_enabling_support","master":"10|eosc________::c87fd74ed685337fdbcff504373fc513"}
|
||||||
|
{"datasource":"eosc________::prace::prace.code_vault","master":"10|eosc________::dbab7889c81b59ec753040a762f6569a"}
|
||||||
|
{"datasource":"eosc________::prace::prace.deci_access","master":"10|eosc________::c7cedb82b1beea5382601d48807212aa"}
|
||||||
|
{"datasource":"eosc________::prace::prace.mooc","master":"10|eosc________::d6ff8167d31dccebe33a272513422b53"}
|
||||||
|
{"datasource":"eosc________::prace::prace.patc","master":"10|eosc________::1ab1b123bd559ee7f7c7ec2ee353f0c0"}
|
||||||
|
{"datasource":"eosc________::prace::prace.preparatory_access","master":"10|eosc________::39430adf529f1ab9e33da444b3708fcf"}
|
||||||
|
{"datasource":"eosc________::prace::prace.project_access","master":"10|eosc________::b58e957946983b686c76ee19dfab8d70"}
|
||||||
|
{"datasource":"eosc________::prace::prace.ptc","master":"10|eosc________::b3ca18e8884bfe2422d3723313fef79c"}
|
||||||
|
{"datasource":"eosc________::prace::prace.seasonal_schools_and_international_summer_school","master":"10|eosc________::590c71318d9d94c32981e3195567d546"}
|
||||||
|
{"datasource":"eosc________::prace::prace.shape","master":"10|eosc________::38b5a26f74e4808270a2d4f305d2f3a5"}
|
||||||
|
{"datasource":"eosc________::prace::prace.training_portal","master":"10|eosc________::25966a269ab2343ac9c4d982c341d87f"}
|
||||||
|
{"datasource":"eosc________::predictia::predictia.climadjust","master":"10|eosc________::14743eb22da3524893784faf409aac70"}
|
||||||
|
{"datasource":"eosc________::psi::psi.psi_public_data_repository","master":"10|re3data_____::1e55174ff77ed2d804871281201dbb50"}
|
||||||
|
{"datasource":"eosc________::psi::psi.remote_desktop_service","master":"10|eosc________::c82e26eb6e65d008de03b349dffc11fc"}
|
||||||
|
{"datasource":"eosc________::psnc::psnc.rohub","master":"10|eosc________::c87f08707b5235172e85b374e39a82dc"}
|
||||||
|
{"datasource":"eosc________::psnc::psnc.symbiote","master":"10|eosc________::ef0cd965a0d0a3df80ecfae4b3b08aad"}
|
||||||
|
{"datasource":"eosc________::rasdaman::rasdaman.datacube","master":"10|eosc________::bb1678f7b15d8c15fde6e240a4f95f93"}
|
||||||
|
{"datasource":"eosc________::rbi::rbi.dariah_science_gateway","master":"10|eosc________::b51b448421d926293b3781f4ac90f4f4"}
|
||||||
|
{"datasource":"eosc________::readcoop::readcoop.transkribus","master":"10|eosc________::a80411026809e6eaa896439e1b9764f4"}
|
||||||
|
{"datasource":"eosc________::rli::rli.open_energy_platform","master":"10|fairsharing_::0cbed40c0d920b94126eaf5e707be1f5"}
|
||||||
|
{"datasource":"eosc________::ror-org::ror-org.identifier","master":"10|eosc________::6fe92c2346db22322ddf6b677d449b0e"}
|
||||||
|
{"datasource":"eosc________::sciences_po::sciences_po.ethnic_and_migrant_minority_survey_registry","master":"10|eosc________::0cde986dc2bf015912e407f0f83ee402"}
|
||||||
|
{"datasource":"eosc________::sciences_po::sciences_po.wpss_for_ess","master":"10|eosc________::9a5bb11c495443aad944b04f5fcb5c07"}
|
||||||
|
{"datasource":"eosc________::scigne::scigne.cloud_compute","master":"10|eosc________::7c63e3284c36b5977c553192dce506b3"}
|
||||||
|
{"datasource":"eosc________::scipedia::scipedia.scipedia","master":"10|eosc________::850abcddc76069f2c3c1cf77ad4beec9"}
|
||||||
|
{"datasource":"eosc________::scipedia::scipedia.topos_for_individuals","master":"10|eosc________::e6214b58f39a25b53eecda340f95ee7b"}
|
||||||
|
{"datasource":"eosc________::seadatanet::seadatanet.doi_minting_service","master":"10|eosc________::f87f72147a3c82c4f77684e40101e90e"}
|
||||||
|
{"datasource":"eosc________::seadatanet::seadatanet.european_directory_of_marine_environmental_data_edmed","master":"10|eosc________::d79706389f0b864306feb47aac1f5766"}
|
||||||
|
{"datasource":"eosc________::seadatanet::seadatanet.european_directory_of_marine_environmental_research_projects","master":"10|eosc________::baa9d2d6cdd8507fcbf76242e4c25d76"}
|
||||||
|
{"datasource":"eosc________::seadatanet::seadatanet.european_directory_of_marine_organisations_edmo","master":"10|eosc________::5d23c66c26e0df209fc415c1e9ad0316"}
|
||||||
|
{"datasource":"eosc________::seadatanet::seadatanet.european_directory_of_the_cruise_summary_reports_csr","master":"10|eosc________::fd70912c66037dc11f710587e281eeaf"}
|
||||||
|
{"datasource":"eosc________::seadatanet::seadatanet.european_directory_of_the_initial_ocean-observing_systems_edios","master":"10|eosc________::846016e987d1feaf2a36083f88dba1f2"}
|
||||||
|
{"datasource":"eosc________::seadatanet::seadatanet.seadatanet_cdi","master":"10|eosc________::36cd158d6b1bbdbfb443c68b8da00335"}
|
||||||
|
{"datasource":"eosc________::seadatanet::seadatanet.vocabulary_services_-_underpinned_by_the_nerc_vocabulary_server_nvs","master":"10|eosc________::4416d18ec7a57e553979fbfa4d862483"}
|
||||||
|
{"datasource":"eosc________::sinergise::sinergise.sentinel_hub","master":"10|eosc________::d36ae944fa207461bcb7b2b3a6c94de8"}
|
||||||
|
{"datasource":"eosc________::sixsq::sixsq.nuvla_multi-cloud_application_management_platform","master":"10|eosc________::38438cc3190a3815359efb53b9dd98eb"}
|
||||||
|
{"datasource":"eosc________::sks::sks.digital_production_for_conferences_workshops_roundtables_and_other_academic_and_professional_events","master":"10|eosc________::f6b51bef4a5f1478e980673339f2b2f3"}
|
||||||
|
{"datasource":"eosc________::smartsmear::smartsmear.smartsmear","master":"10|eosc________::d17a9325ca64ffad59e04659ed5404f7"}
|
||||||
|
{"datasource":"eosc________::sobigdata::sobigdata.tagme","master":"10|eosc________::0c3b8b80d9d6d38effd28bfa6a140a12"}
|
||||||
|
{"datasource":"eosc________::suite5::suite5.furniture_enterprise_analytics","master":"10|eosc________::29ed60070bd91bdc19c9f278b104465c"}
|
||||||
|
{"datasource":"eosc________::switch::switch.switchengines","master":"10|eosc________::d4143918a810115206640cfeb11e0ba6"}
|
||||||
|
{"datasource":"eosc________::t-systems::t-systems.open_telekom_cloud","master":"10|eosc________::c489ef6564a47922359f7b833919d642"}
|
||||||
|
{"datasource":"eosc________::terradue::terradue.eo_services_for_earthquake_response_and_landslides_analysis","master":"10|eosc________::ab3140d145deb5fdb02eeefbc5ebc471"}
|
||||||
|
{"datasource":"eosc________::tib::tib.open_research_knowledge_graph_orkg","master":"10|eosc________::ed6bd695c7a99297f360bc2fc915be90"}
|
||||||
|
{"datasource":"eosc________::ubora::ubora.ubora","master":"10|eosc________::bacf05aff1c6dcf3133a0352d5eb14c4"}
|
||||||
|
{"datasource":"eosc________::ubora::ubora.ubora_e-platform","master":"10|eosc________::947fde33605ba61216a07135ee1551f2"}
|
||||||
|
{"datasource":"eosc________::ugr-es::ugr-es.glacier_lagoons_of_sierra_nevada","master":"10|eosc________::8a966c0efca298ad5ec130d323c29935"}
|
||||||
|
{"datasource":"eosc________::uit::uit.dataverseno","master":"10|eosc________::92b76aa81a5b8443fcf17d3ae3c34211"}
|
||||||
|
{"datasource":"eosc________::uit::uit.the_troms_repository_of_language_and_linguistics_trolling","master":"10|fairsharing_::a36b0dcd1e6384abc0e1867860ad3ee3"}
|
||||||
|
{"datasource":"eosc________::ukaea::ukaea.prominence","master":"10|eosc________::06ce999c7cf77ea5a65f87bb563cd625"}
|
||||||
|
{"datasource":"eosc________::ukri_-_stfc::ukri_-_stfc.cvmfs_test","master":"10|eosc________::53aaa0a24d0edc47c23e722135c29dde"}
|
||||||
|
{"datasource":"eosc________::ukri_-_stfc::ukri_-_stfc.rucio","master":"10|eosc________::c19a8251c6bf563365c555572ace903e"}
|
||||||
|
{"datasource":"eosc________::uni-freiburg::uni-freiburg.european_galaxy_server","master":"10|eosc________::cc00fc2385475b80accec001dfb85efb"}
|
||||||
|
{"datasource":"eosc________::unibo::unibo.opencitations","master":"10|eosc________::573c29ecaf76ab961743bfc8a7d911ec"}
|
||||||
|
{"datasource":"eosc________::unifl::unifl.snap4city","master":"10|eosc________::9a55c40c3c082b7a8352ecbc56a87996"}
|
||||||
|
{"datasource":"eosc________::unige::unige.astronomical_online_data_analysis_astrooda","master":"10|eosc________::63f6119d3170cccf979daada3c5b524e"}
|
||||||
|
{"datasource":"eosc________::unitartu::unitartu.ut.rocket","master":"10|eosc________::da3450589a9d56212963b20cf729974c"}
|
||||||
|
{"datasource":"eosc________::upv-es::upv-es.lemonade","master":"10|eosc________::afdd227beada491f77d7944d7a0eafc9"}
|
||||||
|
{"datasource":"eosc________::vamdc::vamdc.portal","master":"10|eosc________::4dab2bb6e9a9ad223cd63c62c2ea804e"}
|
||||||
|
{"datasource":"eosc________::vamdc::vamdc.query_store","master":"10|eosc________::33f18bfe544c3c84ac28be6a3292d166"}
|
||||||
|
{"datasource":"eosc________::vamdc::vamdc.species_database","master":"10|eosc________::ae3587682dec5663a1b3b625036d15d0"}
|
||||||
|
{"datasource":"eosc________::vilnius-university::vilnius-university.the_national_open_access_research_data_archive_midas","master":"10|eosc________::4987ee0d071f68cf88f6b1a834b6733f"}
|
||||||
|
{"datasource":"eosc________::wenmr::wenmr.amber-based_portal_server_for_nmr_structures_amps-nmr","master":"10|eosc________::c6cca9747ef3ce296bd626bcbc4e480a"}
|
||||||
|
{"datasource":"eosc________::wenmr::wenmr.disvis_web_portal","master":"10|eosc________::2539ec693b683284c4e243b969ae3fc0"}
|
||||||
|
{"datasource":"eosc________::wenmr::wenmr.fanten_finding_anisotropy_tensor","master":"10|eosc________::99c793e3f3b856c48eaaa36682038b28"}
|
||||||
|
{"datasource":"eosc________::wenmr::wenmr.haddock24_web_portal","master":"10|eosc________::0f198f6a0885105809f420be23614be3"}
|
||||||
|
{"datasource":"eosc________::wenmr::wenmr.metalpdb","master":"10|eosc________::84676bc3d2ce17de70309dc58f428296"}
|
||||||
|
{"datasource":"eosc________::wenmr::wenmr.pdb-tools_web","master":"10|eosc________::b37eed45624ac30f3476f71640e59a61"}
|
||||||
|
{"datasource":"eosc________::wenmr::wenmr.powerfit_web_portal","master":"10|eosc________::93d4d621ed1da378c0e7dc891cefc007"}
|
||||||
|
{"datasource":"eosc________::wenmr::wenmr.spoton","master":"10|eosc________::76e7e0552f9c6b89db94b31ddc366b9f"}
|
|
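Note added for review (not part of the commit): each line of the mapping file above is an independent JSON object pairing an EOSC datasource identifier with its master datasource id, so the file can be parsed line by line. A minimal Gson sketch, using a hypothetical DatasourceMaster bean that simply mirrors the two fields:

import com.google.gson.Gson;

public class DatasourceMasterDemo {

	// hypothetical bean matching the two JSON fields above
	public static class DatasourceMaster {
		public String datasource;
		public String master;
	}

	public static void main(String[] args) {
		String line = "{\"datasource\":\"eosc________::wenmr::wenmr.spoton\","
			+ "\"master\":\"10|eosc________::76e7e0552f9c6b89db94b31ddc366b9f\"}";
		DatasourceMaster dm = new Gson().fromJson(line, DatasourceMaster.class);
		System.out.println(dm.datasource + " -> " + dm.master); // prints the mapped pair
	}
}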
@@ -10,6 +10,7 @@ import java.util.stream.Collectors;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.FilterFunction;
 import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.api.java.function.MapGroupsFunction;
 import org.apache.spark.sql.Dataset;
@@ -81,8 +82,9 @@ public class SparkPrepareResultProject implements Serializable {
 		Dataset<Relation> relation = Utils
 			.readPath(spark, inputPath + "/relation", Relation.class)
 			.filter(
-				"dataInfo.deletedbyinference = false and lower(relClass) = '"
-					+ ModelConstants.IS_PRODUCED_BY.toLowerCase() + "'");
+				(FilterFunction<Relation>) r -> !r.getDataInfo().getDeletedbyinference() &&
+					r.getRelClass().equalsIgnoreCase(ModelConstants.IS_PRODUCED_BY));
 
 		Dataset<eu.dnetlib.dhp.schema.oaf.Project> projects = Utils
 			.readPath(spark, inputPath + "/project", eu.dnetlib.dhp.schema.oaf.Project.class);
 
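Review note (illustration, not part of the commit): the hunk above swaps a SQL-string filter for a typed FilterFunction, which the compiler can check. A minimal local sketch of the same pattern, using a simplified Rel bean in place of the dnet-hadoop Relation:

import java.io.Serializable;
import java.util.Arrays;

import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

public class TypedFilterDemo {

	// simplified stand-in for eu.dnetlib.dhp.schema.oaf.Relation
	public static class Rel implements Serializable {
		private String relClass;
		private boolean deleted;

		public Rel() {
		}

		public Rel(String relClass, boolean deleted) {
			this.relClass = relClass;
			this.deleted = deleted;
		}

		public String getRelClass() {
			return relClass;
		}

		public void setRelClass(String relClass) {
			this.relClass = relClass;
		}

		public boolean isDeleted() {
			return deleted;
		}

		public void setDeleted(boolean deleted) {
			this.deleted = deleted;
		}
	}

	public static void main(String[] args) {
		SparkSession spark = SparkSession.builder().master("local[*]").appName("typedFilter").getOrCreate();
		Dataset<Rel> rels = spark
			.createDataset(
				Arrays.asList(new Rel("isProducedBy", false), new Rel("isProducedBy", true), new Rel("cites", false)),
				Encoders.bean(Rel.class));
		// typed filter: field access and types are checked at compile time,
		// unlike the former SQL-string predicate
		long kept = rels
			.filter((FilterFunction<Rel>) r -> !r.isDeleted() && r.getRelClass().equalsIgnoreCase("isproducedby"))
			.count();
		System.out.println(kept); // 1
		spark.stop();
	}
}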
@@ -7,17 +7,22 @@ import java.io.Serializable;
 import java.util.List;
 import java.util.Objects;
 import java.util.Optional;
+import java.util.stream.Collectors;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.FlatMapFunction;
+import org.apache.spark.api.java.function.ForeachFunction;
 import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.*;
+import org.jetbrains.annotations.NotNull;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.oa.graph.dump.Utils;
 import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
+import eu.dnetlib.dhp.schema.dump.oaf.community.Funder;
 import eu.dnetlib.dhp.schema.dump.oaf.community.Project;
 
 /**
@@ -33,87 +38,83 @@ public class SparkDumpFunderResults implements Serializable {
 				SparkDumpFunderResults.class
 					.getResourceAsStream(
 						"/eu/dnetlib/dhp/oa/graph/dump/funder_result_parameters.json"));
 
 		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
 		parser.parseArgument(args);
 
 		Boolean isSparkSessionManaged = Optional
 			.ofNullable(parser.get("isSparkSessionManaged"))
 			.map(Boolean::valueOf)
 			.orElse(Boolean.TRUE);
 		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
 
 		final String inputPath = parser.get("sourcePath");
 		log.info("inputPath: {}", inputPath);
 
 		final String outputPath = parser.get("outputPath");
 		log.info("outputPath: {}", outputPath);
 
-		final String graphPath = parser.get("graphPath");
-		log.info("relationPath: {}", graphPath);
-
 		SparkConf conf = new SparkConf();
 
 		runWithSparkSession(
 			conf,
 			isSparkSessionManaged,
 			spark -> {
 				Utils.removeOutputDir(spark, outputPath);
-				writeResultProjectList(spark, inputPath, outputPath, graphPath);
+				writeResultProjectList(spark, inputPath, outputPath);
 			});
 	}
 
-	private static void writeResultProjectList(SparkSession spark, String inputPath, String outputPath,
-		String graphPath) {
-
-		Dataset<eu.dnetlib.dhp.schema.oaf.Project> project = Utils
-			.readPath(spark, graphPath + "/project", eu.dnetlib.dhp.schema.oaf.Project.class);
+	private static void writeResultProjectList(SparkSession spark, String inputPath, String outputPath) {
 
 		Dataset<CommunityResult> result = Utils
 			.readPath(spark, inputPath + "/publication", CommunityResult.class)
 			.union(Utils.readPath(spark, inputPath + "/dataset", CommunityResult.class))
-			.union(Utils.readPath(spark, inputPath + "/orp", CommunityResult.class))
+			.union(Utils.readPath(spark, inputPath + "/otherresearchproduct", CommunityResult.class))
 			.union(Utils.readPath(spark, inputPath + "/software", CommunityResult.class));
-		List<String> funderList = project
-			.select("id")
-			.map((MapFunction<Row, String>) value -> value.getString(0).substring(0, 15), Encoders.STRING())
-			.distinct()
-			.collectAsList();
+		log.info("Number of result {}", result.count());
+		Dataset<String> tmp = result
+			.flatMap((FlatMapFunction<CommunityResult, String>) cr -> cr.getProjects().stream().map(p -> {
+				return getFunderName(p);
+			}).collect(Collectors.toList()).iterator(), Encoders.STRING())
+			.distinct();
+		List<String> funderList = tmp.collectAsList();
 		funderList.forEach(funder -> {
-			String fundernsp = funder.substring(3);
-			String funderdump;
-			if (fundernsp.startsWith("corda")) {
-				funderdump = "EC_";
-				if (fundernsp.endsWith("h2020")) {
-					funderdump += "H2020";
-				} else {
-					funderdump += "FP7";
-				}
-			} else {
-				funderdump = fundernsp.substring(0, fundernsp.indexOf("_")).toUpperCase();
-			}
-			writeFunderResult(funder, result, outputPath, funderdump);
+			dumpResults(funder, result, outputPath);
 		});
 	}
 
-	private static void dumpResults(String nsp, Dataset<CommunityResult> results, String outputPath,
-		String funderName) {
+	@NotNull
+	private static String getFunderName(Project p) {
+		Optional<Funder> ofunder = Optional.ofNullable(p.getFunder());
+		if (ofunder.isPresent()) {
+			String fName = ofunder.get().getShortName();
+			if (fName.equalsIgnoreCase("ec")) {
+				fName += "_" + ofunder.get().getFundingStream();
+			}
+			return fName;
+		} else {
+			String fName = p.getId().substring(3, p.getId().indexOf("_")).toUpperCase();
+			if (fName.equalsIgnoreCase("ec")) {
+				if (p.getId().contains("h2020")) {
+					fName += "_H2020";
+				} else {
+					fName += "_FP7";
+				}
+			} else if (fName.equalsIgnoreCase("conicytf")) {
+				fName = "CONICYT";
+			} else if (fName.equalsIgnoreCase("dfgf")) {
+				fName = "DFG";
+			} else if (fName.equalsIgnoreCase("tubitakf")) {
+				fName = "TUBITAK";
+			} else if (fName.equalsIgnoreCase("euenvagency")) {
+				fName = "EEA";
+			}
+			return fName;
+		}
+	}
 
+	private static void dumpResults(String funder, Dataset<CommunityResult> results, String outputPath) {
 		results.map((MapFunction<CommunityResult, CommunityResult>) r -> {
 			if (!Optional.ofNullable(r.getProjects()).isPresent()) {
 				return null;
 			}
 			for (Project p : r.getProjects()) {
-				if (p.getId().startsWith(nsp)) {
-					if (nsp.startsWith("40|irb")) {
-						if (p.getFunder().getShortName().equals(funderName))
-							return r;
-						else
-							return null;
-					}
+				String fName = getFunderName(p);
+				if (fName.equalsIgnoreCase(funder)) {
 					return r;
 				}
 			}
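Review note (illustration, not part of the commit): getFunderName above normalizes funder short names so that results can be grouped into one dump per funder, splitting EC by funding stream (H2020 vs FP7) and mapping a few registry names (conicytf, dfgf, tubitakf, euenvagency) to their canonical forms. A condensed, standalone sketch of that mapping, folding both branches of the real method into one helper for brevity:

public class FunderNameDemo {

	// condensed version of the normalization in getFunderName above
	static String normalize(String shortName, String fundingStream) {
		String fName = shortName;
		if (fName.equalsIgnoreCase("ec")) {
			fName += "_" + fundingStream; // e.g. EC_H2020 vs EC_FP7
		} else if (fName.equalsIgnoreCase("conicytf")) {
			fName = "CONICYT";
		} else if (fName.equalsIgnoreCase("dfgf")) {
			fName = "DFG";
		} else if (fName.equalsIgnoreCase("tubitakf")) {
			fName = "TUBITAK";
		} else if (fName.equalsIgnoreCase("euenvagency")) {
			fName = "EEA";
		}
		return fName;
	}

	public static void main(String[] args) {
		System.out.println(normalize("EC", "H2020")); // EC_H2020
		System.out.println(normalize("dfgf", null)); // DFG
		System.out.println(normalize("NSF", null)); // NSF (unchanged)
	}
}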
@@ -123,18 +124,6 @@ public class SparkDumpFunderResults implements Serializable {
 			.write()
 			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
-			.json(outputPath + "/" + funderName);
+			.json(outputPath + "/" + funder);
 	}
 
-	private static void writeFunderResult(String funder, Dataset<CommunityResult> results, String outputPath,
-		String funderDump) {
-
-		if (funder.startsWith("40|irb")) {
-			dumpResults(funder, results, outputPath, "HRZZ");
-			dumpResults(funder, results, outputPath, "MZOS");
-		} else
-			dumpResults(funder, results, outputPath, funderDump);
-
-	}
-
 }
@@ -5,9 +5,12 @@ import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
 
 import java.io.Serializable;
 import java.util.Optional;
+import java.util.stream.Collectors;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.FilterFunction;
+import org.apache.spark.api.java.function.FlatMapFunction;
 import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.api.java.function.MapGroupsFunction;
 import org.apache.spark.sql.Dataset;
@@ -18,11 +21,18 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.oa.graph.dump.Constants;
+import eu.dnetlib.dhp.oa.graph.dump.DumpProducts;
+import eu.dnetlib.dhp.oa.graph.dump.ResultMapper;
 import eu.dnetlib.dhp.oa.graph.dump.Utils;
+import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
+import eu.dnetlib.dhp.oa.graph.dump.community.ResultProject;
 import eu.dnetlib.dhp.schema.common.ModelConstants;
+import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
 import eu.dnetlib.dhp.schema.oaf.Project;
 import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.dhp.schema.oaf.Result;
+import scala.Tuple2;
 
 /**
  * Selects the results linked to projects. Only for these results the dump will be performed.
@@ -58,8 +68,10 @@ public class SparkResultLinkedToProject implements Serializable {
 		final String resultClassName = parser.get("resultTableName");
 		log.info("resultTableName: {}", resultClassName);
 
-		final String graphPath = parser.get("graphPath");
-		log.info("graphPath: {}", graphPath);
+		final String resultProjectsPath = parser.get("graphPath");
+		log.info("graphPath: {}", resultProjectsPath);
 
+		String communityMapPath = parser.get("communityMapPath");
+
 		@SuppressWarnings("unchecked")
 		Class<? extends Result> inputClazz = (Class<? extends Result>) Class.forName(resultClassName);
@@ -70,43 +82,33 @@ public class SparkResultLinkedToProject implements Serializable {
 			isSparkSessionManaged,
 			spark -> {
 				Utils.removeOutputDir(spark, outputPath);
-				writeResultsLinkedToProjects(spark, inputClazz, inputPath, outputPath, graphPath);
+				writeResultsLinkedToProjects(
+					communityMapPath, spark, inputClazz, inputPath, outputPath, resultProjectsPath);
 			});
 	}
 
-	private static <R extends Result> void writeResultsLinkedToProjects(SparkSession spark, Class<R> inputClazz,
-		String inputPath, String outputPath, String graphPath) {
+	private static <R extends Result> void writeResultsLinkedToProjects(String communityMapPath, SparkSession spark,
+		Class<R> inputClazz,
+		String inputPath, String outputPath, String resultProjectsPath) {
 
 		Dataset<R> results = Utils
 			.readPath(spark, inputPath, inputClazz)
-			.filter("dataInfo.deletedbyinference = false and datainfo.invisible = false");
-		Dataset<Relation> relations = Utils
-			.readPath(spark, graphPath + "/relation", Relation.class)
-			.filter(
-				"dataInfo.deletedbyinference = false and lower(relClass) = '"
-					+ ModelConstants.IS_PRODUCED_BY.toLowerCase() + "'");
-		Dataset<Project> project = Utils.readPath(spark, graphPath + "/project", Project.class);
-
-		results.createOrReplaceTempView("result");
-		relations.createOrReplaceTempView("relation");
-		project.createOrReplaceTempView("project");
-
-		Dataset<R> tmp = spark
-			.sql(
-				"Select res.* " +
-					"from relation rel " +
-					"join result res " +
-					"on rel.source = res.id " +
-					"join project p " +
-					"on rel.target = p.id " +
-					"")
-			.as(Encoders.bean(inputClazz));
-		tmp
-			.groupByKey(
-				(MapFunction<R, String>) value -> value
-					.getId(),
-				Encoders.STRING())
-			.mapGroups((MapGroupsFunction<String, R, R>) (k, it) -> it.next(), Encoders.bean(inputClazz))
+			.filter(
+				(FilterFunction<R>) r -> !r.getDataInfo().getDeletedbyinference() &&
+					!r.getDataInfo().getInvisible());
+		Dataset<ResultProject> resultProjectDataset = Utils
+			.readPath(spark, resultProjectsPath, ResultProject.class);
+		CommunityMap communityMap = Utils.getCommunityMap(spark, communityMapPath);
+		results
+			.joinWith(resultProjectDataset, results.col("id").equalTo(resultProjectDataset.col("resultId")))
+			.map((MapFunction<Tuple2<R, ResultProject>, CommunityResult>) t2 -> {
+				CommunityResult cr = (CommunityResult) ResultMapper
+					.map(
+						t2._1(),
+						communityMap, Constants.DUMPTYPE.FUNDER.getType());
+				cr.setProjects(t2._2().getProjectsList());
+				return cr;
+			}, Encoders.bean(CommunityResult.class))
 			.write()
 			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
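Review note (illustration, not part of the commit): the rewrite above replaces three temp views and a SQL join with a typed Dataset.joinWith, which yields Tuple2 pairs and keeps the bean types visible to the compiler. A minimal local sketch of the same join shape, with simplified stand-ins for the result and ResultProject beans:

import java.io.Serializable;
import java.util.Arrays;

import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

import scala.Tuple2;

public class JoinWithDemo {

	// simplified stand-in for a graph result entity
	public static class Res implements Serializable {
		private String id;

		public Res() {
		}

		public Res(String id) {
			this.id = id;
		}

		public String getId() {
			return id;
		}

		public void setId(String id) {
			this.id = id;
		}
	}

	// simplified stand-in for the precomputed ResultProject association
	public static class Assoc implements Serializable {
		private String resultId;
		private String project;

		public Assoc() {
		}

		public Assoc(String resultId, String project) {
			this.resultId = resultId;
			this.project = project;
		}

		public String getResultId() {
			return resultId;
		}

		public void setResultId(String resultId) {
			this.resultId = resultId;
		}

		public String getProject() {
			return project;
		}

		public void setProject(String project) {
			this.project = project;
		}
	}

	public static void main(String[] args) {
		SparkSession spark = SparkSession.builder().master("local[*]").appName("joinWith").getOrCreate();
		Dataset<Res> results = spark
			.createDataset(Arrays.asList(new Res("r1"), new Res("r2")), Encoders.bean(Res.class));
		Dataset<Assoc> assoc = spark
			.createDataset(Arrays.asList(new Assoc("r1", "p9")), Encoders.bean(Assoc.class));
		// inner joinWith keeps only results with at least one project association,
		// exactly like the filter-by-join in writeResultsLinkedToProjects above
		Dataset<String> joined = results
			.joinWith(assoc, results.col("id").equalTo(assoc.col("resultId")))
			.map(
				(MapFunction<Tuple2<Res, Assoc>, String>) t2 -> t2._1().getId() + " -> " + t2._2().getProject(),
				Encoders.STRING());
		joined.show(); // r1 -> p9
		spark.stop();
	}
}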
@@ -0,0 +1,82 @@
+
+package eu.dnetlib.dhp.oa.graph.dump.projectssubset;
+
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+
+import java.io.Serializable;
+import java.util.Objects;
+import java.util.Optional;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.MapFunction;
+import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.Encoders;
+import org.apache.spark.sql.SaveMode;
+import org.apache.spark.sql.SparkSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.oa.graph.dump.Utils;
+import eu.dnetlib.dhp.schema.dump.oaf.graph.Project;
+import scala.Tuple2;
+
+public class ProjectsSubsetSparkJob implements Serializable {
+	private static final Logger log = LoggerFactory.getLogger(ProjectsSubsetSparkJob.class);
+
+	public static void main(String[] args) throws Exception {
+		String jsonConfiguration = IOUtils
+			.toString(
+				ProjectsSubsetSparkJob.class
+					.getResourceAsStream(
+						"/eu/dnetlib/dhp/oa/graph/dump/project_subset_parameters.json"));
+		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
+		parser.parseArgument(args);
+		Boolean isSparkSessionManaged = Optional
+			.ofNullable(parser.get("isSparkSessionManaged"))
+			.map(Boolean::valueOf)
+			.orElse(Boolean.TRUE);
+		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
+		final String inputPath = parser.get("sourcePath");
+		log.info("inputPath: {}", inputPath);
+		final String outputPath = parser.get("outputPath");
+		log.info("outputPath: {}", outputPath);
+		final String projectListPath = parser.get("projectListPath");
+		log.info("projectListPath: {}", projectListPath);
+		SparkConf conf = new SparkConf();
+		runWithSparkSession(
+			conf,
+			isSparkSessionManaged,
+			spark -> {
+				Utils.removeOutputDir(spark, outputPath);
+				getNewProjectList(spark, inputPath, outputPath, projectListPath);
+			});
+	}
+
+	private static void getNewProjectList(SparkSession spark, String inputPath, String outputPath,
+		String projectListPath) {
+		Dataset<String> projectList = spark.read().textFile(projectListPath);
+		Dataset<Project> projects;
+		projects = Utils.readPath(spark, inputPath, Project.class);
+		projects
+			.joinWith(projectList, projects.col("id").equalTo(projectList.col("value")), "left")
+			.map((MapFunction<Tuple2<Project, String>, Project>) t2 -> {
+				if (Optional.ofNullable(t2._2()).isPresent())
+					return null;
+				return t2._1();
+			}, Encoders.bean(Project.class))
+			.filter(Objects::nonNull)
+			.write()
+			.mode(SaveMode.Overwrite)
+			.option("compression", "gzip")
+			.json(outputPath);
+		Utils
+			.readPath(spark, outputPath, Project.class)
+			.map((MapFunction<Project, String>) p -> p.getId(), Encoders.STRING())
+			.write()
+			.mode(SaveMode.Append)
+			.option("compression", "gzip")
+			.text(projectListPath);
+	}
+}
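Review note (illustration, not part of the commit): getNewProjectList keeps only projects whose id is absent from projectListPath by doing a left join and then dropping matched rows; Spark's "left_anti" join type expresses the same intent in a single step. A sketch under that assumption (the input paths are placeholders):

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class LeftAntiDemo {
	public static void main(String[] args) {
		SparkSession spark = SparkSession.builder().master("local[*]").appName("leftAntiDemo").getOrCreate();
		Dataset<Row> projects = spark.read().json("/tmp/projects.json"); // placeholder input
		Dataset<String> dumped = spark.read().textFile("/tmp/projectList.txt"); // one id per line, column "value"
		// rows of `projects` with no match in `dumped`: same effect as the
		// left join + null filter in getNewProjectList above
		Dataset<Row> fresh = projects.join(dumped, projects.col("id").equalTo(dumped.col("value")), "left_anti");
		fresh.show(false);
		spark.stop();
	}
}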
@@ -355,7 +355,7 @@ public abstract class AbstractMdRecordToOafMapper {
 		r.setPublisher(preparePublisher(doc, info));
 		r.setEmbargoenddate(prepareField(doc, "//oaf:embargoenddate", info));
 		r.setSource(prepareSources(doc, info));
-		r.setFulltext(new ArrayList<>()); // NOT PRESENT IN MDSTORES
+		r.setFulltext(prepareListFields(doc, "//oaf:fulltext", info));
 		r.setFormat(prepareFormats(doc, info));
 		r.setContributor(prepareContributors(doc, info));
 		r.setResourcetype(prepareResourceType(doc, info));
@@ -65,11 +65,19 @@ public class MigrateHdfsMdstoresApplication extends AbstractMigrationApplication {
 		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
 
 		final String mdstoreManagerUrl = parser.get("mdstoreManagerUrl");
+		log.info("mdstoreManagerUrl: {}", mdstoreManagerUrl);
+
 		final String mdFormat = parser.get("mdFormat");
+		log.info("mdFormat: {}", mdFormat);
+
 		final String mdLayout = parser.get("mdLayout");
+		log.info("mdLayout: {}", mdLayout);
+
 		final String mdInterpretation = parser.get("mdInterpretation");
+		log.info("mdInterpretation: {}", mdInterpretation);
 
 		final String hdfsPath = parser.get("hdfsPath");
+		log.info("hdfsPath: {}", hdfsPath);
 
 		final Set<String> paths = mdstorePaths(mdstoreManagerUrl, mdFormat, mdLayout, mdInterpretation);
 
@@ -95,6 +103,8 @@ public class MigrateHdfsMdstoresApplication extends AbstractMigrationApplication {
 			.filter(p -> HdfsSupport.exists(p, sc.hadoopConfiguration()))
 			.toArray(size -> new String[size]);
 
+		log.info("Processing existing paths {}", Arrays.asList(validPaths));
+
 		if (validPaths.length > 0) {
 			spark
 				.read()
@@ -17,10 +17,10 @@
     "paramDescription": "true if the spark session is managed, false otherwise",
     "paramRequired": false
   },
   {
     "paramName": "gp",
     "paramLongName": "graphPath",
     "paramDescription": "the relationPath",
-    "paramRequired": true
+    "paramRequired": false
   }
 ]
@@ -0,0 +1,20 @@
+[
+  {
+    "paramName":"s",
+    "paramLongName":"sourcePath",
+    "paramDescription": "the path of the sequencial file to read",
+    "paramRequired": true
+  },
+  {
+    "paramName": "out",
+    "paramLongName": "outputPath",
+    "paramDescription": "the path used to store temporary output files",
+    "paramRequired": true
+  },
+  {
+    "paramName": "ssm",
+    "paramLongName": "isSparkSessionManaged",
+    "paramDescription": "true if the spark session is managed, false otherwise",
+    "paramRequired": false
+  }
+]
@@ -28,6 +28,12 @@
     "paramLongName":"graphPath",
     "paramDescription": "the path to the relations",
     "paramRequired": true
+  },
+  {
+    "paramName":"cmp",
+    "paramLongName":"communityMapPath",
+    "paramDescription": "the path to the relations",
+    "paramRequired": true
   }
 ]
 
@@ -0,0 +1,27 @@
+[
+  {
+    "paramName":"s",
+    "paramLongName":"sourcePath",
+    "paramDescription": "the path of the sequencial file to read",
+    "paramRequired": true
+  },
+  {
+    "paramName": "out",
+    "paramLongName": "outputPath",
+    "paramDescription": "the path used to store temporary output files",
+    "paramRequired": true
+  },
+  {
+    "paramName": "ssm",
+    "paramLongName": "isSparkSessionManaged",
+    "paramDescription": "true if the spark session is managed, false otherwise",
+    "paramRequired": false
+  },
+  {
+    "paramName": "pl",
+    "paramLongName": "projectListPath",
+    "paramDescription": "the path of the association result projectlist",
+    "paramRequired": true
+  }
+]
+
@ -0,0 +1,171 @@
|
||||||
|
<workflow-app name="dump_graph" xmlns="uri:oozie:workflow:0.5">
|
||||||
|
<parameters>
|
||||||
|
<property>
|
||||||
|
<name>sourcePath</name>
|
||||||
|
<description>the source path</description>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>projectListPath</name>
|
||||||
|
<description>the path to the project list</description>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>outputPath</name>
|
||||||
|
<description>the output path</description>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>accessToken</name>
|
||||||
|
<description>the access token used for the deposition in Zenodo</description>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>connectionUrl</name>
|
||||||
|
<description>the connection url for Zenodo</description>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>metadata</name>
|
||||||
|
<description> the metadata associated to the deposition</description>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>depositionType</name>
|
||||||
|
<description>the type of deposition we want to perform. "new" for brand new deposition, "version" for a new version of a published deposition (in this case the concept record id must be provided), "upload" to upload content to an open deposition for which we already have the deposition id (in this case the deposition id should be provided)</description>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>conceptRecordId</name>
|
||||||
|
<description>for new version, the id of the record for the old deposition</description>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>depositionId</name>
|
||||||
|
<description>the depositionId of a deposition open that has to be added content</description>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>sparkDriverMemory</name>
|
||||||
|
<description>memory for driver process</description>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>sparkExecutorMemory</name>
|
||||||
|
<description>memory for individual executor</description>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>sparkExecutorCores</name>
|
||||||
|
<description>number of cores used by single executor</description>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>oozieActionShareLibForSpark2</name>
|
||||||
|
<description>oozie action sharelib for spark 2.*</description>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>spark2ExtraListeners</name>
|
||||||
|
<value>com.cloudera.spark.lineage.NavigatorAppListener</value>
|
||||||
|
<description>spark 2.* extra listeners classname</description>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>spark2SqlQueryExecutionListeners</name>
|
||||||
|
<value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
|
||||||
|
<description>spark 2.* sql query execution listeners classname</description>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>spark2YarnHistoryServerAddress</name>
|
||||||
|
<description>spark 2.* yarn history server address</description>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>spark2EventLogDir</name>
|
||||||
|
<description>spark 2.* event log dir location</description>
|
||||||
|
</property>
|
||||||
|
</parameters>
|
||||||
|
<global>
|
||||||
|
<job-tracker>${jobTracker}</job-tracker>
|
||||||
|
<name-node>${nameNode}</name-node>
|
||||||
|
<configuration>
|
||||||
|
<property>
|
||||||
|
<name>mapreduce.job.queuename</name>
|
||||||
|
<value>${queueName}</value>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>oozie.launcher.mapred.job.queue.name</name>
|
||||||
|
                <value>${oozieLauncherQueueName}</value>
            </property>
            <property>
                <name>oozie.action.sharelib.for.spark</name>
                <value>${oozieActionShareLibForSpark2}</value>
            </property>
        </configuration>
    </global>

    <start to="dump_project"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <action name="dump_project">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Dump table project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpEntitiesJob</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}/project</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Project</arg>
            <arg>--outputPath</arg><arg>${workingDir}/project</arg>
            <arg>--communityMapPath</arg><arg>noneed</arg>
        </spark>
        <ok to="get_new_projects"/>
        <error to="Kill"/>
    </action>

    <action name="get_new_projects">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Select the new projects</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.projectssubset.ProjectsSubsetSparkJob</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/project</arg>
            <arg>--outputPath</arg><arg>${workingDir}/tar/project</arg>
            <arg>--projectListPath</arg><arg>${projectListPath}</arg>
        </spark>
        <ok to="make_archive"/>
        <error to="Kill"/>
    </action>

    <action name="make_archive">
        <java>
            <main-class>eu.dnetlib.dhp.oa.graph.dump.MakeTar</main-class>
            <arg>--hdfsPath</arg><arg>${outputPath}</arg>
            <arg>--nameNode</arg><arg>${nameNode}</arg>
            <arg>--sourcePath</arg><arg>${workingDir}/tar</arg>
        </java>
        <ok to="send_zenodo"/>
        <error to="Kill"/>
    </action>

    <action name="send_zenodo">
        <java>
            <main-class>eu.dnetlib.dhp.oa.graph.dump.SendToZenodoHDFS</main-class>
            <arg>--hdfsPath</arg><arg>${outputPath}</arg>
            <arg>--nameNode</arg><arg>${nameNode}</arg>
            <arg>--accessToken</arg><arg>${accessToken}</arg>
            <arg>--connectionUrl</arg><arg>${connectionUrl}</arg>
            <arg>--metadata</arg><arg>${metadata}</arg>
            <arg>--conceptRecordId</arg><arg>${conceptRecordId}</arg>
            <arg>--depositionType</arg><arg>${depositionType}</arg>
            <arg>--depositionId</arg><arg>${depositionId}</arg>
        </java>
        <ok to="End"/>
        <error to="Kill"/>
    </action>

    <end name="End"/>

</workflow-app>
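
For orientation on the `get_new_projects` step above: judging purely from its arguments, it seems to select the dumped projects not yet present in the id list at `${projectListPath}` before they are archived and sent to Zenodo. A minimal sketch of that idea under those assumptions — the `id` column, the JSON layout, and the overall logic are guesses, not the actual `ProjectsSubsetSparkJob`:

```scala
import org.apache.spark.sql.SparkSession

// Hypothetical stand-in for ProjectsSubsetSparkJob, inferred only from the three
// workflow arguments; the real job's schemas and behaviour may differ.
object ProjectsSubsetSketch {
  def main(args: Array[String]): Unit = {
    val Array(sourcePath, outputPath, projectListPath) = args
    val spark = SparkSession.builder().getOrCreate()

    val projects = spark.read.json(sourcePath)                       // dumped projects, one JSON object per line
    val sentIds  = spark.read.textFile(projectListPath).toDF("sent") // ids already deposited

    projects
      .join(sentIds, projects("id") === sentIds("sent"), "left_anti") // keep projects not yet in the list
      .write
      .mode("overwrite")
      .json(outputPath)
  }
}
```
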
@@ -1,347 +0,0 @@
<workflow-app name="sub_dump_community_funder_results" xmlns="uri:oozie:workflow:0.5">
    <parameters>
        <property>
            <name>sourcePath</name>
            <description>the source path</description>
        </property>
        <property>
            <name>outputPath</name>
            <description>the output path</description>
        </property>
        <property>
            <name>communityMapPath</name>
            <description>the path to the community map</description>
        </property>
        <property>
            <name>selectedResults</name>
            <description>the path to the possible subset of results to be dumped</description>
        </property>
        <property>
            <name>hiveDbName</name>
            <description>the target hive database name</description>
        </property>
        <property>
            <name>hiveJdbcUrl</name>
            <description>hive server jdbc url</description>
        </property>
        <property>
            <name>hiveMetastoreUris</name>
            <description>hive server metastore URIs</description>
        </property>
        <property>
            <name>sparkDriverMemory</name>
            <description>memory for driver process</description>
        </property>
        <property>
            <name>sparkExecutorMemory</name>
            <description>memory for individual executor</description>
        </property>
        <property>
            <name>sparkExecutorCores</name>
            <description>number of cores used by single executor</description>
        </property>
        <property>
            <name>oozieActionShareLibForSpark2</name>
            <description>oozie action sharelib for spark 2.*</description>
        </property>
        <property>
            <name>spark2ExtraListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
            <description>spark 2.* extra listeners classname</description>
        </property>
        <property>
            <name>spark2SqlQueryExecutionListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
            <description>spark 2.* sql query execution listeners classname</description>
        </property>
        <property>
            <name>spark2YarnHistoryServerAddress</name>
            <description>spark 2.* yarn history server address</description>
        </property>
        <property>
            <name>spark2EventLogDir</name>
            <description>spark 2.* event log dir location</description>
        </property>
    </parameters>

    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>mapreduce.job.queuename</name>
                <value>${queueName}</value>
            </property>
            <property>
                <name>oozie.launcher.mapred.job.queue.name</name>
                <value>${oozieLauncherQueueName}</value>
            </property>
            <property>
                <name>oozie.action.sharelib.for.spark</name>
                <value>${oozieActionShareLibForSpark2}</value>
            </property>
        </configuration>
    </global>

    <start to="fork_dump"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <fork name="fork_dump">
        <path start="dump_publication"/>
        <path start="dump_dataset"/>
        <path start="dump_orp"/>
        <path start="dump_software"/>
    </fork>

    <action name="dump_publication">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Dump table publication for community/funder related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${selectedResults}/publication</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
            <arg>--outputPath</arg><arg>${workingDir}/dump/publication</arg>
            <arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
            <arg>--dumpType</arg><arg>${dumpType}</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <action name="dump_dataset">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Dump table dataset for community/funder related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${selectedResults}/dataset</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
            <arg>--outputPath</arg><arg>${workingDir}/dump/dataset</arg>
            <arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <action name="dump_orp">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Dump table ORP for community related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${selectedResults}/otherresearchproduct</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
            <arg>--outputPath</arg><arg>${workingDir}/dump/otherresearchproduct</arg>
            <arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <action name="dump_software">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Dump table software for community related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${selectedResults}/software</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
            <arg>--outputPath</arg><arg>${workingDir}/dump/software</arg>
            <arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <join name="join_dump" to="prepareResultProject"/>

    <action name="prepareResultProject">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Prepare association result subset of project info</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkPrepareResultProject</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}</arg>
            <arg>--outputPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="fork_extendWithProject"/>
        <error to="Kill"/>
    </action>

    <fork name="fork_extendWithProject">
        <path start="extend_publication"/>
        <path start="extend_dataset"/>
        <path start="extend_orp"/>
        <path start="extend_software"/>
    </fork>

    <action name="extend_publication">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Extend dumped publications with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/dump/publication</arg>
            <arg>--outputPath</arg><arg>${outputPath}/ext/publication</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <action name="extend_dataset">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Extend dumped dataset with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/dump/dataset</arg>
            <arg>--outputPath</arg><arg>${outputPath}/ext/dataset</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <action name="extend_orp">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Extend dumped ORP with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/dump/otherresearchproduct</arg>
            <arg>--outputPath</arg><arg>${outputPath}/ext/orp</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <action name="extend_software">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Extend dumped software with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/dump/software</arg>
            <arg>--outputPath</arg><arg>${outputPath}/ext/software</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <join name="join_extend" to="End"/>

    <end name="End"/>

</workflow-app>
@@ -1,2 +0,0 @@
## This is a classpath-based import file (this header is required)
dump_common classpath eu/dnetlib/dhp/oa/graph/dump/wf/subworkflows/commoncommunityfunder/oozie_app
@@ -77,42 +77,259 @@
        </configuration>
    </global>

    <start to="common_action_community_funder"/>
    <start to="fork_dump"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <action name="common_action_community_funder">
        <sub-workflow>
            <app-path>${wf:appPath()}/dump_common</app-path>
            <propagate-configuration/>
            <configuration>
                <property>
                    <name>sourcePath</name>
                    <value>${sourcePath}</value>
                </property>
                <property>
                    <name>selectedResults</name>
                    <value>${sourcePath}</value>
                </property>
                <property>
                    <name>communityMapPath</name>
                    <value>${workingDir}/communityMap</value>
                </property>
                <property>
                    <name>outputPath</name>
                    <value>${workingDir}</value>
                </property>
            </configuration>
        </sub-workflow>
        <ok to="splitForCommunities" />
        <error to="Kill" />
    </action>

    <fork name="fork_dump">
        <path start="dump_publication"/>
        <path start="dump_dataset"/>
        <path start="dump_orp"/>
        <path start="dump_software"/>
    </fork>

    <action name="dump_publication">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Dump table publication for community/funder related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}/publication</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
            <arg>--outputPath</arg><arg>${workingDir}/dump/publication</arg>
            <arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
            <arg>--dumpType</arg><arg>${dumpType}</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <action name="dump_dataset">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Dump table dataset for community/funder related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}/dataset</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
            <arg>--outputPath</arg><arg>${workingDir}/dump/dataset</arg>
            <arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <action name="dump_orp">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Dump table ORP for community related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}/otherresearchproduct</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
            <arg>--outputPath</arg><arg>${workingDir}/dump/otherresearchproduct</arg>
            <arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <action name="dump_software">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Dump table software for community related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}/software</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
            <arg>--outputPath</arg><arg>${workingDir}/dump/software</arg>
            <arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <join name="join_dump" to="prepareResultProject"/>

    <action name="prepareResultProject">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Prepare association result subset of project info</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkPrepareResultProject</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}</arg>
            <arg>--outputPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="fork_extendWithProject"/>
        <error to="Kill"/>
    </action>

    <fork name="fork_extendWithProject">
        <path start="extend_publication"/>
        <path start="extend_dataset"/>
        <path start="extend_orp"/>
        <path start="extend_software"/>
    </fork>

    <action name="extend_publication">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Extend dumped publications with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/dump/publication</arg>
            <arg>--outputPath</arg><arg>${workingDir}/ext/publication</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <action name="extend_dataset">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Extend dumped dataset with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/dump/dataset</arg>
            <arg>--outputPath</arg><arg>${workingDir}/ext/dataset</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <action name="extend_orp">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Extend dumped ORP with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/dump/otherresearchproduct</arg>
            <arg>--outputPath</arg><arg>${workingDir}/ext/orp</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <action name="extend_software">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Extend dumped software with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/dump/software</arg>
            <arg>--outputPath</arg><arg>${workingDir}/ext/software</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <join name="join_extend" to="splitForCommunities"/>

    <action name="splitForCommunities">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
@@ -298,6 +298,7 @@
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
                --conf spark.sql.shuffle.partitions=3840
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}</arg>
            <arg>--outputPath</arg><arg>${workingDir}/validrelation</arg>
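
A side note on the one-line change above: `spark.sql.shuffle.partitions` caps how many tasks every shuffle in the job fans out to (Spark's default is 200). Pinning it in the workflow is equivalent to setting it on the session, as in this sketch; 3840 is the value chosen here for this cluster, not a general recommendation:

```scala
import org.apache.spark.sql.SparkSession

// Programmatic equivalent of the workflow's new --conf flag.
object ShufflePartitionsSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .config("spark.sql.shuffle.partitions", "3840") // same knob the spark-opts line sets
      .getOrCreate()
    println(spark.conf.get("spark.sql.shuffle.partitions")) // 3840
  }
}
```
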
@@ -1,2 +0,0 @@
## This is a classpath-based import file (this header is required)
dump_common classpath eu/dnetlib/dhp/oa/graph/dump/wf/subworkflows/commoncommunityfunder/oozie_app
@@ -77,12 +77,36 @@
        </configuration>
    </global>

    <start to="fork_result_linked_to_projects"/>
    <start to="prepareResultProject"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <action name="prepareResultProject">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Prepare association result subset of project info</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkPrepareResultProject</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}</arg>
            <arg>--outputPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="fork_result_linked_to_projects"/>
        <error to="Kill"/>
    </action>

    <fork name="fork_result_linked_to_projects">
        <path start="select_publication_linked_to_projects"/>
@@ -111,7 +135,8 @@
            <arg>--sourcePath</arg><arg>${sourcePath}/publication</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
            <arg>--outputPath</arg><arg>${workingDir}/result/publication</arg>
            <arg>--graphPath</arg><arg>${sourcePath}</arg>
            <arg>--graphPath</arg><arg>${workingDir}/preparedInfo</arg>
            <arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
        </spark>
        <ok to="join_link"/>
        <error to="Kill"/>
@@ -137,7 +162,8 @@
            <arg>--sourcePath</arg><arg>${sourcePath}/dataset</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
            <arg>--outputPath</arg><arg>${workingDir}/result/dataset</arg>
            <arg>--graphPath</arg><arg>${sourcePath}</arg>
            <arg>--graphPath</arg><arg>${workingDir}/preparedInfo</arg>
            <arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
        </spark>
        <ok to="join_link"/>
        <error to="Kill"/>
@@ -163,7 +189,8 @@
            <arg>--sourcePath</arg><arg>${sourcePath}/otherresearchproduct</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
            <arg>--outputPath</arg><arg>${workingDir}/result/otherresearchproduct</arg>
            <arg>--graphPath</arg><arg>${sourcePath}</arg>
            <arg>--graphPath</arg><arg>${workingDir}/preparedInfo</arg>
            <arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
        </spark>
        <ok to="join_link"/>
        <error to="Kill"/>
@@ -189,41 +216,14 @@
            <arg>--sourcePath</arg><arg>${sourcePath}/software</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
            <arg>--outputPath</arg><arg>${workingDir}/result/software</arg>
            <arg>--graphPath</arg><arg>${sourcePath}</arg>
            <arg>--graphPath</arg><arg>${workingDir}/preparedInfo</arg>
            <arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
        </spark>
        <ok to="join_link"/>
        <error to="Kill"/>
    </action>

    <join name="join_link" to="common_action_community_funder"/>
    <join name="join_link" to="dump_funder_results"/>

    <action name="common_action_community_funder">
        <sub-workflow>
            <app-path>${wf:appPath()}/dump_common</app-path>
            <propagate-configuration/>
            <configuration>
                <property>
                    <name>sourcePath</name>
                    <value>${sourcePath}</value>
                </property>
                <property>
                    <name>selectedResults</name>
                    <value>${workingDir}/result</value>
                </property>
                <property>
                    <name>communityMapPath</name>
                    <value>${workingDir}/communityMap</value>
                </property>
                <property>
                    <name>outputPath</name>
                    <value>${workingDir}</value>
                </property>
            </configuration>
        </sub-workflow>
        <ok to="dump_funder_results" />
        <error to="Kill" />
    </action>

    <action name="dump_funder_results">
        <spark xmlns="uri:oozie:spark-action:0.2">
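
The pattern running through the last few hunks: the result-to-project association is now computed once (`prepareResultProject`, persisted at `${workingDir}/preparedInfo`) and each `select_*_linked_to_projects` step receives it via `--graphPath`, instead of rescanning the whole graph at `${sourcePath}`. A minimal sketch of that reuse, with illustrative stand-in types and data rather than the real jobs' classes:

```scala
import org.apache.spark.sql.SparkSession

// Illustrative stand-ins: `prepared` plays the role of ${workingDir}/preparedInfo.
object PreparedInfoJoinSketch {
  case class Res(id: String)
  case class ResultProject(resultId: String, projectId: String)

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").getOrCreate()
    import spark.implicits._

    val results  = Seq(Res("r1"), Res("r2")).toDS()
    val prepared = Seq(ResultProject("r1", "p1")).toDS()

    // keep only results linked to at least one project, without touching the full graph
    val linked = results.join(prepared, results("id") === prepared("resultId"), "left_semi")
    linked.show() // only r1 survives
  }
}
```
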
@@ -242,9 +242,8 @@
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/ext</arg>
            <arg>--sourcePath</arg><arg>${workingDir}/result</arg>
            <arg>--outputPath</arg><arg>${outputPath}</arg>
            <arg>--graphPath</arg><arg>${sourcePath}</arg>
        </spark>
        <ok to="End"/>
        <error to="Kill"/>
@@ -1,5 +1,6 @@
[
  {"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true},
  {"paramName":"s", "paramLongName":"sourcePath", "paramDescription": "the source Path", "paramRequired": true},
  {"paramName":"t", "paramLongName":"targetPath", "paramDescription": "the path of the raw graph", "paramRequired": true}
  {"paramName":"t", "paramLongName":"targetPath", "paramDescription": "the path of the raw graph", "paramRequired": true},
  {"paramName":"r", "paramLongName":"filterRelation", "paramDescription": "the relation to filter", "paramRequired": false}
]
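
This hunk and the next one follow the same pattern: a new optional entry (`"paramRequired": false`) whose absence surfaces as a null from the parser, which the Scala jobs further down guard against. A small self-contained sketch of that optional-parameter pattern — the map-based lookup is a stand-in, not dnet-hadoop's ArgumentApplicationParser:

```scala
// The null-checks in the Scala hunks below suggest the real parser returns null
// when an optional flag is absent; this sketch models only that contract.
object OptionalParamSketch {
  def get(args: Map[String, String], name: String): String =
    args.getOrElse(name, null)

  def main(argv: Array[String]): Unit = {
    val args = Map("sourcePath" -> "/tmp/graph") // filterRelation not supplied
    val filterRelation = get(args, "filterRelation")
    if (filterRelation != null && filterRelation.trim.nonEmpty)
      println(s"keeping only relations with subRelType=$filterRelation")
    else
      println("no filter supplied, falling back to the default semantic blacklist")
  }
}
```
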
@@ -3,5 +3,7 @@
  {"paramName":"s", "paramLongName":"sourcePath", "paramDescription": "the source Path", "paramRequired": true},
  {"paramName":"su", "paramLongName":"scholixUpdatePath", "paramDescription": "the scholix updated Path", "paramRequired": false},
  {"paramName":"t", "paramLongName":"targetPath", "paramDescription": "the path of the raw graph", "paramRequired": true},
  {"paramName":"o", "paramLongName":"objectType", "paramDescription": "should be scholix or Summary", "paramRequired": true}
  {"paramName":"o", "paramLongName":"objectType", "paramDescription": "should be scholix or Summary", "paramRequired": true},
  {"paramName":"mp", "paramLongName":"maxPidNumberFilter", "paramDescription": "filter max number of pids in source/target", "paramRequired": false}
]
@@ -0,0 +1,10 @@
<configuration>
    <property>
        <name>oozie.use.system.libpath</name>
        <value>true</value>
    </property>
    <property>
        <name>oozie.action.sharelib.for.spark</name>
        <value>spark2</value>
    </property>
</configuration>
@@ -0,0 +1,145 @@
<workflow-app name="Create Scholix Dump" xmlns="uri:oozie:workflow:0.5">
    <parameters>
        <property>
            <name>sourcePath</name>
            <description>the working dir base path</description>
        </property>
        <property>
            <name>targetPath</name>
            <description>the final graph path</description>
        </property>
        <property>
            <name>relationFilter</name>
            <description>the relation semantic to filter by</description>
        </property>
        <property>
            <name>maxNumberOfPid</name>
            <description>keep only scholix records with at most this number of pids in source and target</description>
        </property>
    </parameters>

    <start to="ImportDatasetEntities"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <action name="ImportDatasetEntities">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Import JSONRDD to Dataset kryo</name>
            <class>eu.dnetlib.dhp.sx.graph.SparkConvertRDDtoDataset</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.shuffle.partitions=3000
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--master</arg><arg>yarn</arg>
            <arg>--sourcePath</arg><arg>${sourcePath}</arg>
            <arg>--targetPath</arg><arg>${targetPath}</arg>
            <arg>--filterRelation</arg><arg>${relationFilter}</arg>
        </spark>
        <ok to="CreateSummaries"/>
        <error to="Kill"/>
    </action>

    <action name="CreateSummaries">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Convert Entities to summaries</name>
            <class>eu.dnetlib.dhp.sx.graph.SparkCreateSummaryObject</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.shuffle.partitions=20000
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--master</arg><arg>yarn</arg>
            <arg>--sourcePath</arg><arg>${targetPath}/entities</arg>
            <arg>--targetPath</arg><arg>${targetPath}/provision/summaries</arg>
        </spark>
        <ok to="CreateScholix"/>
        <error to="Kill"/>
    </action>

    <action name="CreateScholix">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Generate Scholix Dataset</name>
            <class>eu.dnetlib.dhp.sx.graph.SparkCreateScholix</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.shuffle.partitions=30000
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--master</arg><arg>yarn</arg>
            <arg>--summaryPath</arg><arg>${targetPath}/provision/summaries</arg>
            <arg>--targetPath</arg><arg>${targetPath}/provision/scholix</arg>
            <arg>--relationPath</arg><arg>${targetPath}/relation</arg>
        </spark>
        <ok to="DropJSONPath"/>
        <error to="Kill"/>
    </action>

    <action name="DropJSONPath">
        <fs>
            <delete path='${targetPath}/json'/>
            <mkdir path='${targetPath}/json/'/>
        </fs>
        <ok to="SerializeScholix"/>
        <error to="Kill"/>
    </action>

    <action name="SerializeScholix">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Serialize scholix to JSON</name>
            <class>eu.dnetlib.dhp.sx.graph.SparkConvertObjectToJson</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.shuffle.partitions=6000
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--master</arg><arg>yarn</arg>
            <arg>--sourcePath</arg><arg>${targetPath}/provision/scholix/scholix</arg>
            <arg>--targetPath</arg><arg>${targetPath}/json/scholix_json</arg>
            <arg>--objectType</arg><arg>scholix</arg>
            <arg>--maxPidNumberFilter</arg><arg>${maxNumberOfPid}</arg>
        </spark>
        <ok to="End"/>
        <error to="Kill"/>
    </action>

    <end name="End"/>
</workflow-app>
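
One step of the new workflow above worth spelling out: `DropJSONPath` clears and recreates the JSON output directory before `SerializeScholix` writes into it. A sketch of the same housekeeping with the Hadoop FileSystem API (paths mirror the workflow; assumes the default filesystem configuration):

```scala
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}

object DropJsonPathSketch {
  def main(args: Array[String]): Unit = {
    val targetPath = args.headOption.getOrElse("/tmp/scholix")
    val fs = FileSystem.get(new Configuration())
    fs.delete(new Path(s"$targetPath/json"), true) // recursive delete, like <delete path=.../>
    fs.mkdirs(new Path(s"$targetPath/json"))       // recreate, like <mkdir path=.../>
  }
}
```
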
@@ -4,6 +4,7 @@ import com.fasterxml.jackson.databind.ObjectMapper
import eu.dnetlib.dhp.application.ArgumentApplicationParser
import eu.dnetlib.dhp.schema.sx.scholix.Scholix
import eu.dnetlib.dhp.schema.sx.summary.ScholixSummary
import eu.dnetlib.dhp.sx.graph.SparkConvertObjectToJson.toInt
import org.apache.commons.io.IOUtils
import org.apache.hadoop.io.compress.GzipCodec
import org.apache.spark.SparkConf
@@ -12,6 +13,14 @@ import org.slf4j.{Logger, LoggerFactory}

object SparkConvertObjectToJson {

  def toInt(s: String): Option[Int] = {
    try {
      Some(s.toInt)
    } catch {
      case e: Exception => None
    }
  }

  def main(args: Array[String]): Unit = {
    val log: Logger = LoggerFactory.getLogger(getClass)
    val conf: SparkConf = new SparkConf()
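
The `toInt` helper added above is a null-safe, exception-safe parse used to validate the optional `maxPidNumberFilter` before applying it. A quick usage check (the helper is duplicated here only to keep the sketch self-contained):

```scala
object ToIntSketch {
  // mirrors the toInt added in the hunk above
  def toInt(s: String): Option[Int] =
    try Some(s.toInt) catch { case _: Exception => None }

  def main(args: Array[String]): Unit = {
    println(toInt("5"))   // Some(5)
    println(toInt(null))  // None — a missing optional parameter
    println(toInt("abc")) // None — malformed input falls back to the unfiltered path
  }
}
```
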
@@ -37,6 +46,8 @@ object SparkConvertObjectToJson {
    log.info(s"objectType -> $objectType")
    val scholixUpdatePath = parser.get("scholixUpdatePath")
    log.info(s"scholixUpdatePath -> $scholixUpdatePath")
    val maxPidNumberFilter = parser.get("maxPidNumberFilter")
    log.info(s"maxPidNumberFilter -> $maxPidNumberFilter")

    implicit val scholixEncoder: Encoder[Scholix] = Encoders.kryo[Scholix]
    implicit val summaryEncoder: Encoder[ScholixSummary] = Encoders.kryo[ScholixSummary]
@@ -47,12 +58,22 @@ object SparkConvertObjectToJson {
      case "scholix" =>
        log.info("Serialize Scholix")
        val d: Dataset[Scholix] = spark.read.load(sourcePath).as[Scholix]
        val u: Dataset[Scholix] = spark.read.load(s"$scholixUpdatePath/scholix").as[Scholix]
        d.union(u)
          .repartition(8000)
          .map(s => mapper.writeValueAsString(s))(Encoders.STRING)
          .rdd
          .saveAsTextFile(targetPath, classOf[GzipCodec])
        // val u: Dataset[Scholix] = spark.read.load(s"$scholixUpdatePath/scholix").as[Scholix]
        if (maxPidNumberFilter != null && toInt(maxPidNumberFilter).isDefined) {
          val mp = toInt(maxPidNumberFilter).get
          d
            .filter(s => (s.getSource.getIdentifier.size() <= mp) && (s.getTarget.getIdentifier.size() <= mp))
            .map(s => mapper.writeValueAsString(s))(Encoders.STRING)
            .rdd
            .saveAsTextFile(targetPath, classOf[GzipCodec])
        } else {
          d
            .repartition(8000)
            .map(s => mapper.writeValueAsString(s))(Encoders.STRING)
            .rdd
            .saveAsTextFile(targetPath, classOf[GzipCodec])
        }

      case "summary" =>
        log.info("Serialize Summary")
        val d: Dataset[ScholixSummary] = spark.read.load(sourcePath).as[ScholixSummary]
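
The filter in the new branch keeps a scholix record only when both its source and target carry at most `maxPidNumberFilter` identifiers. The same predicate reduced to a pure function over a toy record (`ScholixLike` is a stand-in for the real `Scholix` bean):

```scala
case class ScholixLike(sourcePids: List[String], targetPids: List[String])

object PidFilterSketch {
  def keep(s: ScholixLike, maxPids: Int): Boolean =
    s.sourcePids.size <= maxPids && s.targetPids.size <= maxPids

  def main(args: Array[String]): Unit = {
    val records = List(
      ScholixLike(List("doi:a"), List("doi:b")),
      ScholixLike(List.fill(50)("pid"), List("doi:c")) // pid-heavy source gets dropped
    )
    println(records.count(keep(_, maxPids = 5))) // 1
  }
}
```
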
@@ -4,9 +4,11 @@ import com.fasterxml.jackson.databind.ObjectMapper
import eu.dnetlib.dhp.application.ArgumentApplicationParser
import eu.dnetlib.dhp.schema.oaf.{OtherResearchProduct, Publication, Relation, Result, Software, Dataset => OafDataset}
import org.apache.commons.io.IOUtils
import org.apache.commons.lang3.StringUtils
import org.apache.spark.SparkConf
import org.apache.spark.sql.{Encoder, Encoders, SaveMode, SparkSession}
import org.slf4j.{Logger, LoggerFactory}

import scala.collection.JavaConverters._

object SparkConvertRDDtoDataset {
@@ -34,6 +36,9 @@ object SparkConvertRDDtoDataset {
    val t = parser.get("targetPath")
    log.info(s"targetPath -> $t")

    val filterRelation = parser.get("filterRelation")
    log.info(s"filterRelation -> $filterRelation")

    val entityPath = s"$t/entities"
    val relPath = s"$t/relation"
    val mapper = new ObjectMapper()
@@ -94,28 +99,44 @@ object SparkConvertRDDtoDataset {

    log.info("Converting Relation")

    val relationSemanticFilter = List(
      // "cites",
      // "iscitedby",
      "merges",
      "ismergedin",
      "HasAmongTopNSimilarDocuments",
      "IsAmongTopNSimilarDocuments"
    )

    val rddRelation = spark.sparkContext
      .textFile(s"$sourcePath/relation")
      .map(s => mapper.readValue(s, classOf[Relation]))
      .filter(r => r.getDataInfo != null && r.getDataInfo.getDeletedbyinference == false)
      .filter(r => r.getSource.startsWith("50") && r.getTarget.startsWith("50"))
      //filter OpenCitations relations
      .filter(r =>
        r.getCollectedfrom != null && r.getCollectedfrom.size() > 0 && !r.getCollectedfrom.asScala.exists(k =>
          "opencitations".equalsIgnoreCase(k.getValue)
        )
      )
      .filter(r => !relationSemanticFilter.exists(k => k.equalsIgnoreCase(r.getRelClass)))
    spark.createDataset(rddRelation).as[Relation].write.mode(SaveMode.Overwrite).save(s"$relPath")

    if (filterRelation != null && StringUtils.isNoneBlank(filterRelation)) {

      val rddRelation = spark.sparkContext
        .textFile(s"$sourcePath/relation")
        .map(s => mapper.readValue(s, classOf[Relation]))
        .filter(r => r.getDataInfo != null && r.getDataInfo.getDeletedbyinference == false)
        .filter(r => r.getSource.startsWith("50") && r.getTarget.startsWith("50"))
        //filter OpenCitations relations
        .filter(r =>
          r.getCollectedfrom != null && r.getCollectedfrom.size() > 0 && !r.getCollectedfrom.asScala.exists(k =>
            "opencitations".equalsIgnoreCase(k.getValue)
          )
        )
        .filter(r => r.getSubRelType != null && r.getSubRelType.equalsIgnoreCase(filterRelation))
      spark.createDataset(rddRelation).as[Relation].write.mode(SaveMode.Overwrite).save(s"$relPath")
    } else {

      val relationSemanticFilter = List(
        "merges",
        "ismergedin",
        "HasAmongTopNSimilarDocuments",
        "IsAmongTopNSimilarDocuments"
      )

      val rddRelation = spark.sparkContext
        .textFile(s"$sourcePath/relation")
        .map(s => mapper.readValue(s, classOf[Relation]))
        .filter(r => r.getDataInfo != null && r.getDataInfo.getDeletedbyinference == false)
        .filter(r => r.getSource.startsWith("50") && r.getTarget.startsWith("50"))
        //filter OpenCitations relations
        .filter(r =>
          r.getCollectedfrom != null && r.getCollectedfrom.size() > 0 && !r.getCollectedfrom.asScala.exists(k =>
            "opencitations".equalsIgnoreCase(k.getValue)
          )
        )
        .filter(r => !relationSemanticFilter.exists(k => k.equalsIgnoreCase(r.getRelClass)))
      spark.createDataset(rddRelation).as[Relation].write.mode(SaveMode.Overwrite).save(s"$relPath")
    }

  }
}
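
The restructured relation handling above now has two modes: when `filterRelation` is supplied, keep only relations of that `subRelType`; otherwise fall back to the previous behaviour of dropping a blacklist of semantics by `relClass`. The same branching on plain lists (`RelLike` stands in for the `Relation` bean):

```scala
case class RelLike(subRelType: String, relClass: String)

object RelationFilterSketch {
  val semanticBlacklist = List("merges", "ismergedin",
    "HasAmongTopNSimilarDocuments", "IsAmongTopNSimilarDocuments")

  def select(rels: List[RelLike], filterRelation: Option[String]): List[RelLike] =
    filterRelation match {
      case Some(f) => rels.filter(r => r.subRelType != null && r.subRelType.equalsIgnoreCase(f))
      case None    => rels.filter(r => !semanticBlacklist.exists(_.equalsIgnoreCase(r.relClass)))
    }

  def main(args: Array[String]): Unit = {
    val rels = List(RelLike("citation", "cites"), RelLike("dedup", "merges"))
    println(select(rels, Some("citation")).size) // 1 — only citation relations kept
    println(select(rels, None).size)             // 1 — "merges" is blacklisted
  }
}
```
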
@@ -321,4 +321,27 @@ public class PrepareResultProjectJobTest {
                3, resultExplodedProvenance.filter("provenance = 'sysimport:crosswalk:entityregistry'").count());

    }

    @Test
    void testMatchx() throws Exception {

        final String sourcePath = getClass()
            .getResource("/eu/dnetlib/dhp/oa/graph/dump/funderresource/match")
            .getPath();

        SparkPrepareResultProject.main(new String[] {
            "-isSparkSessionManaged", Boolean.FALSE.toString(),
            "-outputPath", workingDir.toString() + "/preparedInfo",
            "-sourcePath", sourcePath
        });

        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

        JavaRDD<ResultProject> tmp = sc
            .textFile(workingDir.toString() + "/preparedInfo")
            .map(item -> OBJECT_MAPPER.readValue(item, ResultProject.class));

        tmp.foreach(r -> System.out.println(OBJECT_MAPPER.writeValueAsString(r)));
    }

}
@@ -22,6 +22,7 @@ import org.slf4j.LoggerFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;

 import eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkResultLinkedToProject;
+import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
 import eu.dnetlib.dhp.schema.oaf.Publication;
 import eu.dnetlib.dhp.schema.oaf.Result;

@@ -76,7 +77,11 @@ public class ResultLinkedToProjectTest {
             .getPath();

         final String graphPath = getClass()
-            .getResource("/eu/dnetlib/dhp/oa/graph/dump/funderresource/nomatch")
+            .getResource("/eu/dnetlib/dhp/oa/graph/dump/funderresource/preparedInfo")
+            .getPath();
+
+        final String communityMapPath = getClass()
+            .getResource("/eu/dnetlib/dhp/oa/graph/dump/funderresource/communityMapPath")
             .getPath();

         SparkResultLinkedToProject.main(new String[] {
@@ -84,20 +89,18 @@ public class ResultLinkedToProjectTest {
             "-outputPath", workingDir.toString() + "/preparedInfo",
             "-sourcePath", sourcePath,
             "-resultTableName", "eu.dnetlib.dhp.schema.oaf.Publication",
-            "-graphPath", graphPath
+            "-graphPath", graphPath,
+            "-communityMapPath", communityMapPath

         });

         final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

-        JavaRDD<Result> tmp = sc
+        JavaRDD<CommunityResult> tmp = sc
             .textFile(workingDir.toString() + "/preparedInfo")
-            .map(item -> OBJECT_MAPPER.readValue(item, Result.class));
-
-        org.apache.spark.sql.Dataset<Result> verificationDataset = spark
-            .createDataset(tmp.rdd(), Encoders.bean(Result.class));
-
-        Assertions.assertEquals(0, verificationDataset.count());
+            .map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));
+
+        Assertions.assertEquals(0, tmp.count());

     }

@@ -108,8 +111,12 @@ public class ResultLinkedToProjectTest {
             .getResource("/eu/dnetlib/dhp/oa/graph/dump/funderresource/match/papers.json")
             .getPath();

-        final String relationPath = getClass()
-            .getResource("/eu/dnetlib/dhp/oa/graph/dump/funderresource/match")
+        final String graphPath = getClass()
+            .getResource("/eu/dnetlib/dhp/oa/graph/dump/funderresource/preparedInfo")
+            .getPath();
+
+        final String communityMapPath = getClass()
+            .getResource("/eu/dnetlib/dhp/oa/graph/dump/funderresource/communityMapPath")
             .getPath();

         SparkResultLinkedToProject.main(new String[] {
@@ -117,20 +124,18 @@ public class ResultLinkedToProjectTest {
             "-outputPath", workingDir.toString() + "/preparedInfo",
             "-sourcePath", sourcePath,
             "-resultTableName", "eu.dnetlib.dhp.schema.oaf.Publication",
-            "-graphPath", relationPath
+            "-graphPath", graphPath,
+            "-communityMapPath", communityMapPath

         });

         final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

-        JavaRDD<Publication> tmp = sc
+        JavaRDD<CommunityResult> tmp = sc
             .textFile(workingDir.toString() + "/preparedInfo")
-            .map(item -> OBJECT_MAPPER.readValue(item, Publication.class));
-
-        org.apache.spark.sql.Dataset<Publication> verificationDataset = spark
-            .createDataset(tmp.rdd(), Encoders.bean(Publication.class));
-
-        Assertions.assertEquals(1, verificationDataset.count());
+            .map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));
+
+        Assertions.assertEquals(1, tmp.count());

     }
@@ -5,10 +5,14 @@ import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;

+// import eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkDumpFunderResults2;
+// import eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkGetFunderList;
 import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.ForeachFunction;
+import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.SparkSession;
 import org.junit.jupiter.api.AfterAll;

@@ -68,20 +72,19 @@ public class SplitPerFunderTest {
     void test1() throws Exception {

         final String sourcePath = getClass()
-            .getResource("/eu/dnetlib/dhp/oa/graph/dump/funderresource/extendeddump")
+            .getResource("/eu/dnetlib/dhp/oa/graph/dump/funderresource/ext")
             .getPath();

         SparkDumpFunderResults.main(new String[] {
             "-isSparkSessionManaged", Boolean.FALSE.toString(),
             "-outputPath", workingDir.toString() + "/split",
-            "-sourcePath", sourcePath,
-            "-graphPath", sourcePath
+            "-sourcePath", sourcePath

         });

         final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

-        // FP7 3
+        // FP7 3 and H2020 3
         JavaRDD<CommunityResult> tmp = sc
             .textFile(workingDir.toString() + "/split/EC_FP7")
             .map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));

@@ -143,11 +146,6 @@ public class SplitPerFunderTest {
             .map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));
         Assertions.assertEquals(1, tmp.count());

-        // CONICYT 0
-        tmp = sc
-            .textFile(workingDir.toString() + "/split/CONICYTF")
-            .map(item -> OBJECT_MAPPER.readValue(item, CommunityResult.class));
-        Assertions.assertEquals(0, tmp.count());
-
     }

 }
@@ -0,0 +1,125 @@
+
+package eu.dnetlib.dhp.oa.graph.dump.projectssubset;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.HashMap;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.sql.SaveMode;
+import org.apache.spark.sql.SparkSession;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.schema.dump.oaf.graph.Project;
+
+public class ProjectSubsetTest {
+    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+    private static SparkSession spark;
+    private static Path workingDir;
+    private static final Logger log = LoggerFactory
+        .getLogger(eu.dnetlib.dhp.oa.graph.dump.projectssubset.ProjectSubsetTest.class);
+
+    @BeforeAll
+    public static void beforeAll() throws IOException {
+        workingDir = Files
+            .createTempDirectory(
+                eu.dnetlib.dhp.oa.graph.dump.projectssubset.ProjectSubsetTest.class.getSimpleName());
+        log.info("using work dir {}", workingDir);
+        SparkConf conf = new SparkConf();
+        conf.setAppName(eu.dnetlib.dhp.oa.graph.dump.projectssubset.ProjectSubsetTest.class.getSimpleName());
+        conf.setMaster("local[*]");
+        conf.set("spark.driver.host", "localhost");
+        conf.set("hive.metastore.local", "true");
+        conf.set("spark.ui.enabled", "false");
+        conf.set("spark.sql.warehouse.dir", workingDir.toString());
+        conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
+        spark = SparkSession
+            .builder()
+            .appName(eu.dnetlib.dhp.oa.graph.dump.projectssubset.ProjectSubsetTest.class.getSimpleName())
+            .config(conf)
+            .getOrCreate();
+    }
+
+    @AfterAll
+    public static void afterAll() throws IOException {
+        FileUtils.deleteDirectory(workingDir.toFile());
+        spark.stop();
+    }
+
+    @Test
+    void testAllNew() throws Exception {
+        final String projectListPath = getClass()
+            .getResource("/eu/dnetlib/dhp/oa/graph/dump/projectsubset/projectId")
+            .getPath();
+        final String sourcePath = getClass()
+            .getResource("/eu/dnetlib/dhp/oa/graph/dump/projectsubset/allnew/projects")
+            .getPath();
+        spark
+            .read()
+            .textFile(projectListPath)
+            .write()
+            .mode(SaveMode.Overwrite)
+            .text(workingDir.toString() + "/projectIds");
+        ProjectsSubsetSparkJob.main(new String[] {
+            "-isSparkSessionManaged", Boolean.FALSE.toString(),
+            "-outputPath", workingDir.toString() + "/projects",
+            "-sourcePath", sourcePath,
+            "-projectListPath", workingDir.toString() + "/projectIds"
+        });
+        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+        JavaRDD<Project> tmp = sc
+            .textFile(workingDir.toString() + "/projects")
+            .map(item -> OBJECT_MAPPER.readValue(item, Project.class));
+        Assertions.assertEquals(12, tmp.count());
+        Assertions.assertEquals(2, tmp.filter(p -> p.getId().substring(3, 15).equals("aka_________")).count());
+        Assertions.assertEquals(2, tmp.filter(p -> p.getId().substring(3, 15).equals("anr_________")).count());
+        Assertions.assertEquals(4, tmp.filter(p -> p.getId().substring(3, 15).equals("arc_________")).count());
+        Assertions.assertEquals(3, tmp.filter(p -> p.getId().substring(3, 15).equals("conicytf____")).count());
+        Assertions.assertEquals(1, tmp.filter(p -> p.getId().substring(3, 15).equals("corda_______")).count());
+        Assertions.assertEquals(40, sc.textFile(workingDir.toString() + "/projectIds").count());
+    }
+
+    @Test
+    void testMatchOne() throws Exception {
+        final String projectListPath = getClass()
+            .getResource("/eu/dnetlib/dhp/oa/graph/dump/projectsubset/projectId")
+            .getPath();
+        final String sourcePath = getClass()
+            .getResource("/eu/dnetlib/dhp/oa/graph/dump/projectsubset/matchOne/projects")
+            .getPath();
+        spark
+            .read()
+            .textFile(projectListPath)
+            .write()
+            .mode(SaveMode.Overwrite)
+            .text(workingDir.toString() + "/projectIds");
+        ProjectsSubsetSparkJob.main(new String[] {
+            "-isSparkSessionManaged", Boolean.FALSE.toString(),
+            "-outputPath", workingDir.toString() + "/projects",
+            "-sourcePath", sourcePath,
+            "-projectListPath", workingDir.toString() + "/projectIds"
+        });
+        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+        JavaRDD<Project> tmp = sc
+            .textFile(workingDir.toString() + "/projects")
+            .map(item -> OBJECT_MAPPER.readValue(item, Project.class));
+        Assertions.assertEquals(11, tmp.count());
+        Assertions.assertEquals(2, tmp.filter(p -> p.getId().substring(3, 15).equals("aka_________")).count());
+        Assertions.assertEquals(2, tmp.filter(p -> p.getId().substring(3, 15).equals("anr_________")).count());
+        Assertions.assertEquals(4, tmp.filter(p -> p.getId().substring(3, 15).equals("arc_________")).count());
+        Assertions.assertEquals(3, tmp.filter(p -> p.getId().substring(3, 15).equals("conicytf____")).count());
+        Assertions.assertEquals(0, tmp.filter(p -> p.getId().substring(3, 15).equals("corda__h2020")).count());
+        Assertions.assertEquals(39, sc.textFile(workingDir.toString() + "/projectIds").count());
+    }
+}
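The assertions above suggest ProjectsSubsetSparkJob dumps only the projects whose id is not already in projectListPath and then extends that id list with the newly dumped ids (28 seed ids + 12 dumped = 40 in testAllNew; 28 + 11 = 39 in testMatchOne, where the single corda__h2020 project is already listed). A hypothetical reconstruction of that flow under this reading, not the job's actual code (helper names like extractId are invented):

import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class ProjectsSubsetSketch {

    static void run(JavaSparkContext sc, String sourcePath, String projectListPath, String outputPath) {
        JavaRDD<String> knownIds = sc.textFile(projectListPath);
        JavaRDD<String> projects = sc.textFile(sourcePath); // one serialized Project per line

        // dump only the projects whose id is not already in the list
        JavaPairRDD<String, String> byId = projects.keyBy(ProjectsSubsetSketch::extractId);
        JavaRDD<String> fresh = byId.subtractByKey(knownIds.keyBy(id -> id)).values();
        fresh.saveAsTextFile(outputPath);

        // the updated list then holds the seed ids plus the newly dumped ones
        // (the real job appears to rewrite the projectIds directory itself)
        knownIds.union(fresh.map(ProjectsSubsetSketch::extractId)).saveAsTextFile(projectListPath + ".updated");
    }

    static String extractId(String json) {
        // naive extraction, good enough for a sketch; the real job would use a JSON mapper
        int start = json.indexOf("\"id\":\"") + 6;
        return json.substring(start, json.indexOf('"', start));
    }
}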
@@ -122,6 +122,10 @@ class MappersTest {
         assertNotNull(p.getBestaccessright());
         assertEquals("OPEN", p.getBestaccessright().getClassid());

+        assertNotNull(p.getFulltext());
+        assertEquals(1, p.getFulltext().size());
+        assertEquals("https://oneecosystem.pensoft.net/article/13718/", p.getFulltext().get(0).getValue());
+
         // RESULT PROJECT
         List<Relation> resultProject = list
             .stream()
@@ -0,0 +1 @@
+{"ee":"SDSN - Greece","epos":"EPOS","enrmaps":"Energy Research","fet-h2020":"FET H2020","instruct":"Instruct-Eric","egi":"EGI Federation","euromarine":"Euromarine","covid-19":"COVID-19","dariah":"DARIAH EU","rda":"Research Data Alliance","clarin":"CLARIN","aginfra":"Agricultural and Food Sciences","risis":"RISI","fam":"Fisheries and Aquaculture Management","beopen":"Transport Research","elixir-gr":"ELIXIR GR","fet-fp7":"FET FP7","ifremer":"Ifremer","science-innovation-policy":"Science and Innovation Policy Studies","mes":"European Marine Scinece","oa-pg":"EC Post-Grant Open Access Pilot","ni":"Neuroinformatics","dh-ch":"Digital Humanities and Cultural Heritage"}
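The resource above is the communityMapPath fixture the tests point at: a flat JSON object mapping community ids to display labels. A hedged sketch of consuming it as a plain id-to-label map with Jackson (CommunityMapSketch is an illustrative name, not a class from the codebase):

import java.util.HashMap;

import com.fasterxml.jackson.databind.ObjectMapper;

public class CommunityMapSketch {

    public static void main(String[] args) throws Exception {
        // a two-entry excerpt of the map above, inlined for the sketch
        String json = "{\"ee\":\"SDSN - Greece\",\"dh-ch\":\"Digital Humanities and Cultural Heritage\"}";
        @SuppressWarnings("unchecked")
        HashMap<String, String> communityMap = new ObjectMapper().readValue(json, HashMap.class);
        System.out.println(communityMap.get("dh-ch")); // prints the community label
    }
}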
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -0,0 +1,8 @@
+NSF
+CIHR
+NWO
+NHMRC
+NIH
+MZOS
+SNSF
+EC
File diff suppressed because one or more lines are too long
@@ -0,0 +1 @@
+{"resultId":"50|a89337edbe55::43e8b61e5e8d682545cb867be8118585","projectsList":[{"id":"40|aka_________::01bb7b48e29d732a1c7bc5150b9195c4","code":"135027","acronym":null,"title":"Dynamic 3D resolution-enhanced low-coherence interferometric imaging / Consortium: Hi-Lo","funder":{"shortName":"AKA","name":"Academy of Finland","jurisdiction":"FI","fundingStream":null},"provenance":{"provenance":"Harvested","trust":"0.900000000000000022"},"validated":null},{"id":"40|aka_________::9d1af21dbd0f5bc719f71553d19a6b3a","code":"316061","acronym":null,"title":"Finnish Imaging of Degenerative Shoulder Study (FIMAGE): A study on the prevalence of degenerative imaging changes of the shoulder and their relevance to clinical symptoms in the general population.","funder":{"shortName":"AKA","name":"Academy of Finland","jurisdiction":"FI","fundingStream":null},"provenance":{"provenance":"Harvested","trust":"0.900000000000000022"},"validated":null}]}
@@ -0,0 +1,12 @@
+{"id":"40|aka_________::01bb7b48e29d732a1c7bc5150b9195c4","websiteurl":null,"code":"135027","acronym":null,"title":"Dynamic 3D resolution-enhanced low-coherence interferometric imaging / Consortium: Hi-Lo","startdate":null,"enddate":null,"callidentifier":"Fotoniikka ja modernit kuvantamismenetelmät LT","keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"AKA","name":"Academy of Finland","jurisdiction":"FI","funding_stream":null}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|aka_________::9d1af21dbd0f5bc719f71553d19a6b3a","websiteurl":null,"code":"316061","acronym":null,"title":"Finnish Imaging of Degenerative Shoulder Study (FIMAGE): A study on the prevalence of degenerative imaging changes of the shoulder and their relevance to clinical symptoms in the general population.","startdate":null,"enddate":null,"callidentifier":"Academy Project Funding TT","keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"AKA","name":"Academy of Finland","jurisdiction":"FI","funding_stream":null}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|anr_________::1f21edc5c902be305ee47148955c6e50","websiteurl":null,"code":"ANR-17-CE05-0033","acronym":"MOISE","title":"METAL OXIDES AS LOW LOADED NANO-IRIDIUM SUPPORT FOR COMPETITIVE WATER ELECTROLYSIS","startdate":null,"enddate":null,"callidentifier":null,"keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"ANR","name":"French National Research Agency (ANR)","jurisdiction":"FR","funding_stream":null}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|anr_________::547e78ffdcb7d72a1ef31058dede3a33","websiteurl":null,"code":"ANR-09-SEGI-0005","acronym":"GALAXY","title":"DEVELOPPEMENT COLLABORATIF DE SYSTEMES COMPLEXES SELON UNE APPROCHE GUIDEE PAR LES MODELES","startdate":null,"enddate":null,"callidentifier":null,"keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"ANR","name":"French National Research Agency (ANR)","jurisdiction":"FR","funding_stream":null}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|arc_________::838e781a8d479e27a11101421fd8b296","websiteurl":"http://purl.org/au-research/grants/arc/LE0347462","code":"LE0347462","acronym":null,"title":"Femtosecond laser micromachining facility","startdate":"2003-01-01","enddate":"2003-12-31","callidentifier":null,"keywords":"biomedical nanostructures,femtosecond laser machining,laser manufacturing,laser micromachining,microphotonics,photonic bandgap structures","openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"ARC","name":"Australian Research Council (ARC)","jurisdiction":"AU","funding_stream":{"id":"ARC::Linkage Infrastructure, Equipment and Facilities","description":"Linkage Infrastructure, Equipment and Facilities"}}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|arc_________::a461f180f7b6700c0499d4d3d53e58c7","websiteurl":"http://purl.org/au-research/grants/arc/LP140100567","code":"LP140100567","acronym":null,"title":"Linkage Projects - Grant ID: LP140100567","startdate":"2014-01-01","enddate":"2017-12-31","callidentifier":null,"keywords":"EDUCATIONAL MEASUREMENT; EDUCATIONAL MEASUREMENT; HIGH-STAKES TESTING; HIGH-STAKES TESTING; PERFORMANCE ASSESSMENT; PERFORMANCE ASSESSMENT; PERFORMANCE ASSESSMENT","openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"ARC","name":"Australian Research Council (ARC)","jurisdiction":"AU","funding_stream":{"id":"ARC::Linkage Projects","description":"Linkage Projects"}}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|arc_________::b46b9e07d4cea67ccf497520a75ad0c8","websiteurl":"http://purl.org/au-research/grants/arc/DP180101235","code":"DP180101235","acronym":null,"title":"Discovery Projects - Grant ID: DP180101235","startdate":"2018-01-01","enddate":"2023-12-31","callidentifier":null,"keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"ARC","name":"Australian Research Council (ARC)","jurisdiction":"AU","funding_stream":{"id":"ARC::Discovery Projects","description":"Discovery Projects"}}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|arc_________::c5f86314ce288f91a7f31c219b128fab","websiteurl":"http://purl.org/au-research/grants/arc/LE0989831","code":"LE0989831","acronym":null,"title":"The Australian Music Navigator: research infrastructure for discovering, accessing and analysing Australia's musical landscape","startdate":"2009-01-01","enddate":"2009-12-31","callidentifier":null,"keywords":"database metadata,digital sound,electroacoustic music,film music,music,music information retrieval","openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"ARC","name":"Australian Research Council (ARC)","jurisdiction":"AU","funding_stream":{"id":"ARC::Linkage Infrastructure, Equipment and Facilities","description":"Linkage Infrastructure, Equipment and Facilities"}}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|conicytf____::05539f3427ad605d7c1de0168f3e337f","websiteurl":"http://repositorio.conicyt.cl/handle/10533/183109","code":"3120023","acronym":null,"title":"SYNTHESIS AND STRUCTURE-ACTIVITY RELATIONSHIPS OF HETEROARYLISOQUINOLINE- AND PHENANTHRIDINEQUINONES AS ANTITUMOR AGENTS","startdate":"2011-01-01","enddate":"2014-01-28","callidentifier":null,"keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"CONICYT","name":"Comisión Nacional de Investigación Científica y Tecnológica","jurisdiction":"CL","funding_stream":{"id":"CONICYT::FONDECYT::POSTDOCTORADO","description":"Fondecyt fundings - Fondecyt stream, POSTDOCTORADO"}}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|conicytf____::96b47b91a6c061e31f626612b1650c03","websiteurl":"http://repositorio.conicyt.cl/handle/10533/163340","code":"1040240","acronym":null,"title":"ESTUDIO TEORICO-EXPERIMENTAL DE LA PERMEACION DE FLUIDOS SUPERCRITICOS Y LA SEPARACION DE MEZCLAS A ALTA PRESION A TRAVES DE MEMBRANAS MICROPOROSAS.","startdate":"2004-01-15","enddate":"2007-01-15","callidentifier":null,"keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"CONICYT","name":"Comisión Nacional de Investigación Científica y Tecnológica","jurisdiction":"CL","funding_stream":{"id":"CONICYT::FONDECYT::REGULAR","description":"Fondecyt fundings - Fondecyt stream, REGULAR"}}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|conicytf____::b122147e0a13f34cdb6311a9d714f9a5","websiteurl":"http://repositorio.conicyt.cl/handle/10533/162452","code":"1020683","acronym":null,"title":"SINTESIS Y CARACTERIZACION DE SALES CUATERNARIAS CON EL ANION CALCOFOSFATO [P2Qy]4- (Q=S,Se;y=6,7) PROPIEDADES FISICAS Y REACCIONES DE INCLUSION.","startdate":"2002-01-15","enddate":"2006-01-15","callidentifier":null,"keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"CONICYT","name":"Comisión Nacional de Investigación Científica y Tecnológica","jurisdiction":"CL","funding_stream":{"id":"CONICYT::FONDECYT::REGULAR","description":"Fondecyt fundings - Fondecyt stream, REGULAR"}}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|corda_______::132bac68f17bb81c451d9071be6e4d6d","websiteurl":null,"code":"628405","acronym":"ANIM","title":"Precisely Defined, Surface-Engineered Nanostructures via Crystallization-Driven Self-Assembly of Linear-Dendritic Block Copolymers","startdate":"2014-05-01","enddate":"2016-04-30","callidentifier":"FP7-PEOPLE-2013-IIF","keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"EC","name":"European Commission","jurisdiction":"EU","funding_stream":{"id":"EC::FP7::SP3::PEOPLE","description":"SEVENTH FRAMEWORK PROGRAMME - SP3-People - Marie-Curie Actions"}}],"summary":null,"granted":null,"h2020programme":[]}
@@ -0,0 +1,12 @@
+{"id":"40|aka_________::01bb7b48e29d732a1c7bc5150b9195c4","websiteurl":null,"code":"135027","acronym":null,"title":"Dynamic 3D resolution-enhanced low-coherence interferometric imaging / Consortium: Hi-Lo","startdate":null,"enddate":null,"callidentifier":"Fotoniikka ja modernit kuvantamismenetelmät LT","keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"AKA","name":"Academy of Finland","jurisdiction":"FI","funding_stream":null}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|aka_________::9d1af21dbd0f5bc719f71553d19a6b3a","websiteurl":null,"code":"316061","acronym":null,"title":"Finnish Imaging of Degenerative Shoulder Study (FIMAGE): A study on the prevalence of degenerative imaging changes of the shoulder and their relevance to clinical symptoms in the general population.","startdate":null,"enddate":null,"callidentifier":"Academy Project Funding TT","keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"AKA","name":"Academy of Finland","jurisdiction":"FI","funding_stream":null}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|anr_________::1f21edc5c902be305ee47148955c6e50","websiteurl":null,"code":"ANR-17-CE05-0033","acronym":"MOISE","title":"METAL OXIDES AS LOW LOADED NANO-IRIDIUM SUPPORT FOR COMPETITIVE WATER ELECTROLYSIS","startdate":null,"enddate":null,"callidentifier":null,"keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"ANR","name":"French National Research Agency (ANR)","jurisdiction":"FR","funding_stream":null}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|anr_________::547e78ffdcb7d72a1ef31058dede3a33","websiteurl":null,"code":"ANR-09-SEGI-0005","acronym":"GALAXY","title":"DEVELOPPEMENT COLLABORATIF DE SYSTEMES COMPLEXES SELON UNE APPROCHE GUIDEE PAR LES MODELES","startdate":null,"enddate":null,"callidentifier":null,"keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"ANR","name":"French National Research Agency (ANR)","jurisdiction":"FR","funding_stream":null}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|arc_________::838e781a8d479e27a11101421fd8b296","websiteurl":"http://purl.org/au-research/grants/arc/LE0347462","code":"LE0347462","acronym":null,"title":"Femtosecond laser micromachining facility","startdate":"2003-01-01","enddate":"2003-12-31","callidentifier":null,"keywords":"biomedical nanostructures,femtosecond laser machining,laser manufacturing,laser micromachining,microphotonics,photonic bandgap structures","openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"ARC","name":"Australian Research Council (ARC)","jurisdiction":"AU","funding_stream":{"id":"ARC::Linkage Infrastructure, Equipment and Facilities","description":"Linkage Infrastructure, Equipment and Facilities"}}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|arc_________::a461f180f7b6700c0499d4d3d53e58c7","websiteurl":"http://purl.org/au-research/grants/arc/LP140100567","code":"LP140100567","acronym":null,"title":"Linkage Projects - Grant ID: LP140100567","startdate":"2014-01-01","enddate":"2017-12-31","callidentifier":null,"keywords":"EDUCATIONAL MEASUREMENT; EDUCATIONAL MEASUREMENT; HIGH-STAKES TESTING; HIGH-STAKES TESTING; PERFORMANCE ASSESSMENT; PERFORMANCE ASSESSMENT; PERFORMANCE ASSESSMENT","openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"ARC","name":"Australian Research Council (ARC)","jurisdiction":"AU","funding_stream":{"id":"ARC::Linkage Projects","description":"Linkage Projects"}}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|arc_________::b46b9e07d4cea67ccf497520a75ad0c8","websiteurl":"http://purl.org/au-research/grants/arc/DP180101235","code":"DP180101235","acronym":null,"title":"Discovery Projects - Grant ID: DP180101235","startdate":"2018-01-01","enddate":"2023-12-31","callidentifier":null,"keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"ARC","name":"Australian Research Council (ARC)","jurisdiction":"AU","funding_stream":{"id":"ARC::Discovery Projects","description":"Discovery Projects"}}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|arc_________::c5f86314ce288f91a7f31c219b128fab","websiteurl":"http://purl.org/au-research/grants/arc/LE0989831","code":"LE0989831","acronym":null,"title":"The Australian Music Navigator: research infrastructure for discovering, accessing and analysing Australia's musical landscape","startdate":"2009-01-01","enddate":"2009-12-31","callidentifier":null,"keywords":"database metadata,digital sound,electroacoustic music,film music,music,music information retrieval","openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"ARC","name":"Australian Research Council (ARC)","jurisdiction":"AU","funding_stream":{"id":"ARC::Linkage Infrastructure, Equipment and Facilities","description":"Linkage Infrastructure, Equipment and Facilities"}}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|conicytf____::05539f3427ad605d7c1de0168f3e337f","websiteurl":"http://repositorio.conicyt.cl/handle/10533/183109","code":"3120023","acronym":null,"title":"SYNTHESIS AND STRUCTURE-ACTIVITY RELATIONSHIPS OF HETEROARYLISOQUINOLINE- AND PHENANTHRIDINEQUINONES AS ANTITUMOR AGENTS","startdate":"2011-01-01","enddate":"2014-01-28","callidentifier":null,"keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"CONICYT","name":"Comisión Nacional de Investigación Científica y Tecnológica","jurisdiction":"CL","funding_stream":{"id":"CONICYT::FONDECYT::POSTDOCTORADO","description":"Fondecyt fundings - Fondecyt stream, POSTDOCTORADO"}}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|conicytf____::96b47b91a6c061e31f626612b1650c03","websiteurl":"http://repositorio.conicyt.cl/handle/10533/163340","code":"1040240","acronym":null,"title":"ESTUDIO TEORICO-EXPERIMENTAL DE LA PERMEACION DE FLUIDOS SUPERCRITICOS Y LA SEPARACION DE MEZCLAS A ALTA PRESION A TRAVES DE MEMBRANAS MICROPOROSAS.","startdate":"2004-01-15","enddate":"2007-01-15","callidentifier":null,"keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"CONICYT","name":"Comisión Nacional de Investigación Científica y Tecnológica","jurisdiction":"CL","funding_stream":{"id":"CONICYT::FONDECYT::REGULAR","description":"Fondecyt fundings - Fondecyt stream, REGULAR"}}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|conicytf____::b122147e0a13f34cdb6311a9d714f9a5","websiteurl":"http://repositorio.conicyt.cl/handle/10533/162452","code":"1020683","acronym":null,"title":"SINTESIS Y CARACTERIZACION DE SALES CUATERNARIAS CON EL ANION CALCOFOSFATO [P2Qy]4- (Q=S,Se;y=6,7) PROPIEDADES FISICAS Y REACCIONES DE INCLUSION.","startdate":"2002-01-15","enddate":"2006-01-15","callidentifier":null,"keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"CONICYT","name":"Comisión Nacional de Investigación Científica y Tecnológica","jurisdiction":"CL","funding_stream":{"id":"CONICYT::FONDECYT::REGULAR","description":"Fondecyt fundings - Fondecyt stream, REGULAR"}}],"summary":null,"granted":null,"h2020programme":[]}
+{"id":"40|corda__h2020::bf5d35ec8d24ae4abfb4a1c6a0af3856","websiteurl":null,"code":"628405","acronym":"ANIM","title":"Precisely Defined, Surface-Engineered Nanostructures via Crystallization-Driven Self-Assembly of Linear-Dendritic Block Copolymers","startdate":"2014-05-01","enddate":"2016-04-30","callidentifier":"FP7-PEOPLE-2013-IIF","keywords":null,"openaccessmandateforpublications":false,"openaccessmandatefordataset":false,"subject":[],"funding":[{"shortName":"EC","name":"European Commission","jurisdiction":"EU","funding_stream":{"id":"EC::FP7::SP3::PEOPLE","description":"SEVENTH FRAMEWORK PROGRAMME - SP3-People - Marie-Curie Actions"}}],"summary":null,"granted":null,"h2020programme":[]}
@@ -0,0 +1,28 @@
+40|nih_________::4c32cdbc4c9949853f02219fc4780a30
+40|nih_________::b485512ef116af73bee79d50c8f9ca01
+40|nih_________::b44d9bc8e99d9a0477ac06897e3e9c19
+40|nih_________::7d2d2b7d1644a722a6bbcb031d82fec6
+40|nsf_________::6b2674b0341e07b818a56c6f0daa2633
+40|nih_________::96bb39aecc8f7b9f3b02ed36ef09538b
+40|nsf_________::88d92bdf20ec2fac3ed9740f962b4fad
+40|nih_________::4bb8c14729a0082378bb04db8321ce14
+40|nih_________::08a8eed6c17c6d8e427afcfd29f87c7b
+40|nsf_________::c314f3d35af1990121bf5b803937e112
+40|nih_________::3ad6a2e6ebd561206f0da69468337f50
+40|nih_________::d02c60c65a59629e69a30abcf2ceaed1
+40|nih_________::d5a241cc94253feb72181cde15f51e96
+40|nih_________::b5df718bbca69af50d4b7213e26af3f0
+40|nih_________::bc90893c1be80503578e48f6ef6b7061
+40|rcuk________::2c39b38c26c260b14a9816b88c91c132
+40|nih_________::ab103ad117cd0579df66f7592a7d4adf
+40|nih_________::147aa6ad8bd201e2a02c7b6cc3f68348
+40|corda__h2020::bf5d35ec8d24ae4abfb4a1c6a0af3856
+40|nih_________::b8083208156f2764d07c736ba9b49dd2
+40|nih_________::f4d1e0aece0e6a9eff8d054c28e082db
+40|nsf_________::56297da8b472a4be8ac3f09af813c9f6
+40|nsf_________::6b6dc3398eeebb3de1ab66e6eb8c5cb3
+40|nih_________::93289a36ebffb0bee3d6b01c6fc0a3d6
+40|nih_________::6c3b00dd4ae9d43d6630ff18f189ebae
+40|nih_________::1d983a87768f13bc8377b1b7d17290a2
+40|nih_________::c3b56e91859b114644c1403e892eb80f
+40|rcuk________::c1e15330fc7956063652f9c06e584548
@@ -999,17 +999,11 @@ public class XmlRecordFactory implements Serializable {
     private List<String> measuresAsXml(List<Measure> measures) {
         return measures
             .stream()
-            .flatMap(
-                m -> m
-                    .getUnit()
-                    .stream()
-                    .map(
-                        u -> Lists
-                            .newArrayList(
-                                new Tuple2<>("id", m.getId()),
-                                new Tuple2<>("key", u.getKey()),
-                                new Tuple2<>("value", u.getValue())))
-                    .map(l -> XmlSerializationUtils.asXmlElement("measure", l)))
+            .map(m -> {
+                List<Tuple2<String, String>> l = Lists.newArrayList(new Tuple2<>("id", m.getId()));
+                m.getUnit().forEach(kv -> l.add(new Tuple2<>(kv.getKey(), kv.getValue())));
+                return XmlSerializationUtils.asXmlElement("measure", l);
+            })
             .collect(Collectors.toList());
     }
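The practical effect of this rework: instead of one measure element per unit carrying generic key/value attributes, each measure now yields a single element whose unit keys become attributes (score, class), as the updated test below asserts. A self-contained sketch of the new shape, where asXmlElement is a stand-in for the project's XmlSerializationUtils, with attribute values taken from that test:

import java.util.LinkedHashMap;
import java.util.Map;

public class MeasureXmlSketch {

    // serialize a name plus attribute map as a self-closing XML element
    static String asXmlElement(String name, Map<String, String> attributes) {
        StringBuilder sb = new StringBuilder("<").append(name);
        attributes.forEach((k, v) -> sb.append(' ').append(k).append("=\"").append(v).append('"'));
        return sb.append("/>").toString();
    }

    public static void main(String[] args) {
        Map<String, String> attrs = new LinkedHashMap<>();
        attrs.put("id", "influence");
        attrs.put("score", "5.06690394631e-09");
        attrs.put("class", "C");
        // before: one element per unit, e.g. <measure id="influence" key="score" value="..."/>
        // after : <measure id="influence" score="5.06690394631e-09" class="C"/>
        System.out.println(asXmlElement("measure", attrs));
    }
}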
@@ -73,11 +73,20 @@ public class XmlRecordFactoryTest {
         assertEquals("EUR", doc.valueOf("//processingchargecurrency/text()"));

         assertEquals(
-            "1.00889953098e-08", doc.valueOf("//*[local-name() = 'result']/measure[./@id = 'influence']/@value"));
+            "5.06690394631e-09", doc.valueOf("//*[local-name() = 'result']/measure[./@id = 'influence']/@score"));
         assertEquals(
-            "30.6576853333", doc.valueOf("//*[local-name() = 'result']/measure[./@id = 'popularity_alt']/@value"));
+            "C", doc.valueOf("//*[local-name() = 'result']/measure[./@id = 'influence']/@class"));
+
         assertEquals(
-            "4.62970429725e-08", doc.valueOf("//*[local-name() = 'result']/measure[./@id = 'popularity']/@value"));
+            "0.0", doc.valueOf("//*[local-name() = 'result']/measure[./@id = 'popularity_alt']/@score"));
+        assertEquals(
+            "C", doc.valueOf("//*[local-name() = 'result']/measure[./@id = 'popularity_alt']/@class"));
+
+        assertEquals(
+            "3.11855618382e-09", doc.valueOf("//*[local-name() = 'result']/measure[./@id = 'popularity']/@score"));
+        assertEquals(
+            "C", doc.valueOf("//*[local-name() = 'result']/measure[./@id = 'popularity']/@class"));

     }

     @Test
@@ -5,7 +5,37 @@
       "unit": [
         {
           "key": "score",
-          "value": "1.00889953098e-08"
+          "value": "5.06690394631e-09",
+          "dataInfo": {
+            "invisible": false,
+            "inferred": true,
+            "deletedbyinference": false,
+            "trust": "",
+            "inferenceprovenance": "update",
+            "provenanceaction": {
+              "classid": "measure:bip",
+              "classname": "Inferred by OpenAIRE",
+              "schemeid": "dnet:provenanceActions",
+              "schemename": "dnet:provenanceActions"
+            }
+          }
+        },
+        {
+          "key": "class",
+          "value": "C",
+          "dataInfo": {
+            "invisible": false,
+            "inferred": true,
+            "deletedbyinference": false,
+            "trust": "",
+            "inferenceprovenance": "update",
+            "provenanceaction": {
+              "classid": "measure:bip",
+              "classname": "Inferred by OpenAIRE",
+              "schemeid": "dnet:provenanceActions",
+              "schemename": "dnet:provenanceActions"
+            }
+          }
         }
       ]
     },
@@ -14,7 +44,37 @@
       "unit": [
         {
           "key": "score",
-          "value": "30.6576853333"
+          "value": "0.0",
+          "dataInfo": {
+            "invisible": false,
+            "inferred": true,
+            "deletedbyinference": false,
+            "trust": "",
+            "inferenceprovenance": "update",
+            "provenanceaction": {
+              "classid": "measure:bip",
+              "classname": "Inferred by OpenAIRE",
+              "schemeid": "dnet:provenanceActions",
+              "schemename": "dnet:provenanceActions"
+            }
+          }
+        },
+        {
+          "key": "class",
+          "value": "C",
+          "dataInfo": {
+            "invisible": false,
+            "inferred": true,
+            "deletedbyinference": false,
+            "trust": "",
+            "inferenceprovenance": "update",
+            "provenanceaction": {
+              "classid": "measure:bip",
+              "classname": "Inferred by OpenAIRE",
+              "schemeid": "dnet:provenanceActions",
+              "schemename": "dnet:provenanceActions"
+            }
+          }
         }
       ]
     },
@@ -23,7 +83,115 @@
       "unit": [
         {
           "key": "score",
-          "value": "4.62970429725e-08"
+          "value": "3.11855618382e-09",
+          "dataInfo": {
+            "invisible": false,
+            "inferred": true,
+            "deletedbyinference": false,
+            "trust": "",
+            "inferenceprovenance": "update",
+            "provenanceaction": {
+              "classid": "measure:bip",
+              "classname": "Inferred by OpenAIRE",
+              "schemeid": "dnet:provenanceActions",
+              "schemename": "dnet:provenanceActions"
+            }
+          }
+        },
+        {
+          "key": "class",
+          "value": "C",
+          "dataInfo": {
+            "invisible": false,
+            "inferred": true,
+            "deletedbyinference": false,
+            "trust": "",
+            "inferenceprovenance": "update",
+            "provenanceaction": {
+              "classid": "measure:bip",
+              "classname": "Inferred by OpenAIRE",
+              "schemeid": "dnet:provenanceActions",
+              "schemename": "dnet:provenanceActions"
+            }
+          }
+        }
+      ]
+    },
+    {
+      "id": "influence_alt",
+      "unit": [
+        {
+          "key": "score",
+          "value": "0.0",
+          "dataInfo": {
+            "invisible": false,
+            "inferred": true,
+            "deletedbyinference": false,
+            "trust": "",
+            "inferenceprovenance": "update",
+            "provenanceaction": {
+              "classid": "measure:bip",
+              "classname": "Inferred by OpenAIRE",
+              "schemeid": "dnet:provenanceActions",
+              "schemename": "dnet:provenanceActions"
+            }
+          }
+        },
+        {
+          "key": "class",
+          "value": "C",
+          "dataInfo": {
+            "invisible": false,
+            "inferred": true,
+            "deletedbyinference": false,
+            "trust": "",
+            "inferenceprovenance": "update",
+            "provenanceaction": {
+              "classid": "measure:bip",
+              "classname": "Inferred by OpenAIRE",
+              "schemeid": "dnet:provenanceActions",
+              "schemename": "dnet:provenanceActions"
+            }
+          }
+        }
+      ]
+    },
+    {
+      "id": "impulse",
+      "unit": [
+        {
+          "key": "score",
+          "value": "0.0",
+          "dataInfo": {
+            "invisible": false,
+            "inferred": true,
+            "deletedbyinference": false,
+            "trust": "",
+            "inferenceprovenance": "update",
+            "provenanceaction": {
+              "classid": "measure:bip",
+              "classname": "Inferred by OpenAIRE",
+              "schemeid": "dnet:provenanceActions",
+              "schemename": "dnet:provenanceActions"
+            }
+          }
+        },
+        {
+          "key": "class",
+          "value": "C",
+          "dataInfo": {
+            "invisible": false,
+            "inferred": true,
+            "deletedbyinference": false,
+            "trust": "",
+            "inferenceprovenance": "update",
+            "provenanceaction": {
+              "classid": "measure:bip",
+              "classname": "Inferred by OpenAIRE",
+              "schemeid": "dnet:provenanceActions",
+              "schemename": "dnet:provenanceActions"
+            }
+          }
         }
       ]
     }
pom.xml
@@ -801,7 +801,7 @@
         <mockito-core.version>3.3.3</mockito-core.version>
         <mongodb.driver.version>3.4.2</mongodb.driver.version>
         <vtd.version>[2.12,3.0)</vtd.version>
-        <dhp-schemas.version>[2.12.0]</dhp-schemas.version>
+        <dhp-schemas.version>[2.12.2-SNAPSHOT]</dhp-schemas.version>
         <dnet-actionmanager-api.version>[4.0.3]</dnet-actionmanager-api.version>
         <dnet-actionmanager-common.version>[6.0.5]</dnet-actionmanager-common.version>
         <dnet-openaire-broker-common.version>[3.1.6]</dnet-openaire-broker-common.version>