forked from D-Net/dnet-hadoop
Merge branch 'beta' of https://code-repo.d4science.org/D-Net/dnet-hadoop into orcid_multipleworks_download
commit bfdf2dc390
@@ -5,13 +5,71 @@ import java.io.BufferedInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.Serializable;
+import java.util.Optional;

 import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 import eu.dnetlib.dhp.application.ArgumentApplicationParser;

 public class MakeTarArchive implements Serializable {

+	private static final Logger log = LoggerFactory.getLogger(MakeTarArchive.class);
+
+	public static void main(String[] args) throws Exception {
+		String jsonConfiguration = IOUtils
+			.toString(
+				MakeTarArchive.class
+					.getResourceAsStream(
+						"/eu/dnetlib/dhp/common/input_maketar_parameters.json"));
+
+		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
+		parser.parseArgument(args);
+
+		final String outputPath = parser.get("hdfsPath");
+		log.info("hdfsPath: {}", outputPath);
+
+		final String hdfsNameNode = parser.get("nameNode");
+		log.info("nameNode: {}", hdfsNameNode);
+
+		final String inputPath = parser.get("sourcePath");
+		log.info("input path : {}", inputPath);
+
+		final int gBperSplit = Optional
+			.ofNullable(parser.get("splitSize"))
+			.map(Integer::valueOf)
+			.orElse(10);
+
+		Configuration conf = new Configuration();
+		conf.set("fs.defaultFS", hdfsNameNode);
+
+		FileSystem fileSystem = FileSystem.get(conf);
+
+		makeTArArchive(fileSystem, inputPath, outputPath, gBperSplit);
+	}
+
+	public static void makeTArArchive(FileSystem fileSystem, String inputPath, String outputPath, int gBperSplit)
+		throws IOException {
+
+		RemoteIterator<LocatedFileStatus> dirIterator = fileSystem.listLocatedStatus(new Path(inputPath));
+
+		while (dirIterator.hasNext()) {
+			LocatedFileStatus fileStatus = dirIterator.next();
+
+			Path p = fileStatus.getPath();
+			String pathString = p.toString();
+			String entity = pathString.substring(pathString.lastIndexOf("/") + 1);
+
+			MakeTarArchive.tarMaxSize(fileSystem, pathString, outputPath + "/" + entity, entity, gBperSplit);
+		}
+	}
+
 	private static TarArchiveOutputStream getTar(FileSystem fileSystem, String outputPath) throws IOException {
 		Path hdfsWritePath = new Path(outputPath);
 		if (fileSystem.exists(hdfsWritePath)) {
@@ -21,7 +79,7 @@ public class MakeTarArchive implements Serializable {
 		return new TarArchiveOutputStream(fileSystem.create(hdfsWritePath).getWrappedStream());
 	}

-	private static void write(FileSystem fileSystem, String inputPath, String outputPath, String dir_name)
+	private static void write(FileSystem fileSystem, String inputPath, String outputPath, String dirName)
 		throws IOException {

 		Path hdfsWritePath = new Path(outputPath);
@@ -37,7 +95,7 @@ public class MakeTarArchive implements Serializable {
 			new Path(inputPath), true);

 		while (iterator.hasNext()) {
-			writeCurrentFile(fileSystem, dir_name, iterator, ar, 0);
+			writeCurrentFile(fileSystem, dirName, iterator, ar, 0);
 		}

 	}
@@ -59,32 +117,30 @@ public class MakeTarArchive implements Serializable {
 			new Path(inputPath), true);
 		boolean next = fileStatusListIterator.hasNext();
 		while (next) {
-			TarArchiveOutputStream ar = getTar(fileSystem, outputPath + "_" + (partNum + 1) + ".tar");
+			try (TarArchiveOutputStream ar = getTar(fileSystem, outputPath + "_" + (partNum + 1) + ".tar")) {

-			long current_size = 0;
-			while (next && current_size < bytesPerSplit) {
-				current_size = writeCurrentFile(fileSystem, dir_name, fileStatusListIterator, ar, current_size);
-				next = fileStatusListIterator.hasNext();
+				long currentSize = 0;
+				while (next && currentSize < bytesPerSplit) {
+					currentSize = writeCurrentFile(fileSystem, dir_name, fileStatusListIterator, ar, currentSize);
+					next = fileStatusListIterator.hasNext();

 				}

-			partNum += 1;
-			ar.close();
+				partNum += 1;
+			}

 		}

 	}

-	private static long writeCurrentFile(FileSystem fileSystem, String dir_name,
+	private static long writeCurrentFile(FileSystem fileSystem, String dirName,
 		RemoteIterator<LocatedFileStatus> fileStatusListIterator,
-		TarArchiveOutputStream ar, long current_size) throws IOException {
+		TarArchiveOutputStream ar, long currentSize) throws IOException {
 		LocatedFileStatus fileStatus = fileStatusListIterator.next();

 		Path p = fileStatus.getPath();
-		String p_string = p.toString();
-		if (!p_string.endsWith("_SUCCESS")) {
-			String name = p_string.substring(p_string.lastIndexOf("/") + 1);
+		String pString = p.toString();
+		if (!pString.endsWith("_SUCCESS")) {
+			String name = pString.substring(pString.lastIndexOf("/") + 1);
 			if (name.startsWith("part-") & name.length() > 10) {
 				String tmp = name.substring(0, 10);
 				if (name.contains(".")) {
@@ -92,9 +148,9 @@ public class MakeTarArchive implements Serializable {
 				}
 				name = tmp;
 			}
-			TarArchiveEntry entry = new TarArchiveEntry(dir_name + "/" + name);
+			TarArchiveEntry entry = new TarArchiveEntry(dirName + "/" + name);
 			entry.setSize(fileStatus.getLen());
-			current_size += fileStatus.getLen();
+			currentSize += fileStatus.getLen();
 			ar.putArchiveEntry(entry);

 			InputStream is = fileSystem.open(fileStatus.getPath());
@@ -110,7 +166,7 @@ public class MakeTarArchive implements Serializable {
 		ar.closeArchiveEntry();

 	}
-	return current_size;
+	return currentSize;
 }

 }
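Note: a minimal driver sketch for the new entry point above, assuming only what the diff shows (the hdfsPath/nameNode/sourcePath/splitSize parameters and the 10 GB default per tar part); the name node URI and the paths are hypothetical:

	Configuration conf = new Configuration();
	conf.set("fs.defaultFS", "hdfs://nn1:8020"); // hypothetical name node
	FileSystem fs = FileSystem.get(conf);
	// archives each child directory of /data/dump into /data/tar/<entity>_<n>.tar,
	// rolling over to a new part once roughly 10 GB have been written
	MakeTarArchive.makeTArArchive(fs, "/data/dump", "/data/tar", 10);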
@@ -191,7 +191,7 @@ public class ZenodoAPIClient implements Serializable {
 	 * @throws MissingConceptDoiException
 	 */
 	public int newVersion(String concept_rec_id) throws IOException, MissingConceptDoiException {
-		setDepositionId(concept_rec_id);
+		setDepositionId(concept_rec_id, 1);
 		String json = "{}";

 		OkHttpClient httpClient = new OkHttpClient.Builder().connectTimeout(600, TimeUnit.SECONDS).build();
@@ -253,9 +253,10 @@ public class ZenodoAPIClient implements Serializable {

 	}

-	private void setDepositionId(String concept_rec_id) throws IOException, MissingConceptDoiException {
+	private void setDepositionId(String concept_rec_id, Integer page) throws IOException, MissingConceptDoiException {

-		ZenodoModelList zenodoModelList = new Gson().fromJson(getPrevDepositions(), ZenodoModelList.class);
+		ZenodoModelList zenodoModelList = new Gson()
+			.fromJson(getPrevDepositions(String.valueOf(page)), ZenodoModelList.class);

 		for (ZenodoModel zm : zenodoModelList) {
 			if (zm.getConceptrecid().equals(concept_rec_id)) {
@@ -263,16 +264,23 @@ public class ZenodoAPIClient implements Serializable {
 				return;
 			}
 		}

-		throw new MissingConceptDoiException("The concept record id specified was missing in the list of depositions");
+		if (zenodoModelList.size() == 0)
+			throw new MissingConceptDoiException(
+				"The concept record id specified was missing in the list of depositions");
+
+		setDepositionId(concept_rec_id, page + 1);

 	}

-	private String getPrevDepositions() throws IOException {
+	private String getPrevDepositions(String page) throws IOException {

 		OkHttpClient httpClient = new OkHttpClient.Builder().connectTimeout(600, TimeUnit.SECONDS).build();

+		HttpUrl.Builder urlBuilder = HttpUrl.parse(urlString).newBuilder();
+		urlBuilder.addQueryParameter("page", page);
+		String url = urlBuilder.build().toString();
+
 		Request request = new Request.Builder()
-			.url(urlString)
+			.url(url)
 			.addHeader(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString()) // add request headers
 			.addHeader(HttpHeaders.AUTHORIZATION, "Bearer " + access_token)
 			.get()
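Note: the pagination introduced here is recursive; a sketch of the equivalent loop, with hypothetical names, makes the termination condition easier to see. The lookup walks the depositions list page by page and gives up only when Zenodo returns an empty page:

	int page = 1;
	while (true) {
		List<ZenodoModel> depositions = listDepositions(page); // hypothetical helper wrapping getPrevDepositions
		if (depositions.isEmpty())
			throw new MissingConceptDoiException(
				"The concept record id specified was missing in the list of depositions");
		for (ZenodoModel zm : depositions)
			if (zm.getConceptrecid().equals(conceptRecId))
				return; // deposition found, id recorded
		page++;
	}

One observation for review: the exception is now thrown only when a page comes back empty, so a non-matching id keeps requesting further pages until the listing is exhausted.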
@@ -419,4 +419,62 @@ public class OafMapperUtils {
 		m.setUnit(Arrays.asList(newKeyValueInstance(key, value, dataInfo)));
 		return m;
 	}
+
+	public static Relation getRelation(final String source,
+		final String target,
+		final String relType,
+		final String subRelType,
+		final String relClass,
+		final OafEntity entity) {
+		return getRelation(source, target, relType, subRelType, relClass, entity, null);
+	}
+
+	public static Relation getRelation(final String source,
+		final String target,
+		final String relType,
+		final String subRelType,
+		final String relClass,
+		final OafEntity entity,
+		final String validationDate) {
+		return getRelation(
+			source, target, relType, subRelType, relClass, entity.getCollectedfrom(), entity.getDataInfo(),
+			entity.getLastupdatetimestamp(), validationDate, null);
+	}
+
+	public static Relation getRelation(final String source,
+		final String target,
+		final String relType,
+		final String subRelType,
+		final String relClass,
+		final List<KeyValue> collectedfrom,
+		final DataInfo dataInfo,
+		final Long lastupdatetimestamp) {
+		return getRelation(
+			source, target, relType, subRelType, relClass, collectedfrom, dataInfo, lastupdatetimestamp, null, null);
+	}
+
+	public static Relation getRelation(final String source,
+		final String target,
+		final String relType,
+		final String subRelType,
+		final String relClass,
+		final List<KeyValue> collectedfrom,
+		final DataInfo dataInfo,
+		final Long lastupdatetimestamp,
+		final String validationDate,
+		final List<KeyValue> properties) {
+		final Relation rel = new Relation();
+		rel.setRelType(relType);
+		rel.setSubRelType(subRelType);
+		rel.setRelClass(relClass);
+		rel.setSource(source);
+		rel.setTarget(target);
+		rel.setCollectedfrom(collectedfrom);
+		rel.setDataInfo(dataInfo);
+		rel.setLastupdatetimestamp(lastupdatetimestamp);
+		rel.setValidated(StringUtils.isNotBlank(validationDate));
+		rel.setValidationDate(StringUtils.isNotBlank(validationDate) ? validationDate : null);
+		rel.setProperties(properties);
+		return rel;
+	}
 }
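Note: an illustrative call of the new overloads; the ids are made up and the ModelConstants names are assumptions from the schema library, not shown in this diff. Passing a validation date marks the relation as validated:

	Relation rel = OafMapperUtils.getRelation(
		"50|doi_________::ab12", "40|corda__h2020::xy89",
		ModelConstants.RESULT_PROJECT, ModelConstants.OUTCOME, ModelConstants.IS_PRODUCED_BY,
		entity, "2021-05-01");
	// rel.getValidated() == true, rel.getValidationDate() == "2021-05-01"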
@@ -118,7 +118,7 @@ public class CollectorWorker extends ReportingJob {
 				return new RestCollectorPlugin(clientParams);
 			case file:
 				return new FileCollectorPlugin(fileSystem);
-			case fileGZip:
+			case fileGzip:
 				return new FileGZipCollectorPlugin(fileSystem);
 			case other:
 				final CollectorPlugin.NAME.OTHER_NAME plugin = Optional
@@ -10,7 +10,7 @@ import eu.dnetlib.dhp.common.collection.CollectorException;
 public interface CollectorPlugin {

 	enum NAME {
-		oai, other, rest_json2xml, file, fileGZip;
+		oai, other, rest_json2xml, file, fileGzip;

 		public enum OTHER_NAME {
 			mdstore_mongodb_dump, mdstore_mongodb
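Note: the rename fileGZip -> fileGzip is case-sensitive wherever the plugin name is resolved from the datasource configuration, presumably via NAME.valueOf; a workflow still passing the old spelling would fail:

	CollectorPlugin.NAME plugin = CollectorPlugin.NAME.valueOf("fileGzip"); // ok
	CollectorPlugin.NAME old = CollectorPlugin.NAME.valueOf("fileGZip");    // throws IllegalArgumentException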
@@ -17,6 +17,9 @@ public class PMArticle implements Serializable {
 	 * the Pubmed Identifier
 	 */
 	private String pmid;

+	private String pmcId;
+
 	/**
 	 * the DOI
 	 */
@@ -122,7 +125,7 @@ public class PMArticle implements Serializable {

 	/**
 	 * The full journal title (taken from NLM cataloging data following NLM rules for how to compile a serial name) is exported in this element.
-	 * Some characters that are not part of the NLM MEDLINE/PubMed Character Set reside in a relatively small number of full journal titles.
+	 * Some characters that are not part of the NLM MEDLINE/PubMed Character Set reside in a relatively small number of full journal titles.
 	 * The NLM journal title abbreviation is exported in the <MedlineTA> element.
 	 *
 	 * @return the pubmed Journal Extracted
@@ -140,10 +143,11 @@ public class PMArticle implements Serializable {
 	}

 	/**
-	 * English-language abstracts are taken directly from the published article.
-	 * If the article does not have a published abstract, the National Library of Medicine does not create one,
-	 * thus the record lacks the <Abstract> and <AbstractText> elements. However, in the absence of a formally
-	 * labeled abstract in the published article, text from a substantive "summary", "summary and conclusions" or "conclusions and summary" may be used.
+	 * <ArticleTitle> contains the entire title of the journal article. <ArticleTitle> is always in English;
+	 * those titles originally published in a non-English language and translated for <ArticleTitle> are enclosed in square brackets.
+	 * All titles end with a period unless another punctuation mark such as a question mark or bracket is present.
+	 * Explanatory information about the title itself is enclosed in parentheses, e.g.: (author's transl).
+	 * Corporate/collective authors may appear at the end of <ArticleTitle> for citations up to about the year 2000.
 	 *
 	 * @return the extracted pubmed Title
 	 */
@@ -250,4 +254,13 @@ public class PMArticle implements Serializable {
 	public List<PMGrant> getGrants() {
 		return grants;
 	}
+
+	public String getPmcId() {
+		return pmcId;
+	}
+
+	public PMArticle setPmcId(String pmcId) {
+		this.pmcId = pmcId;
+		return this;
+	}
 }
@@ -584,7 +584,12 @@ object DataciteToOAFTransformation {
       JField("awardUri", JString(awardUri)) <- fundingReferences
     } yield awardUri

+    val oid = result.getId
     result.setId(IdentifierFactory.createIdentifier(result))
+    if (!result.getId.equalsIgnoreCase(oid)) {
+      result.setOriginalId((oid :: List(doi)).asJava)
+    }
+
     var relations: List[Relation] =
       awardUris.flatMap(a => get_projectRelation(a, result.getId)).filter(r => r != null)
@@ -98,6 +98,7 @@ class PMParser(xml: XMLEventReader) extends Iterator[PMArticle] {
           case "PMID" => currentArticle.setPmid(text.trim)
           case "ArticleId" =>
             if ("doi".equalsIgnoreCase(currentArticleType)) currentArticle.setDoi(text.trim)
+            if ("pmc".equalsIgnoreCase(currentArticleType)) currentArticle.setPmcId(text.trim)
           case "Language" => currentArticle.setLanguage(text.trim)
           case "ISSN" => currentJournal.setIssn(text.trim)
           case "GrantID" => currentGrant.setGrantID(text.trim)
@@ -4,9 +4,12 @@ import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup
 import eu.dnetlib.dhp.schema.common.ModelConstants
 import eu.dnetlib.dhp.schema.oaf.utils.{GraphCleaningFunctions, IdentifierFactory, OafMapperUtils, PidType}
 import eu.dnetlib.dhp.schema.oaf._
-import collection.JavaConverters._
+import eu.dnetlib.dhp.utils.DHPUtils
 import org.apache.commons.lang3.StringUtils

+import collection.JavaConverters._
 import java.util.regex.Pattern
+import scala.collection.mutable.ListBuffer

 /**
   */
@@ -14,6 +17,9 @@ object PubMedToOaf {

   val SUBJ_CLASS = "keywords"

+  val OAI_HEADER = "oai:pubmedcentral.nih.gov:"
+  val OLD_PMC_PREFIX = "od_______267::"
+
   val urlMap = Map(
     "pmid" -> "https://pubmed.ncbi.nlm.nih.gov/",
     "doi" -> "https://dx.doi.org/"
@@ -50,6 +56,15 @@ object PubMedToOaf {
       null
   }

+  def createOriginalOpenaireId(article: PMArticle): String = {
+    if (StringUtils.isNotEmpty(article.getPmcId)) {
+      val md5 = DHPUtils.md5(s"$OAI_HEADER${article.getPmcId.replace("PMC", "")}")
+      s"$OLD_PMC_PREFIX$md5"
+    } else
+      null
+
+  }
+
   /** Create an instance of class extends Result
     * starting from OAF instanceType value
     *
@@ -122,16 +137,27 @@ object PubMedToOaf {
       return null

     // MAP PMID into pid with classid = classname = pmid
-    val pidList: List[StructuredProperty] = List(
-      OafMapperUtils.structuredProperty(
-        article.getPmid,
-        PidType.pmid.toString,
-        PidType.pmid.toString,
+    val pidList = ListBuffer[StructuredProperty]()
+
+    pidList += OafMapperUtils.structuredProperty(
+      article.getPmid,
+      PidType.pmid.toString,
+      PidType.pmid.toString,
       ModelConstants.DNET_PID_TYPES,
       ModelConstants.DNET_PID_TYPES,
       dataInfo
     )
-    )
+
+    if (StringUtils.isNotBlank(article.getPmcId)) {
+      pidList += OafMapperUtils.structuredProperty(
+        article.getPmcId,
+        PidType.pmc.toString,
+        PidType.pmc.toString,
+        ModelConstants.DNET_PID_TYPES,
+        ModelConstants.DNET_PID_TYPES,
+        dataInfo
+      )
+    }
     if (pidList == null)
       return null

@@ -186,6 +212,7 @@ object PubMedToOaf {
     val urlLists: List[String] = pidList
       .map(s => (urlMap.getOrElse(s.getQualifier.getClassid, ""), s.getValue))
       .filter(t => t._1.nonEmpty)
+      .toList
       .map(t => t._1 + t._2)
     if (urlLists != null)
       pubmedInstance.setUrl(urlLists.asJava)
@@ -262,7 +289,14 @@ object PubMedToOaf {

     if (authors != null && authors.nonEmpty)
       result.setAuthor(authors.asJava)
-    result.setOriginalId(pidList.map(s => s.getValue).asJava)
+
+    if (StringUtils.isNotEmpty(article.getPmcId)) {
+      val originalIDS = ListBuffer[String]()
+      originalIDS += createOriginalOpenaireId(article)
+      pidList.map(s => s.getValue).foreach(p => originalIDS += p)
+      result.setOriginalId(originalIDS.asJava)
+    } else
+      result.setOriginalId(pidList.map(s => s.getValue).asJava)

     result.setId(article.getPmid)
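Note: a worked example of the legacy-id reconstruction above (written in Java for brevity; the input PMC id is hypothetical). For pmcId "PMC376844" the method hashes the old OAI identifier and prefixes it with the retired PMC datasource prefix:

	String pmcId = "PMC376844"; // hypothetical input
	String md5 = DHPUtils.md5("oai:pubmedcentral.nih.gov:" + pmcId.replace("PMC", ""));
	String originalId = "od_______267::" + md5; // same value createOriginalOpenaireId would return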
@@ -2,6 +2,7 @@
 package eu.dnetlib.dhp.actionmanager.ror;

 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;

 import java.io.FileInputStream;
 import java.util.List;
@@ -38,25 +39,20 @@ class GenerateRorActionSetJobTest {
 			.readValue(IOUtils.toString(getClass().getResourceAsStream("ror_org.json")), RorOrganization.class);
 		final List<AtomicAction<? extends Oaf>> aas = GenerateRorActionSetJob.convertRorOrg(r);

-		Assertions.assertEquals(3, aas.size());
+		Assertions.assertEquals(1, aas.size());
 		assertEquals(Organization.class, aas.get(0).getClazz());
-		assertEquals(Relation.class, aas.get(1).getClazz());
-		assertEquals(Relation.class, aas.get(2).getClazz());

 		final Organization o = (Organization) aas.get(0).getPayload();
-		final Relation r1 = (Relation) aas.get(1).getPayload();
-		final Relation r2 = (Relation) aas.get(2).getPayload();

-		assertEquals(o.getId(), r1.getSource());
-		assertEquals(r1.getSource(), r2.getTarget());
-		assertEquals(r2.getSource(), r1.getTarget());
-		assertEquals(ModelConstants.IS_PARENT_OF, r1.getRelClass());
-		assertEquals(ModelConstants.IS_CHILD_OF, r2.getRelClass());
+		assertNotNull(o);

+		assertNotNull(o.getCountry());
+		assertEquals("AU", o.getCountry().getClassid());
+
+		assertNotNull(o.getLegalname());
+		assertEquals("Mount Stromlo Observatory", o.getLegalname().getValue());

 		System.out.println(mapper.writeValueAsString(o));
-		System.out.println(mapper.writeValueAsString(r1));
-		System.out.println(mapper.writeValueAsString(r2));

 	}

 	@Test
@@ -19,7 +19,9 @@ import org.apache.spark.sql.Encoder;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.SparkSession;
 import org.apache.spark.util.LongAccumulator;
-import org.junit.jupiter.api.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.junit.jupiter.api.io.TempDir;
 import org.mockito.junit.jupiter.MockitoExtension;
@@ -50,7 +52,7 @@ class TransformationJobTest extends AbstractVocabularyTest {
 	@Test
 	@DisplayName("Test Date cleaner")
 	void testDateCleaner() throws Exception {
-		DateCleaner dc = new DateCleaner();
+		final DateCleaner dc = new DateCleaner();
 		assertEquals("1982-09-20", dc.clean("20/09/1982"));
 		assertEquals("2002-09-20", dc.clean("20-09-2002"));
 		assertEquals("2002-09-20", dc.clean("2002-09-20"));
@@ -68,9 +70,9 @@ class TransformationJobTest extends AbstractVocabularyTest {
 		mr.setProvenance(new Provenance("DSID", "DSNAME", "PREFIX"));
 		mr.setBody(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/input_zenodo.xml")));
 		// We Load the XSLT transformation Rule from the classpath
-		XSLTTransformationFunction tr = loadTransformationRule("/eu/dnetlib/dhp/transform/zenodo_tr.xslt");
+		final XSLTTransformationFunction tr = loadTransformationRule("/eu/dnetlib/dhp/transform/zenodo_tr.xslt");

-		MetadataRecord result = tr.call(mr);
+		final MetadataRecord result = tr.call(mr);

 		// Print the record
 		System.out.println(result.getBody());
@@ -86,9 +88,9 @@ class TransformationJobTest extends AbstractVocabularyTest {
 		mr.setProvenance(new Provenance("DSID", "DSNAME", "PREFIX"));
 		mr.setBody(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/input_itgv4.xml")));
 		// We Load the XSLT transformation Rule from the classpath
-		XSLTTransformationFunction tr = loadTransformationRule("/eu/dnetlib/dhp/transform/zenodo_tr.xslt");
+		final XSLTTransformationFunction tr = loadTransformationRule("/eu/dnetlib/dhp/transform/zenodo_tr.xslt");

-		MetadataRecord result = tr.call(mr);
+		final MetadataRecord result = tr.call(mr);

 		// Print the record
 		System.out.println(result.getBody());
@@ -108,9 +110,9 @@ class TransformationJobTest extends AbstractVocabularyTest {
 		mr.setProvenance(new Provenance("DSID", "DSNAME", "PREFIX"));
 		mr.setBody(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/input_itgv4.xml")));
 		// We Load the XSLT transformation Rule from the classpath
-		XSLTTransformationFunction tr = loadTransformationRule(xslTransformationScript);
+		final XSLTTransformationFunction tr = loadTransformationRule(xslTransformationScript);

-		MetadataRecord result = tr.call(mr);
+		final MetadataRecord result = tr.call(mr);

 		// Print the record
 		System.out.println(result.getBody());
@@ -129,9 +131,9 @@ class TransformationJobTest extends AbstractVocabularyTest {
 		mr.setProvenance(new Provenance("DSID", "DSNAME", "PREFIX"));
 		mr.setBody(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/input_omicsdi.xml")));
 		// We Load the XSLT transformation Rule from the classpath
-		XSLTTransformationFunction tr = loadTransformationRule(xslTransformationScript);
+		final XSLTTransformationFunction tr = loadTransformationRule(xslTransformationScript);

-		MetadataRecord result = tr.call(mr);
+		final MetadataRecord result = tr.call(mr);

 		// Print the record
 		System.out.println(result.getBody());
@@ -140,7 +142,8 @@ class TransformationJobTest extends AbstractVocabularyTest {

 	@Test
 	@DisplayName("Test TransformSparkJobNode.main with oaiOpenaire_datacite (v4)")
-	void transformTestITGv4OAIdatacite(@TempDir Path testDir) throws Exception {
+	void transformTestITGv4OAIdatacite(@TempDir
+	final Path testDir) throws Exception {

 		try (SparkSession spark = SparkSession.builder().config(sparkConf).getOrCreate()) {

@@ -203,7 +206,8 @@ class TransformationJobTest extends AbstractVocabularyTest {

 	@Test
 	@DisplayName("Test TransformSparkJobNode.main")
-	void transformTest(@TempDir Path testDir) throws Exception {
+	void transformTest(@TempDir
+	final Path testDir) throws Exception {

 		try (SparkSession spark = SparkSession.builder().config(sparkConf).getOrCreate()) {

@@ -256,6 +260,25 @@ class TransformationJobTest extends AbstractVocabularyTest {
 		}
 	}

+	@Test
+	@DisplayName("Test Transform Single XML using cnr_explora_tr XSLTTransformator")
+	void testCnrExploraTransformSaxonHE() throws Exception {
+
+		// We Set the input Record getting the XML from the classpath
+		final MetadataRecord mr = new MetadataRecord();
+
+		mr.setProvenance(new Provenance("openaire____::cnr_explora", "CNR ExploRA", "cnr_________"));
+		mr.setBody(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/transform/input_cnr_explora.xml")));
+		// We Load the XSLT transformation Rule from the classpath
+		final XSLTTransformationFunction tr = loadTransformationRule("/eu/dnetlib/dhp/transform/cnr_explora_tr.xslt");
+
+		final MetadataRecord result = tr.call(mr);
+
+		// Print the record
+		System.out.println(result.getBody());
+		// TODO Create significant Assert
+	}
+
 	private XSLTTransformationFunction loadTransformationRule(final String path) throws Exception {
 		final String trValue = IOUtils.toString(this.getClass().getResourceAsStream(path));
 		final LongAccumulator la = new LongAccumulator();
@@ -195,7 +195,9 @@
 					<Title>Biochemical and biophysical research communications</Title>
 					<ISOAbbreviation>Biochem Biophys Res Commun</ISOAbbreviation>
 				</Journal>
-				<ArticleTitle>Delineation of the intimate details of the backbone conformation of pyridine nucleotide coenzymes in aqueous solution.</ArticleTitle>
+				<ArticleTitle>Delineation of the intimate details of the backbone conformation of pyridine nucleotide
+					coenzymes in aqueous solution.
+				</ArticleTitle>
 				<Pagination>
 					<MedlinePgn>1173-9</MedlinePgn>
 				</Pagination>
@@ -473,7 +475,9 @@
 					<Title>Biochemical and biophysical research communications</Title>
 					<ISOAbbreviation>Biochem Biophys Res Commun</ISOAbbreviation>
 				</Journal>
-				<ArticleTitle>Effect of chloroquine on cultured fibroblasts: release of lysosomal hydrolases and inhibition of their uptake.</ArticleTitle>
+				<ArticleTitle>Effect of chloroquine on cultured fibroblasts: release of lysosomal hydrolases and
+					inhibition of their uptake.
+				</ArticleTitle>
 				<Pagination>
 					<MedlinePgn>1338-43</MedlinePgn>
 				</Pagination>
@@ -657,7 +661,8 @@
 					<Title>Biochemical and biophysical research communications</Title>
 					<ISOAbbreviation>Biochem Biophys Res Commun</ISOAbbreviation>
 				</Journal>
-				<ArticleTitle>Atomic models for the polypeptide backbones of myohemerythrin and hemerythrin.</ArticleTitle>
+				<ArticleTitle>Atomic models for the polypeptide backbones of myohemerythrin and hemerythrin.
+				</ArticleTitle>
 				<Pagination>
 					<MedlinePgn>1349-56</MedlinePgn>
 				</Pagination>
@@ -1627,7 +1632,9 @@
 					<Title>Biochemical pharmacology</Title>
 					<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
 				</Journal>
-				<ArticleTitle>Comparison between procaine and isocarboxazid metabolism in vitro by a liver microsomal amidase-esterase.</ArticleTitle>
+				<ArticleTitle>Comparison between procaine and isocarboxazid metabolism in vitro by a liver microsomal
+					amidase-esterase.
+				</ArticleTitle>
 				<Pagination>
 					<MedlinePgn>1517-21</MedlinePgn>
 				</Pagination>
@@ -2030,7 +2037,9 @@
 					<Title>Biochemical pharmacology</Title>
 					<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
 				</Journal>
-				<ArticleTitle>Radiochemical assay of glutathione S-epoxide transferase and its enhancement by phenobarbital in rat liver in vivo.</ArticleTitle>
+				<ArticleTitle>Radiochemical assay of glutathione S-epoxide transferase and its enhancement by
+					phenobarbital in rat liver in vivo.
+				</ArticleTitle>
 				<Pagination>
 					<MedlinePgn>1569-72</MedlinePgn>
 				</Pagination>
@@ -2350,7 +2359,9 @@
 					<Title>Biochemical pharmacology</Title>
 					<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
 				</Journal>
-				<ArticleTitle>Identification of adenylate cyclase-coupled beta-adrenergic receptors with radiolabeled beta-adrenergic antagonists.</ArticleTitle>
+				<ArticleTitle>Identification of adenylate cyclase-coupled beta-adrenergic receptors with radiolabeled
+					beta-adrenergic antagonists.
+				</ArticleTitle>
 				<Pagination>
 					<MedlinePgn>1651-8</MedlinePgn>
 				</Pagination>
@@ -2598,7 +2609,9 @@
 					<Title>Biochemical pharmacology</Title>
 					<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
 				</Journal>
-				<ArticleTitle>The effect of adrenaline and of alpha- and beta-adrenergic blocking agents on ATP concentration and on incorporation of 32Pi into ATP in rat fat cells.</ArticleTitle>
+				<ArticleTitle>The effect of adrenaline and of alpha- and beta-adrenergic blocking agents on ATP
+					concentration and on incorporation of 32Pi into ATP in rat fat cells.
+				</ArticleTitle>
 				<Pagination>
 					<MedlinePgn>1659-62</MedlinePgn>
 				</Pagination>
@@ -2851,7 +2864,9 @@
 					<Title>Biochemical pharmacology</Title>
 					<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
 				</Journal>
-				<ArticleTitle>Action of propranolol on mitochondrial functions--effects on energized ion fluxes in the presence of valinomycin.</ArticleTitle>
+				<ArticleTitle>Action of propranolol on mitochondrial functions--effects on energized ion fluxes in the
+					presence of valinomycin.
+				</ArticleTitle>
 				<Pagination>
 					<MedlinePgn>1701-5</MedlinePgn>
 				</Pagination>
@@ -3265,7 +3280,8 @@
 				</Chemical>
 				<Chemical>
 					<RegistryNumber>EC 2.6.1.16</RegistryNumber>
-					<NameOfSubstance UI="D005945">Glutamine-Fructose-6-Phosphate Transaminase (Isomerizing)</NameOfSubstance>
+					<NameOfSubstance UI="D005945">Glutamine-Fructose-6-Phosphate Transaminase (Isomerizing)
+					</NameOfSubstance>
 				</Chemical>
 				<Chemical>
 					<RegistryNumber>EC 2.7.-</RegistryNumber>
@@ -3324,7 +3340,9 @@
 					<DescriptorName UI="D005944" MajorTopicYN="N">Glucosamine</DescriptorName>
 				</MeshHeading>
 				<MeshHeading>
-					<DescriptorName UI="D005945" MajorTopicYN="N">Glutamine-Fructose-6-Phosphate Transaminase (Isomerizing)</DescriptorName>
+					<DescriptorName UI="D005945" MajorTopicYN="N">Glutamine-Fructose-6-Phosphate Transaminase
+						(Isomerizing)
+					</DescriptorName>
 					<QualifierName UI="Q000378" MajorTopicYN="N">metabolism</QualifierName>
 				</MeshHeading>
 				<MeshHeading>
@@ -3463,7 +3481,8 @@
 					<Title>Biochemical pharmacology</Title>
 					<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
 				</Journal>
-				<ArticleTitle>Inhibition of aldehyde reductase by acidic metabolites of the biogenic amines.</ArticleTitle>
+				<ArticleTitle>Inhibition of aldehyde reductase by acidic metabolites of the biogenic amines.
+				</ArticleTitle>
 				<Pagination>
 					<MedlinePgn>1731-3</MedlinePgn>
 				</Pagination>
@@ -3696,7 +3715,9 @@
 					<Title>Biochemical pharmacology</Title>
 					<ISOAbbreviation>Biochem Pharmacol</ISOAbbreviation>
 				</Journal>
-				<ArticleTitle>Effects of 5,6-dihydroxytryptamine on tyrosine-hydroxylase activity in central catecholaminergic neurons of the rat.</ArticleTitle>
+				<ArticleTitle>Effects of 5,6-dihydroxytryptamine on tyrosine-hydroxylase activity in central
+					catecholaminergic neurons of the rat.
+				</ArticleTitle>
 				<Pagination>
 					<MedlinePgn>1739-42</MedlinePgn>
 				</Pagination>
@@ -4602,12 +4623,19 @@
 					<Title>Arzneimittel-Forschung</Title>
 					<ISOAbbreviation>Arzneimittelforschung</ISOAbbreviation>
 				</Journal>
-				<ArticleTitle>[Biochemical studies on camomile components/III. In vitro studies about the antipeptic activity of (--)-alpha-bisabolol (author's transl)].</ArticleTitle>
+				<ArticleTitle>[Biochemical studies on camomile components/III. In vitro studies about the antipeptic
+					activity of (--)-alpha-bisabolol (author's transl)].
+				</ArticleTitle>
 				<Pagination>
 					<MedlinePgn>1352-4</MedlinePgn>
 				</Pagination>
 				<Abstract>
-					<AbstractText>(--)-alpha-Bisabolol has a primary antipeptic action depending on dosage, which is not caused by an alteration of the pH-value. The proteolytic activity of pepsin is reduced by 50 percent through addition of bisabolol in the ratio of 1/0.5. The antipeptic action of bisabolol only occurs in case of direct contact. In case of a previous contact with the substrate, the inhibiting effect is lost.</AbstractText>
+					<AbstractText>(--)-alpha-Bisabolol has a primary antipeptic action depending on dosage, which is not
+						caused by an alteration of the pH-value. The proteolytic activity of pepsin is reduced by 50
+						percent through addition of bisabolol in the ratio of 1/0.5. The antipeptic action of bisabolol
+						only occurs in case of direct contact. In case of a previous contact with the substrate, the
+						inhibiting effect is lost.
+					</AbstractText>
 				</Abstract>
 				<AuthorList CompleteYN="Y">
 					<Author ValidYN="Y">
@@ -4626,7 +4654,9 @@
 					<PublicationType UI="D004740">English Abstract</PublicationType>
 					<PublicationType UI="D016428">Journal Article</PublicationType>
 				</PublicationTypeList>
-				<VernacularTitle>Biochemische Untersuchungen von Kamilleninhaltsstoffen. III. In-vitro-Versuche über die antipeptische Wirkung des (-)-alpha-Bisabolols</VernacularTitle>
+				<VernacularTitle>Biochemische Untersuchungen von Kamilleninhaltsstoffen. III. In-vitro-Versuche über die
+					antipeptische Wirkung des (-)-alpha-Bisabolols
+				</VernacularTitle>
 			</Article>
 			<MedlineJournalInfo>
 				<Country>Germany</Country>
@@ -4753,12 +4783,37 @@
 					<Title>Arzneimittel-Forschung</Title>
 					<ISOAbbreviation>Arzneimittelforschung</ISOAbbreviation>
 				</Journal>
-				<ArticleTitle>[Demonstration of tumor inhibiting properties of a strongly immunostimulating low-molecular weight substance. Comparative studies with ifosfamide on the immuno-labile DS carcinosarcoma. Stimulation of the autoimmune activity for approx. 20 days by BA 1, a N-(2-cyanoethylene)-urea. Novel prophylactic possibilities].</ArticleTitle>
+				<ArticleTitle>[Demonstration of tumor inhibiting properties of a strongly immunostimulating
+					low-molecular weight substance. Comparative studies with ifosfamide on the immuno-labile DS
+					carcinosarcoma. Stimulation of the autoimmune activity for approx. 20 days by BA 1, a
+					N-(2-cyanoethylene)-urea. Novel prophylactic possibilities].
+				</ArticleTitle>
 				<Pagination>
 					<MedlinePgn>1369-79</MedlinePgn>
 				</Pagination>
 				<Abstract>
-					<AbstractText>A report is given on the recent discovery of outstanding immunological properties in BA 1 [N-(2-cyanoethylene)-urea] having a (low) molecular mass M = 111.104. Experiments in 214 DS carcinosarcoma bearing Wistar rats have shown that BA 1, at a dosage of only about 12 percent LD50 (150 mg kg) and negligible lethality (1.7 percent), results in a recovery rate of 40 percent without hyperglycemia and, in one test, of 80 percent with hyperglycemia. Under otherwise unchanged conditions the reference substance ifosfamide (IF) -- a further development of cyclophosphamide -- applied without hyperglycemia in its most efficient dosage of 47 percent LD50 (150 mg kg) brought about a recovery rate of 25 percent at a lethality of 18 percent. (Contrary to BA 1, 250-min hyperglycemia caused no further improvement of the recovery rate.) However this comparison is characterized by the fact that both substances exhibit two quite different (complementary) mechanisms of action. Leucocyte counts made after application of the said cancerostatics and dosages have shown a pronounced stimulation with BA 1 and with ifosfamide, the known suppression in the post-therapeutic interval usually found with standard cancerostatics. In combination with the cited plaque test for BA 1, blood pictures then allow conclusions on the immunity status. Since IF can be taken as one of the most efficient cancerostatics--there is no other chemotherapeutic known up to now that has a more significant effect on the DS carcinosarcoma in rats -- these findings are of special importance. Finally, the total amount of leucocytes and lymphocytes as well as their time behaviour was determined from the blood picture of tumour-free rats after i.v. application of BA 1. The thus obtained numerical values clearly show that further research work on the prophylactic use of this substance seems to be necessary and very promising.</AbstractText>
+					<AbstractText>A report is given on the recent discovery of outstanding immunological properties in
+						BA 1 [N-(2-cyanoethylene)-urea] having a (low) molecular mass M = 111.104. Experiments in 214 DS
+						carcinosarcoma bearing Wistar rats have shown that BA 1, at a dosage of only about 12 percent
+						LD50 (150 mg kg) and negligible lethality (1.7 percent), results in a recovery rate of 40
+						percent without hyperglycemia and, in one test, of 80 percent with hyperglycemia. Under
+						otherwise unchanged conditions the reference substance ifosfamide (IF) -- a further development
+						of cyclophosphamide -- applied without hyperglycemia in its most efficient dosage of 47 percent
+						LD50 (150 mg kg) brought about a recovery rate of 25 percent at a lethality of 18 percent.
+						(Contrary to BA 1, 250-min hyperglycemia caused no further improvement of the recovery rate.)
+						However this comparison is characterized by the fact that both substances exhibit two quite
+						different (complementary) mechanisms of action. Leucocyte counts made after application of the
+						said cancerostatics and dosages have shown a pronounced stimulation with BA 1 and with
+						ifosfamide, the known suppression in the post-therapeutic interval usually found with standard
+						cancerostatics. In combination with the cited plaque test for BA 1, blood pictures then allow
+						conclusions on the immunity status. Since IF can be taken as one of the most efficient
+						cancerostatics--there is no other chemotherapeutic known up to now that has a more significant
+						effect on the DS carcinosarcoma in rats -- these findings are of special importance. Finally,
+						the total amount of leucocytes and lymphocytes as well as their time behaviour was determined
+						from the blood picture of tumour-free rats after i.v. application of BA 1. The thus obtained
+						numerical values clearly show that further research work on the prophylactic use of this
+						substance seems to be necessary and very promising.
+					</AbstractText>
 				</Abstract>
 				<AuthorList CompleteYN="Y">
 					<Author ValidYN="Y">
@@ -4778,7 +4833,11 @@
 					<PublicationType UI="D004740">English Abstract</PublicationType>
 					<PublicationType UI="D016428">Journal Article</PublicationType>
 				</PublicationTypeList>
-				<VernacularTitle>Nachweis krebshemmender Eigenschaften einer stark immunstimulierenden Verbindung kleiner Molekülmasse. Versuche am immunlabilen DS-Karzinosarkom im Vergleich mit Ifosfamid. Stimulierung der körpereigenen Abwehr über etwa 20 Tage durch BA 1, einen N-(2-Cyanthylen)-harnstoff. Neue prophylaktische Möglichkeiten</VernacularTitle>
+				<VernacularTitle>Nachweis krebshemmender Eigenschaften einer stark immunstimulierenden Verbindung
+					kleiner Molekülmasse. Versuche am immunlabilen DS-Karzinosarkom im Vergleich mit Ifosfamid.
+					Stimulierung der körpereigenen Abwehr über etwa 20 Tage durch BA 1, einen
+					N-(2-Cyanthylen)-harnstoff. Neue prophylaktische Möglichkeiten
+				</VernacularTitle>
 			</Article>
 			<MedlineJournalInfo>
 				<Country>Germany</Country>
@@ -5016,7 +5075,20 @@
 					<MedlinePgn>1400-3</MedlinePgn>
 				</Pagination>
 				<Abstract>
-					<AbstractText>The distribution of blood flow to the subendocardial, medium and subepicardial layers of the left ventricular free wall was studied in anaesthetized dogs under normoxic (A), hypoxic (B) conditions and under pharmacologically induced (etafenone) coronary vasodilation (C). Regional myocardial blood flow was determined by means of the particle distribution method. In normoxia a transmural gradient of flow was observed, with the subendocardial layers receiving a significantly higher flow rate compared with the subepicardial layers. In hypoxia induced vasodilation this transmural gradient of flow was persistent. In contrast a marked redistribution of regional flow was observed under pharmacologically induced vasodilation. The transmural gradient decreased. In contrast to some findings these experiments demonstrate that a considerable vasodilatory capacity exists in all layers of the myocardium and can be utilized by drugs. The differences observed for the intramural distribution pattern of flow under hypoxia and drug induced vasodilation support the hypothesis that this pattern reflects corresponding gradients of regional myocardial metabolism.</AbstractText>
+					<AbstractText>The distribution of blood flow to the subendocardial, medium and subepicardial layers
+						of the left ventricular free wall was studied in anaesthetized dogs under normoxic (A), hypoxic
+						(B) conditions and under pharmacologically induced (etafenone) coronary vasodilation (C).
+						Regional myocardial blood flow was determined by means of the particle distribution method. In
+						normoxia a transmural gradient of flow was observed, with the subendocardial layers receiving a
+						significantly higher flow rate compared with the subepicardial layers. In hypoxia induced
+						vasodilation this transmural gradient of flow was persistent. In contrast a marked
+						redistribution of regional flow was observed under pharmacologically induced vasodilation. The
+						transmural gradient decreased. In contrast to some findings these experiments demonstrate that a
+						considerable vasodilatory capacity exists in all layers of the myocardium and can be utilized by
+						drugs. The differences observed for the intramural distribution pattern of flow under hypoxia
+						and drug induced vasodilation support the hypothesis that this pattern reflects corresponding
+						gradients of regional myocardial metabolism.
+					</AbstractText>
 				</Abstract>
 				<AuthorList CompleteYN="Y">
 					<Author ValidYN="Y">
@@ -5185,4 +5257,151 @@
 			</ReferenceList>
 		</PubmedData>
 	</PubmedArticle>
+	<PubmedArticle>
+		<MedlineCitation Status="MEDLINE" Owner="NLM">
+			<PMID Version="1">4917185</PMID>
+			<DateCompleted>
+				<Year>1970</Year>
+				<Month>10</Month>
+				<Day>27</Day>
+			</DateCompleted>
+			<DateRevised>
+				<Year>2018</Year>
+				<Month>11</Month>
+				<Day>13</Day>
+			</DateRevised>
+			<Article PubModel="Print">
+				<Journal>
+					<ISSN IssnType="Print">0003-6919</ISSN>
+					<JournalIssue CitedMedium="Print">
+						<Volume>19</Volume>
+						<Issue>6</Issue>
+						<PubDate>
+							<Year>1970</Year>
+							<Month>Jun</Month>
+						</PubDate>
+					</JournalIssue>
+					<Title>Applied microbiology</Title>
+					<ISOAbbreviation>Appl Microbiol</ISOAbbreviation>
+				</Journal>
+				<ArticleTitle>Bactericidal activity of a broad-spectrum illumination source.</ArticleTitle>
+				<Pagination>
+					<MedlinePgn>1013-4</MedlinePgn>
+				</Pagination>
+				<Abstract>
+					<AbstractText>Several hours of exposure to Vita-Lite lamps, which have a unique spectral
+						distribution, give significant killing of cells of Staphylococcus aureus.
+					</AbstractText>
+				</Abstract>
+				<AuthorList CompleteYN="Y">
+					<Author ValidYN="Y">
+						<LastName>Himmelfarb</LastName>
+						<ForeName>P</ForeName>
+						<Initials>P</Initials>
+					</Author>
+					<Author ValidYN="Y">
+						<LastName>Scott</LastName>
+						<ForeName>A</ForeName>
+						<Initials>A</Initials>
+					</Author>
+					<Author ValidYN="Y">
+						<LastName>Thayer</LastName>
+						<ForeName>P S</ForeName>
+						<Initials>PS</Initials>
+					</Author>
+				</AuthorList>
+				<Language>eng</Language>
+				<PublicationTypeList>
+					<PublicationType UI="D016428">Journal Article</PublicationType>
+				</PublicationTypeList>
+			</Article>
+			<MedlineJournalInfo>
+				<Country>United States</Country>
+				<MedlineTA>Appl Microbiol</MedlineTA>
+				<NlmUniqueID>7605802</NlmUniqueID>
+				<ISSNLinking>0003-6919</ISSNLinking>
+			</MedlineJournalInfo>
+			<CitationSubset>IM</CitationSubset>
+			<MeshHeadingList>
+				<MeshHeading>
+					<DescriptorName UI="D001431" MajorTopicYN="N">Bacteriological Techniques</DescriptorName>
+					<QualifierName UI="Q000295" MajorTopicYN="Y">instrumentation</QualifierName>
+				</MeshHeading>
+				<MeshHeading>
+					<DescriptorName UI="D008027" MajorTopicYN="Y">Light</DescriptorName>
+				</MeshHeading>
+				<MeshHeading>
+					<DescriptorName UI="D011830" MajorTopicYN="N">Radiation Effects</DescriptorName>
+				</MeshHeading>
+				<MeshHeading>
+					<DescriptorName UI="D012706" MajorTopicYN="N">Serratia marcescens</DescriptorName>
+					<QualifierName UI="Q000254" MajorTopicYN="N">growth &amp; development</QualifierName>
+					<QualifierName UI="Q000528" MajorTopicYN="Y">radiation effects</QualifierName>
+				</MeshHeading>
+				<MeshHeading>
+					<DescriptorName UI="D013210" MajorTopicYN="N">Staphylococcus</DescriptorName>
+					<QualifierName UI="Q000254" MajorTopicYN="N">growth &amp; development</QualifierName>
+					<QualifierName UI="Q000528" MajorTopicYN="Y">radiation effects</QualifierName>
+				</MeshHeading>
+				<MeshHeading>
+					<DescriptorName UI="D013242" MajorTopicYN="N">Sterilization</DescriptorName>
+				</MeshHeading>
+			</MeshHeadingList>
+		</MedlineCitation>
+		<PubmedData>
+			<History>
+				<PubMedPubDate PubStatus="pubmed">
+					<Year>1970</Year>
+					<Month>6</Month>
+					<Day>1</Day>
+				</PubMedPubDate>
+				<PubMedPubDate PubStatus="medline">
+					<Year>1970</Year>
+					<Month>6</Month>
+					<Day>1</Day>
+					<Hour>0</Hour>
+					<Minute>1</Minute>
+				</PubMedPubDate>
+				<PubMedPubDate PubStatus="entrez">
+					<Year>1970</Year>
+					<Month>6</Month>
+					<Day>1</Day>
+					<Hour>0</Hour>
+					<Minute>0</Minute>
+				</PubMedPubDate>
+			</History>
+			<PublicationStatus>ppublish</PublicationStatus>
+			<ArticleIdList>
+				<ArticleId IdType="pubmed">4917185</ArticleId>
+				<ArticleId IdType="pmc">PMC376844</ArticleId>
+			</ArticleIdList>
+			<ReferenceList>
+				<Reference>
+					<Citation>Photochem Photobiol. 1969 Jan;9(1):99-102</Citation>
+					<ArticleIdList>
+						<ArticleId IdType="pubmed">4889809</ArticleId>
+					</ArticleIdList>
+				</Reference>
+				<Reference>
+					<Citation>Endocrinology. 1969 Dec;85(6):1218-21</Citation>
+					<ArticleIdList>
+						<ArticleId IdType="pubmed">5347623</ArticleId>
+					</ArticleIdList>
+				</Reference>
+				<Reference>
+					<Citation>Arch Mikrobiol. 1956;24(1):60-79</Citation>
+					<ArticleIdList>
+						<ArticleId IdType="pubmed">13327987</ArticleId>
+					</ArticleIdList>
+				</Reference>
+				<Reference>
+					<Citation>J Bacteriol. 1941 Sep;42(3):353-66</Citation>
+					<ArticleIdList>
+						<ArticleId IdType="pubmed">16560457</ArticleId>
+					</ArticleIdList>
+				</Reference>
+			</ReferenceList>
+		</PubmedData>
+	</PubmedArticle>
 </PubmedArticleSet>
@ -0,0 +1,214 @@
|
|||
<xsl:stylesheet
|
||||
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
|
||||
xmlns:oaire="http://namespace.openaire.eu/schema/oaire/"
|
||||
xmlns:vocabulary="http://eu/dnetlib/transform/clean"
|
||||
xmlns:dateCleaner="http://eu/dnetlib/transform/dateISO"
|
||||
xmlns:oaf="http://namespace.openaire.eu/oaf"
|
||||
xmlns:oai="http://www.openarchives.org/OAI/2.0/"
|
||||
xmlns:datacite="http://datacite.org/schema/kernel-4"
|
||||
xmlns:dri="http://www.driver-repository.eu/namespace/dri"
|
||||
xmlns:xs="http://www.w3.org/2001/XMLSchema"
|
||||
xmlns:dr="http://www.driver-repository.eu/namespace/dr"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||
exclude-result-prefixes="xsl vocabulary dateCleaner" version="2.0">
|
||||
<xsl:param name="varOfficialName" />
|
||||
<xsl:param name="varDataSourceId" />
|
||||
<xsl:param name="varFP7" select="'corda_______::'" />
|
||||
<xsl:param name="varH2020" select="'corda__h2020::'" />
|
||||
<xsl:param name="repoCode"
|
||||
select="substring-before(//*[local-name() = 'header']/*[local-name()='recordIdentifier'], ':')" />
|
||||
<xsl:param name="index" select="0" />
|
||||
<xsl:param name="transDate" select="current-dateTime()" />
|
||||
|
||||
<xsl:template match="/">
|
||||
<record>
|
||||
<xsl:apply-templates select="//*[local-name() = 'header']" />
|
||||
<metadata>
|
||||
<xsl:call-template name="allElements">
|
||||
<xsl:with-param name="sourceElement" select="//dc:title" />
|
||||
<xsl:with-param name="targetElement" select="'dc:title'" />
|
||||
</xsl:call-template>
|
||||
<xsl:call-template name="allElements">
|
||||
<xsl:with-param name="sourceElement" select="//dc:creator/replace(., '^(.*)\|.*$', '$1')" />
|
||||
<xsl:with-param name="targetElement" select="'dc:creator'" />
|
||||
</xsl:call-template>
|
||||
<xsl:call-template name="allElements">
|
||||
<xsl:with-param name="sourceElement" select="//dc:contributor" />
|
||||
<xsl:with-param name="targetElement" select="'dc:contributor'" />
|
||||
</xsl:call-template>
|
||||
<xsl:call-template name="allElements">
|
||||
<xsl:with-param name="sourceElement" select="//dc:description" />
|
||||
<xsl:with-param name="targetElement" select="'dc:description'" />
|
||||
</xsl:call-template>
|
||||
<xsl:call-template name="allElements">
|
||||
<xsl:with-param name="sourceElement" select="//dc:subject" />
|
||||
<xsl:with-param name="targetElement" select="'dc:subject'" />
|
||||
</xsl:call-template>
|
||||
<xsl:call-template name="allElements">
|
||||
<xsl:with-param name="sourceElement" select="//dc:publisher" />
|
||||
<xsl:with-param name="targetElement" select="'dc:publisher'" />
|
||||
</xsl:call-template>
|
||||
<xsl:call-template name="allElements">
|
||||
<xsl:with-param name="sourceElement" select="//dc:format" />
|
||||
<xsl:with-param name="targetElement" select="'dc:format'" />
|
||||
</xsl:call-template>
|
||||
<xsl:call-template name="allElements">
|
||||
<xsl:with-param name="sourceElement" select="//dc:type" />
|
||||
<xsl:with-param name="targetElement" select="'dc:type'" />
|
||||
</xsl:call-template>
|
||||
<xsl:call-template name="allElements">
|
||||
<xsl:with-param name="sourceElement" select="//dc:source" />
|
||||
<xsl:with-param name="targetElement" select="'dc:source'" />
|
||||
</xsl:call-template>
|
||||
<dc:language>
|
||||
<xsl:value-of select="vocabulary:clean( //dc:language, 'dnet:languages')" />
|
||||
</dc:language>
|
||||
<xsl:call-template name="allElements">
|
||||
<xsl:with-param name="sourceElement" select="//dc:rights" />
|
||||
<xsl:with-param name="targetElement" select="'dc:rights'" />
|
||||
</xsl:call-template>
|
||||
<xsl:call-template name="allElements">
|
||||
<xsl:with-param name="sourceElement" select="//dc:relation[not(starts-with(.,'info:cnr-pdr'))]" />
|
||||
<xsl:with-param name="targetElement" select="'dc:relation'" />
|
||||
</xsl:call-template>
|
||||
|
||||
<xsl:call-template name="allElements">
|
||||
<xsl:with-param name="sourceElement" select="//dc:identifier[starts-with(., 'http')]" />
|
||||
<xsl:with-param name="targetElement" select="'dc:identifier'" />
|
||||
</xsl:call-template>
|
||||
<xsl:for-each select="//dc:relation">
|
||||
<xsl:if test="matches(normalize-space(.), '(info:eu-repo/grantagreement/ec/fp7/)(\d\d\d\d\d\d)(.*)', 'i')">
|
||||
<oaf:projectid>
|
||||
<xsl:value-of select="concat($varFP7, replace(normalize-space(.), '(info:eu-repo/grantagreement/ec/fp7/)(\d\d\d\d\d\d)(.*)', '$2', 'i'))" />
|
||||
</oaf:projectid>
|
||||
</xsl:if>
|
||||
<xsl:if test="matches(normalize-space(.), '(info:eu-repo/grantagreement/ec/h2020/)(\d\d\d\d\d\d)(.*)', 'i')">
|
||||
<oaf:projectid>
|
||||
<xsl:value-of select="concat($varH2020, replace(normalize-space(.), '(info:eu-repo/grantagreement/ec/h2020/)(\d\d\d\d\d\d)(.*)', '$2', 'i'))" />
|
||||
</oaf:projectid>
|
||||
</xsl:if>
|
||||
</xsl:for-each>
|
||||
|
||||
<oaf:accessrights>
|
||||
<xsl:value-of select="vocabulary:clean( //dc:rights, 'dnet:access_modes')" />
|
||||
</oaf:accessrights>
|
||||
|
||||
<xsl:variable name="varCobjCategory" select="vocabulary:clean( //dc:type, 'dnet:publication_resource')" />
|
||||
<xsl:variable name="varSuperType" select="vocabulary:clean( $varCobjCategory, 'dnet:result_typologies')" />
|
||||
<dr:CobjCategory type="{$varSuperType}"><xsl:value-of select="$varCobjCategory" /></dr:CobjCategory>
|
||||
|
||||
|
||||
<xsl:variable name="varRefereedConvt" select="for $i in (//dc:type, //dc:description, //oai:setSpec) return vocabulary:clean( normalize-space($i), 'dnet:review_levels')" />
|
||||
<xsl:variable name="varRefereedIdntf" select="(//*[string(node-name(.)) = 'dc:identifier' and matches(lower-case(.), '(^|.*[\.\-_/\s\(\)%\d#])pre[\.\-_/\s\(\)%\d#]?prints?([\.\-_/\s\(\)%\d#].*)?$')][count(//dc:identifier) = 1]/'0002', //*[string(node-name(.)) = 'dc:identifier' and matches(lower-case(.), '(^|.*[\.\-_/\s\(\)%\d#])refereed([\.\-_/\s\(\)\d%\d#].*)?$')]/'0001', //*[string(node-name(.)) = 'dc:identifier' and matches(lower-case(.), '.*-peer-reviewed-(fulltext-)?article-.*')]/'0001')" />
|
||||
<xsl:variable name="varRefereedSourc" select="//*[string(node-name(.)) = ('dc:source', 'dc:publisher') and matches(lower-case(.), '^(.*\s)?pre[\s\-_]*prints?([\s\.,].*)?$')]/'0002'" />
|
||||
<xsl:variable name="varRefereedDescr" select="(//dc:description[matches(lower-case(.), '.*(this\s*book|this\s*volume|it)\s*constitutes\s*the\s*(thoroughly\s*)?refereed') or matches(lower-case(.), '.*peer[\.\-_/\s\(\)]?review\s*under\s*responsibility\s*of.*') or matches(lower-case(.), '(this|a)\s*(article|preprint)\s*(has\s*been\s*)?(peer[\-\s]*)?reviewed\s*and\s*recommended\s*by\s*peer[\-\s]*community')]/'0001', //dc:description[matches(., '^version\s*(préliminaire.*|preliminary.*|0$)')]/'0002')" />
|
||||
<xsl:variable name="varRefereedTitle" select="(//dc:title[matches(lower-case(.), '.*\[.*peer[\s\-\._]*review\s*:.*\]\s*$')]/'0001', //dc:title[matches(lower-case(.), '.*\(\s*pre[\s\-\._]*prints?\s*\)\s*$')]/'0002')" />
|
||||
<xsl:variable name="varRefereedSubjt" select="(//dc:subject[matches(lower-case(.), '^\s*refereed\s*$')][//oaf:datasourceprefix = 'narcis______']/'0001', //dc:subject[matches(lower-case(.), '^\s*no[nt].{0,3}refereed\s*$')][//oaf:datasourceprefix = 'narcis______']/'0002')" />
|
||||
<xsl:variable name="varRefereed" select="($varRefereedConvt, $varRefereedIdntf, $varRefereedSourc, $varRefereedDescr, $varRefereedTitle, $varRefereedSubjt)" />
|
||||
<xsl:choose>
|
||||
<xsl:when test="count($varRefereed[. = '0001']) > 0">
|
||||
<oaf:refereed>
|
||||
<xsl:value-of select="'0001'" />
|
||||
</oaf:refereed>
|
||||
</xsl:when>
|
||||
<xsl:when test="count($varRefereed[. = '0002']) > 0">
|
||||
<oaf:refereed>
|
||||
<xsl:value-of select="'0002'" />
|
||||
</oaf:refereed>
|
||||
</xsl:when>
|
||||
</xsl:choose>
|
||||
|
||||
<oaf:dateAccepted>
|
||||
<xsl:value-of select="dateCleaner:dateISO( //dc:date[1] )" />
|
||||
</oaf:dateAccepted>
|
||||
|
||||
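<!-- Emit a fulltext link only for open access records exposing an http relation. -->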
<xsl:if test="//dc:relation[starts-with(., 'http')] and //dc:rights[.='info:eu-repo/semantics/openAccess']">
|
||||
<oaf:fulltext>
|
||||
<xsl:value-of select="//dc:relation[starts-with(., 'http')]" />
|
||||
</oaf:fulltext>
|
||||
</xsl:if>
|
||||
|
||||
<oaf:hostedBy name="{$varOfficialName}" id="{$varDataSourceId}" />
|
||||
<oaf:collectedFrom name="{$varOfficialName}" id="{$varDataSourceId}" />
|
||||
|
||||
<xsl:variable name="varKnownFileEndings" select="('.bmp', '.doc', '.docx', '.epub', '.flv', '.jpeg', '.jpg', '.m4v', '.mp4', '.mpg', '.odp', '.pdf', '.png', '.ppt', '.tiv', '.txt', '.xls', '.xlsx', '.zip')" />
|
||||
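<!-- Collect DOIs, de-duplicated, from bare '10.' identifiers, dx.doi.org / doi.org URLs and 'doi:'-prefixed identifiers. -->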
<xsl:variable name="varIdDoi" select="distinct-values((//dc:identifier[starts-with(., '10.')][matches(., '(10[.][0-9]{4,}[^\s/>]*/[^\s>]+)')], //dc:identifier[starts-with(., 'http') and (contains(., '://dx.doi.org/10.') or contains(., '://doi.org/10.'))]/substring-after(., 'doi.org/'), //dc:identifier[starts-with(lower-case(.), 'doi:10.')]/substring-after(lower-case(.), 'doi:')))" />
|
||||
<xsl:for-each select="$varIdDoi">
|
||||
<oaf:identifier identifierType="doi">
|
||||
<xsl:value-of select="." />
|
||||
</oaf:identifier>
|
||||
</xsl:for-each>
|
||||
|
||||
<xsl:variable name="varIdHdl" select="distinct-values(//dc:identifier[starts-with(., 'http') and contains(., '://hdl.handle.net/')]/substring-after(., 'hdl.handle.net/'))" />
|
||||
<xsl:for-each select="$varIdHdl" >
|
||||
<oaf:identifier identifierType="handle">
|
||||
<xsl:value-of select="." />
|
||||
</oaf:identifier>
|
||||
</xsl:for-each>
|
||||
|
||||
<xsl:variable name="varIdUrn" select="distinct-values(//dc:identifier[starts-with(., 'urn:nbn:nl:') or starts-with(., 'URN:NBN:NL:')])" />
|
||||
<xsl:for-each select="$varIdUrn">
|
||||
<oaf:identifier identifierType="urn">
|
||||
<xsl:value-of select="." />
|
||||
</oaf:identifier>
|
||||
</xsl:for-each>
|
||||
|
||||
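<!-- Identifiers served from the harvested repository's own baseURL are treated as landing pages, unless they end with a known file extension. -->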
<xsl:variable name="varOrigBaseUrl" select="//*[local-name() = 'about']/*[local-name() = 'provenance']//*[local-name() = 'originDescription' and not(./*[local-name() = 'originDescription'])]/*[local-name() = 'baseURL']" />
|
||||
<xsl:variable name="varIdLdpg" select="distinct-values(//dc:identifier[(contains(substring-after(., '://'), '/') and contains($varOrigBaseUrl, substring-before(substring-after(., '://'), '/'))) or (contains(substring-after(., '://'), ':') and contains($varOrigBaseUrl, substring-before(substring-after(., '://'), ':')))][not(replace(lower-case(.), '.*(\.[a-z]*)$', '$1') = $varKnownFileEndings)])" />
|
||||
<xsl:for-each select="$varIdLdpg">
|
||||
<oaf:identifier identifierType="landingPage">
|
||||
<xsl:value-of select="." />
|
||||
</oaf:identifier>
|
||||
</xsl:for-each>
|
||||
|
||||
<xsl:variable name="varIdUrl" select="distinct-values(//dc:identifier[starts-with(., 'http')][not(contains(., '://dx.doi.org/') or contains(., '://doi.org/') or contains(., '://hdl.handle.net/'))][count(index-of($varIdLdpg, .)) = 0])" />
|
||||
<xsl:for-each select="$varIdUrl">
|
||||
<oaf:identifier identifierType="url">
|
||||
<xsl:value-of select="." />
|
||||
</oaf:identifier>
|
||||
</xsl:for-each>
|
||||
|
||||
<xsl:for-each select="//oai:setSpec">
|
||||
<xsl:variable name="rorDsId" select="vocabulary:clean(., 'cnr:institutes')" />
|
||||
<xsl:if test="contains($rorDsId, '/ror.org/')">
|
||||
<oaf:relation relType="resultOrganization" subRelType="affiliation" relClass="hasAuthorInstitution">
|
||||
<xsl:value-of select="concat('ror_________::', $rorDsId)" />
|
||||
</oaf:relation>
|
||||
</xsl:if>
|
||||
</xsl:for-each>
|
||||
|
||||
</metadata>
|
||||
|
||||
<xsl:copy-of select="//*[local-name() = 'about']" />
|
||||
</record>
|
||||
</xsl:template>
|
||||
|
||||
|
||||
<xsl:template name="allElements">
|
||||
<xsl:param name="sourceElement" />
|
||||
<xsl:param name="targetElement" />
|
||||
|
||||
<xsl:for-each select="$sourceElement">
|
||||
<xsl:element name="{$targetElement}">
|
||||
<xsl:value-of select="normalize-space(.)" />
|
||||
</xsl:element>
|
||||
</xsl:for-each>
|
||||
</xsl:template>
|
||||
|
||||
<xsl:template match="//*[local-name() = 'header']">
|
||||
<xsl:copy>
|
||||
<xsl:apply-templates select="node()|@*" />
|
||||
<xsl:element name="dr:dateOfTransformation">
|
||||
<xsl:value-of select="$transDate" />
|
||||
</xsl:element>
|
||||
</xsl:copy>
|
||||
</xsl:template>
|
||||
|
||||
<xsl:template match="node()|@*">
|
||||
<xsl:copy>
|
||||
<xsl:apply-templates select="node()|@*" />
|
||||
</xsl:copy>
|
||||
</xsl:template>
|
||||
|
||||
</xsl:stylesheet>
|
|
@ -0,0 +1,57 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<record xmlns="http://www.openarchives.org/OAI/2.0/">
|
||||
<header>
|
||||
<identifier><![CDATA[oai:it.cnr:prodotti:433382]]></identifier>
|
||||
<datestamp><![CDATA[2020-11-30T15:32:03Z]]></datestamp>
|
||||
<setSpec><![CDATA[openaire]]></setSpec>
|
||||
<setSpec><![CDATA[CDS027]]></setSpec>
|
||||
<setSpec><![CDATA[CDS080]]></setSpec>
|
||||
</header>
|
||||
<metadata>
|
||||
<oai_dc:dc xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/oai_dc/ http://www.openarchives.org/OAI/2.0/oai_dc.xsd">
|
||||
<dc:type><![CDATA[info:eu-repo/semantics/conferenceObject]]></dc:type>
|
||||
<dc:type><![CDATA[Presentazione]]></dc:type>
|
||||
<dc:title><![CDATA[A multiscale observing approach for understanding acidification process in a marginal sea (northern Adriatic)]]></dc:title>
|
||||
<dc:creator><![CDATA[Cantoni C.]]></dc:creator>
|
||||
<dc:creator><![CDATA[Barba L.]]></dc:creator>
|
||||
<dc:creator><![CDATA[Bastianini M.]]></dc:creator>
|
||||
<dc:creator><![CDATA[Bortoluzzi G.]]></dc:creator>
|
||||
<dc:creator><![CDATA[Celio M.]]></dc:creator>
|
||||
<dc:creator><![CDATA[Chiggiato J.]]></dc:creator>
|
||||
<dc:creator><![CDATA[Cozzi S.]]></dc:creator>
|
||||
<dc:creator><![CDATA[Luchetta A.]]></dc:creator>
|
||||
<dc:creator><![CDATA[Ravaioli M.]]></dc:creator>
|
||||
<dc:creator><![CDATA[Sparnocchia S.]]></dc:creator>
|
||||
<dc:language><![CDATA[eng]]></dc:language>
|
||||
<dc:description><![CDATA[The Northern Adriatic is a shallow, semi-enclosed industrialized sub-basin of the Mediterranean affected by significant ecosystem
|
||||
changes, which are studied through several research activities including the long-term monitoring of ILTER international network.
|
||||
Changes of pHT (-0.06) and TA (+74 µmol/kg) in dense winter waters over the last 25 years already showed that this area is prone to
|
||||
acidification process under a complex inorganic carbon chemistry variability. To understand these changes, monthly sampling of the
|
||||
main biogeochemical and biological parameters has been carried out since 2008 at a fixed station (PALOMA, Gulf of Trieste). In 2013
|
||||
the site has been equipped with continuous pCO2 measurements at 3 m depth and has been regularly visited during basin wide
|
||||
surveys for the last two years. The combination of automated in situ measurements, monthly samplings and basin scale
|
||||
oceanographic cruises is used to better understand the processes controlling air-sea CO2 fluxes and inorganic carbon chemistry under
|
||||
three different scenarios: an extreme event of dense water formation, the phytoplankton blooms associated with riverine inputs and
|
||||
the late-summer marked oxygen under saturation in the deeper waters.]]></dc:description>
|
||||
<dc:source><![CDATA[ASLO - Aquatic Sciences Meeting, Granada, Spagna, 22-27/02/2015]]></dc:source>
|
||||
<dc:source><![CDATA[info:cnr-pdr/source/autori:Cantoni C., Barba L., Bastianini M., Bortoluzzi G., Celio M., Chiggiato J., Cozzi S., Luchetta A., Ravaioli M., Sparnocchia S./congresso_nome:ASLO - Aquatic Sciences Meeting/congresso_luogo:Granada, Spagna/congresso_data:22-27%2F02%2F2015/anno:2015/pagina_da:/pagina_a:/intervallo_pagine:]]></dc:source>
|
||||
<dc:date><![CDATA[2015]]></dc:date>
|
||||
<dc:identifier><![CDATA[http://www.cnr.it/prodotto/i/433382]]></dc:identifier>
|
||||
<dc:identifier><![CDATA[https://publications.cnr.it/doc/433382]]></dc:identifier>
|
||||
<dc:identifier><![CDATA[http://sgmeet.com/aslo/granada2015/]]></dc:identifier>
|
||||
<dc:relation><![CDATA[info:eu-repo/grantAgreement/EC/FP7/211574//Integrated Carbon Observation System/ICOS]]></dc:relation>
|
||||
<dc:relation><![CDATA[info:cnr-pdr/author/matricola:502/SPARNOCCHIA/STEFANIA]]></dc:relation>
|
||||
<dc:relation><![CDATA[info:cnr-pdr/author/matricola:990/BASTIANINI/MAURO]]></dc:relation>
|
||||
<dc:relation><![CDATA[info:cnr-pdr/author/matricola:5185/BARBA/LUISA]]></dc:relation>
|
||||
<dc:relation><![CDATA[info:cnr-pdr/author/matricola:5453/COZZI/STEFANO]]></dc:relation>
|
||||
<dc:relation><![CDATA[info:cnr-pdr/author/matricola:12491/LUCHETTA/ANNA]]></dc:relation>
|
||||
<dc:relation><![CDATA[info:cnr-pdr/author/matricola:12837/CANTONI/CAROLINA]]></dc:relation>
|
||||
<dc:relation><![CDATA[info:cnr-pdr/author/matricola:18161/RAVAIOLI/MARIANGELA]]></dc:relation>
|
||||
<dc:rights><![CDATA[info:eu-repo/semantics/openAccess]]></dc:rights>
|
||||
<dc:subject><![CDATA[ocean acidification]]></dc:subject>
|
||||
<dc:subject><![CDATA[Northern Adriatic Sea]]></dc:subject>
|
||||
<dc:subject><![CDATA[inorganic carbon system]]></dc:subject>
|
||||
<dc:subject><![CDATA[PALOMA]]></dc:subject>
|
||||
</oai_dc:dc>
|
||||
</metadata>
|
||||
</record>
|
|
@ -1232,4 +1232,268 @@ dnet:review_levels @=@ 0001 @=@ 印刷物/電子媒体-学術雑誌論文(査
|
|||
dnet:review_levels @=@ 0001 @=@ 印刷物/電子媒体-紀要論文(査読有り)
|
||||
dnet:review_levels @=@ 0001 @=@ 印刷物/電子媒体-雑誌記事(査読有り)
|
||||
dnet:review_levels @=@ 0001 @=@ 原著論文(査読有り)
|
||||
dnet:review_levels @=@ 0001 @=@ 査読論文
|
||||
cnr:institutes @=@ https://ror.org/00brf2d87 @=@ CDS001
|
||||
cnr:institutes @=@ https://ror.org/00brf2d87 @=@ IDASC - Istituto di Acustica e Sensoristica \"Orso Mario Corbino\"
|
||||
cnr:institutes @=@ https://ror.org/006qkqr45 @=@ CDS002
|
||||
cnr:institutes @=@ https://ror.org/006qkqr45 @=@ IAMC - Istituto per l'ambiente marino costiero
|
||||
cnr:institutes @=@ https://ror.org/054ye0e45 @=@ CDS003
|
||||
cnr:institutes @=@ https://ror.org/054ye0e45 @=@ IASI - Istituto di analisi dei sistemi ed informatica \"Antonio Ruberti\"
|
||||
cnr:institutes @=@ https://ror.org/00ygy3d85 @=@ CDS004
|
||||
cnr:institutes @=@ https://ror.org/00ygy3d85 @=@ IAC - Istituto per le applicazioni del calcolo \"Mauro Picone\"
|
||||
cnr:institutes @=@ https://ror.org/000sy1f36 @=@ CDS005
|
||||
cnr:institutes @=@ https://ror.org/000sy1f36 @=@ IASF - Istituto di astrofisica spaziale e fisica cosmica
|
||||
cnr:institutes @=@ https://ror.org/00x5wpm25 @=@ CDS006
|
||||
cnr:institutes @=@ https://ror.org/00x5wpm25 @=@ IBAM - Istituto per i beni archeologici e monumentali
|
||||
cnr:institutes @=@ https://ror.org/03eqeqg74 @=@ CDS007
|
||||
cnr:institutes @=@ https://ror.org/03eqeqg74 @=@ IBP - Istituto di biochimica delle proteine
|
||||
cnr:institutes @=@ https://ror.org/041xzk838 @=@ CDS008
|
||||
cnr:institutes @=@ https://ror.org/041xzk838 @=@ IBF - Istituto di biofisica
|
||||
cnr:institutes @=@ https://ror.org/00s2j5046 @=@ CDS009
|
||||
cnr:institutes @=@ https://ror.org/00s2j5046 @=@ IBFM - Istituto di bioimmagini e fisiologia molecolare
|
||||
cnr:institutes @=@ __CDS010__ @=@ CDS010
|
||||
cnr:institutes @=@ __CDS010__ @=@ IBAF - Istituto di biologia agro-ambientale e forestale
|
||||
cnr:institutes @=@ __CDS011__ @=@ CDS011
|
||||
cnr:institutes @=@ __CDS011__ @=@ IBC - Istituto di biologia cellulare
|
||||
cnr:institutes @=@ https://ror.org/02e5sbe24 @=@ CDS012
|
||||
cnr:institutes @=@ https://ror.org/02e5sbe24 @=@ IBBA - Istituto di biologia e biotecnologia agraria
|
||||
cnr:institutes @=@ https://ror.org/01nyatq71 @=@ CDS013
|
||||
cnr:institutes @=@ https://ror.org/01nyatq71 @=@ IBPM - Istituto di biologia e patologia molecolari
|
||||
cnr:institutes @=@ https://ror.org/01dy2q607 @=@ CDS014
|
||||
cnr:institutes @=@ https://ror.org/01dy2q607 @=@ IBIM - Istituto di biomedicina e di immunologia molecolare \"Alberto Monroy\"
|
||||
cnr:institutes @=@ https://ror.org/05nzf7q96 @=@ CDS015
|
||||
cnr:institutes @=@ https://ror.org/05nzf7q96 @=@ IBIOM - Istituto di Biomembrane, Bioenergetica e Biotecnologie Molecolari
|
||||
cnr:institutes @=@ https://ror.org/05m1yqp60 @=@ CDS016
|
||||
cnr:institutes @=@ https://ror.org/05m1yqp60 @=@ IBIMET - Istituto di biometeorologia
|
||||
cnr:institutes @=@ https://ror.org/03rqtqb02 @=@ CDS017
|
||||
cnr:institutes @=@ https://ror.org/03rqtqb02 @=@ IBB - Istituto di biostrutture e bioimmagini
|
||||
cnr:institutes @=@ https://ror.org/04r5fge26 @=@ CDS018
|
||||
cnr:institutes @=@ https://ror.org/04r5fge26 @=@ ICAR - Istituto di calcolo e reti ad alte prestazioni
|
||||
cnr:institutes @=@ https://ror.org/03wyf0g15 @=@ CDS019
|
||||
cnr:institutes @=@ https://ror.org/03wyf0g15 @=@ ICB - Istituto di chimica biomolecolare
|
||||
cnr:institutes @=@ https://ror.org/02fkw1114 @=@ CDS020
|
||||
cnr:institutes @=@ https://ror.org/02fkw1114 @=@ ICCOM - Istituto di chimica dei composti organo metallici
|
||||
cnr:institutes @=@ https://ror.org/0141vn777 @=@ CDS021
|
||||
cnr:institutes @=@ https://ror.org/0141vn777 @=@ ICRM - Istituto di chimica del riconoscimento molecolare
|
||||
cnr:institutes @=@ __CDS022__ @=@ CDS022
|
||||
cnr:institutes @=@ __CDS022__ @=@ ICTP - Istituto di chimica e tecnologia dei polimeri
|
||||
cnr:institutes @=@ __CDS023__ @=@ CDS023
|
||||
cnr:institutes @=@ __CDS023__ @=@ ICIS - Istituto di chimica inorganica e delle superfici
|
||||
cnr:institutes @=@ https://ror.org/00be3zh53 @=@ CDS024
|
||||
cnr:institutes @=@ https://ror.org/00be3zh53 @=@ ISASI - Istituto di Scienze Applicate e Sistemi Intelligenti \"Eduardo Caianiello\"
|
||||
cnr:institutes @=@ __CDS025__ @=@ CDS025
|
||||
cnr:institutes @=@ __CDS025__ @=@ ICEVO - Istituto di Studi sulle Civiltà dell'Egeo e del Vicino Oriente
|
||||
cnr:institutes @=@ https://ror.org/02ynrme92 @=@ CDS026
|
||||
cnr:institutes @=@ https://ror.org/02ynrme92 @=@ ICVBC - Istituto per la conservazione e valorizzazione dei beni culturali
|
||||
cnr:institutes @=@ https://ror.org/05wba8r86 @=@ CDS027
|
||||
cnr:institutes @=@ https://ror.org/05wba8r86 @=@ IC - Istituto di cristallografia
|
||||
cnr:institutes @=@ https://ror.org/03z58xd74 @=@ CDS028
|
||||
cnr:institutes @=@ https://ror.org/03z58xd74 @=@ IDPA - Istituto per la dinamica dei processi ambientali
|
||||
cnr:institutes @=@ https://ror.org/00n4jbh84 @=@ CDS029
|
||||
cnr:institutes @=@ https://ror.org/00n4jbh84 @=@ IEIIT - Istituto di elettronica e di ingegneria dell'informazione e delle telecomunicazioni
|
||||
cnr:institutes @=@ https://ror.org/04sn06036 @=@ CDS030
|
||||
cnr:institutes @=@ https://ror.org/04sn06036 @=@ IEOS - Istituto per l'endocrinologia e l'oncologia \"Gaetano Salvatore\"
|
||||
cnr:institutes @=@ https://ror.org/01rg40y89 @=@ CDS031
|
||||
cnr:institutes @=@ https://ror.org/01rg40y89 @=@ ICMATE - Istituto di Chimica della Materia Condensata e di Tecnologie per l'Energia
|
||||
cnr:institutes @=@ https://ror.org/00dqega85 @=@ CDS032
|
||||
cnr:institutes @=@ https://ror.org/00dqega85 @=@ IFAC - Istituto di fisica applicata \"Nello Carrara\"
|
||||
cnr:institutes @=@ https://ror.org/02n2bgz18 @=@ CDS033
|
||||
cnr:institutes @=@ https://ror.org/02n2bgz18 @=@ IFP - Istituto di fisica del plasma \"Piero Caldirola\"
|
||||
cnr:institutes @=@ __CDS034__ @=@ CDS034
|
||||
cnr:institutes @=@ __CDS034__ @=@ IFSI - Istituto di fisica dello spazio interplanetario
|
||||
cnr:institutes @=@ https://ror.org/01kdj2848 @=@ CDS035
|
||||
cnr:institutes @=@ https://ror.org/01kdj2848 @=@ IFC - Istituto di fisiologia clinica
|
||||
cnr:institutes @=@ https://ror.org/049ebw417 @=@ CDS036
|
||||
cnr:institutes @=@ https://ror.org/049ebw417 @=@ IFN - Istituto di fotonica e nanotecnologie
|
||||
cnr:institutes @=@ https://ror.org/01f5tnx94 @=@ CDS037
|
||||
cnr:institutes @=@ https://ror.org/01f5tnx94 @=@ IGI - Istituto gas ionizzati
|
||||
cnr:institutes @=@ __CDS038__ @=@ CDS038
|
||||
cnr:institutes @=@ __CDS038__ @=@ IGP - Istituto di genetica delle popolazioni
|
||||
cnr:institutes @=@ https://ror.org/04hadk112 @=@ CDS039
|
||||
cnr:institutes @=@ https://ror.org/04hadk112 @=@ IGB - Istituto di genetica e biofisica \"Adriano Buzzati Traverso\"
|
||||
cnr:institutes @=@ https://ror.org/03qpd8w66 @=@ CDS040
|
||||
cnr:institutes @=@ https://ror.org/03qpd8w66 @=@ IGM - Istituto di genetica molecolare \"Luigi Luca Cavalli Sforza\"
|
||||
cnr:institutes @=@ https://ror.org/01gtsa866 @=@ CDS041
|
||||
cnr:institutes @=@ https://ror.org/01gtsa866 @=@ IBBR - Istituto di Bioscienze e Biorisorse
|
||||
cnr:institutes @=@ https://ror.org/00ytw6m58 @=@ CDS042
|
||||
cnr:institutes @=@ https://ror.org/00ytw6m58 @=@ IGAG - Istituto di geologia ambientale e geoingegneria
|
||||
cnr:institutes @=@ https://ror.org/015bmra78 @=@ CDS043
|
||||
cnr:institutes @=@ https://ror.org/015bmra78 @=@ IGG - Istituto di geoscienze e georisorse
|
||||
cnr:institutes @=@ https://ror.org/02gdcn153 @=@ CDS044
|
||||
cnr:institutes @=@ https://ror.org/02gdcn153 @=@ IIT - Istituto di informatica e telematica
|
||||
cnr:institutes @=@ __CDS045__ @=@ CDS045
|
||||
cnr:institutes @=@ __CDS045__ @=@ ISIB - Istituto di ingegneria biomedica
|
||||
cnr:institutes @=@ https://ror.org/05hky6p02 @=@ CDS046
|
||||
cnr:institutes @=@ https://ror.org/05hky6p02 @=@ IIA - Istituto sull'inquinamento atmosferico
|
||||
cnr:institutes @=@ https://ror.org/011n2hw53 @=@ CDS047
|
||||
cnr:institutes @=@ https://ror.org/011n2hw53 @=@ ILIESI - Istituto per il lessico intellettuale europeo e storia delle idee
|
||||
cnr:institutes @=@ https://ror.org/028g3pe33 @=@ CDS048
|
||||
cnr:institutes @=@ https://ror.org/028g3pe33 @=@ ILC - Istituto di linguistica computazionale \"Antonio Zampolli\"
|
||||
cnr:institutes @=@ __CDS049__ @=@ CDS049
|
||||
cnr:institutes @=@ __CDS049__ @=@ IMAMOTER - Istituto per le macchine agricole e movimento terra
|
||||
cnr:institutes @=@ https://ror.org/03m0n3c07 @=@ CDS050
|
||||
cnr:institutes @=@ https://ror.org/03m0n3c07 @=@ IMATI - Istituto di matematica applicata e tecnologie informatiche \"Enrico Magenes\"
|
||||
cnr:institutes @=@ __CDS051__ @=@ CDS051
|
||||
cnr:institutes @=@ __CDS051__ @=@ IMCB - Istituto per i materiali compositi e biomedici
|
||||
cnr:institutes @=@ https://ror.org/00z8ws214 @=@ CDS052
|
||||
cnr:institutes @=@ https://ror.org/00z8ws214 @=@ IMEM - Istituto dei materiali per l'elettronica ed il magnetismo
|
||||
cnr:institutes @=@ __CDS053__ @=@ CDS053
|
||||
cnr:institutes @=@ __CDS053__ @=@ ISB - Istituto per i Sistemi Biologici
|
||||
cnr:institutes @=@ https://ror.org/00bc51d88 @=@ CDS054
|
||||
cnr:institutes @=@ https://ror.org/00bc51d88 @=@ NANOTEC - Istituto di Nanotecnologia
|
||||
cnr:institutes @=@ https://ror.org/024ye7w89 @=@ CDS055
|
||||
cnr:institutes @=@ https://ror.org/024ye7w89 @=@ IMAA - Istituto di metodologie per l'analisi ambientale
|
||||
cnr:institutes @=@ __CDS056__ @=@ CDS056
|
||||
cnr:institutes @=@ __CDS056__ @=@ IMGC - Istituto di metrologia \"Gustavo Colonnetti\"
|
||||
cnr:institutes @=@ https://ror.org/05vk2g845 @=@ CDS057
|
||||
cnr:institutes @=@ https://ror.org/05vk2g845 @=@ IMM - Istituto per la microelettronica e microsistemi
|
||||
cnr:institutes @=@ https://ror.org/02qwy8e97 @=@ CDS058
|
||||
cnr:institutes @=@ https://ror.org/02qwy8e97 @=@ IM - Istituto motori
|
||||
cnr:institutes @=@ __CDS059__ @=@ CDS059
|
||||
cnr:institutes @=@ __CDS059__ @=@ INMM - Istituto di neurobiologia e medicina molecolare
|
||||
cnr:institutes @=@ https://ror.org/02dr63s31 @=@ CDS060
|
||||
cnr:institutes @=@ https://ror.org/02dr63s31 @=@ IRGB - Istituto di Ricerca Genetica e Biomedica
|
||||
cnr:institutes @=@ https://ror.org/0240rwx68 @=@ CDS061
|
||||
cnr:institutes @=@ https://ror.org/0240rwx68 @=@ IN - Istituto di neuroscienze
|
||||
cnr:institutes @=@ https://ror.org/02rzxrg25 @=@ CDS062
|
||||
cnr:institutes @=@ https://ror.org/02rzxrg25 @=@ OVI - Istituto opera del vocabolario italiano
|
||||
cnr:institutes @=@ https://ror.org/05patmk97 @=@ CDS063
|
||||
cnr:institutes @=@ https://ror.org/05patmk97 @=@ IPCF - Istituto per i processi chimico-fisici
|
||||
cnr:institutes @=@ __CDS064__ @=@ CDS064
|
||||
cnr:institutes @=@ __CDS064__ @=@ IPP - Istituto per la protezione delle piante
|
||||
cnr:institutes @=@ https://ror.org/029st3z03 @=@ CDS065
|
||||
cnr:institutes @=@ https://ror.org/029st3z03 @=@ IRA - Istituto di radioastronomia
|
||||
cnr:institutes @=@ https://ror.org/0040zx077 @=@ CDS066
|
||||
cnr:institutes @=@ https://ror.org/0040zx077 @=@ IRPI - Istituto di ricerca per la protezione idrogeologica
|
||||
cnr:institutes @=@ https://ror.org/044bfsy89 @=@ CDS067
|
||||
cnr:institutes @=@ https://ror.org/044bfsy89 @=@ IRCRES - Istituto di Ricerca sulla Crescita Economica Sostenibile
|
||||
cnr:institutes @=@ https://ror.org/01n1ayq61 @=@ CDS068
|
||||
cnr:institutes @=@ https://ror.org/01n1ayq61 @=@ IRPPS - Istituto di ricerche sulla popolazione e le politiche sociali
|
||||
cnr:institutes @=@ https://ror.org/02db0kh50 @=@ CDS069
|
||||
cnr:institutes @=@ https://ror.org/02db0kh50 @=@ IRSA - Istituto di ricerca sulle acque
|
||||
cnr:institutes @=@ https://ror.org/05813wx75 @=@ CDS070
|
||||
cnr:institutes @=@ https://ror.org/05813wx75 @=@ IRC - Istituto di ricerche sulla combustione
|
||||
cnr:institutes @=@ https://ror.org/04vnwke91 @=@ CDS071
|
||||
cnr:institutes @=@ https://ror.org/04vnwke91 @=@ IRISS - Istituto di Ricerca su Innovazione e Servizi per lo Sviluppo
|
||||
cnr:institutes @=@ https://ror.org/02wxw4x45 @=@ CDS072
|
||||
cnr:institutes @=@ https://ror.org/02wxw4x45 @=@ IREA - Istituto per il rilevamento elettromagnetico dell'ambiente
|
||||
cnr:institutes @=@ https://ror.org/01j6drw72 @=@ CDS073
|
||||
cnr:institutes @=@ https://ror.org/01j6drw72 @=@ ISTEC - Istituto di scienza e tecnologia dei materiali ceramici
|
||||
cnr:institutes @=@ https://ror.org/05kacka20 @=@ CDS074
|
||||
cnr:institutes @=@ https://ror.org/05kacka20 @=@ ISTI - Istituto di scienza e tecnologie dell'informazione \"Alessandro Faedo\"
|
||||
cnr:institutes @=@ https://ror.org/00n8ttd98 @=@ CDS075
|
||||
cnr:institutes @=@ https://ror.org/00n8ttd98 @=@ ISAC - Istituto di scienze dell'atmosfera e del clima
|
||||
cnr:institutes @=@ https://ror.org/0013zhk30 @=@ CDS076
|
||||
cnr:institutes @=@ https://ror.org/0013zhk30 @=@ ISA - Istituto di Scienze dell'Alimentazione
|
||||
cnr:institutes @=@ https://ror.org/03x7xkr71 @=@ CDS077
|
||||
cnr:institutes @=@ https://ror.org/03x7xkr71 @=@ ISPA - Istituto di scienze delle produzioni alimentari
|
||||
cnr:institutes @=@ https://ror.org/05w9g2j85 @=@ CDS078
|
||||
cnr:institutes @=@ https://ror.org/05w9g2j85 @=@ ISTC - Istituto di scienze e tecnologie della cognizione
|
||||
cnr:institutes @=@ https://ror.org/032tyv240 @=@ CDS079
|
||||
cnr:institutes @=@ https://ror.org/032tyv240 @=@ ISTM - Istituto di scienze e tecnologie molecolari
|
||||
cnr:institutes @=@ https://ror.org/02hdf6119 @=@ CDS080
|
||||
cnr:institutes @=@ https://ror.org/02hdf6119 @=@ ISMAR - Istituto di scienze marine
|
||||
cnr:institutes @=@ https://ror.org/01yg57d71 @=@ CDS081
|
||||
cnr:institutes @=@ https://ror.org/01yg57d71 @=@ ISN - Istituto di scienze neurologiche
|
||||
cnr:institutes @=@ https://ror.org/021z1mz76 @=@ CDS082
|
||||
cnr:institutes @=@ https://ror.org/021z1mz76 @=@ ISOF - Istituto per la sintesi organica e la fotoreattività
|
||||
cnr:institutes @=@ https://ror.org/01wqae691 @=@ CDS083
|
||||
cnr:institutes @=@ https://ror.org/01wqae691 @=@ ISPAAM - Istituto per il sistema produzione animale in ambiente Mediterraneo
|
||||
cnr:institutes @=@ __CDS084__ @=@ CDS084
|
||||
cnr:institutes @=@ __CDS084__ @=@ ISAFoM - Istituto per i sistemi agricoli e forestali del mediterraneo
|
||||
cnr:institutes @=@ https://ror.org/00awwz417 @=@ CDS085
|
||||
cnr:institutes @=@ https://ror.org/00awwz417 @=@ ISPF - Istituto per la storia del pensiero filosofico e scientifico moderno
|
||||
cnr:institutes @=@ https://ror.org/03a111314 @=@ CDS086
|
||||
cnr:institutes @=@ https://ror.org/03a111314 @=@ ISEM - Istituto di storia dell'Europa mediterranea
|
||||
cnr:institutes @=@ https://ror.org/01zz9wh30 @=@ CDS087
|
||||
cnr:institutes @=@ https://ror.org/01zz9wh30 @=@ ISM - Istituto di struttura della materia
|
||||
cnr:institutes @=@ https://ror.org/035y5td47 @=@ CDS088
|
||||
cnr:institutes @=@ https://ror.org/035y5td47 @=@ ISGI - Istituto di studi giuridici internazionali
|
||||
cnr:institutes @=@ __CDS089__ @=@ CDS089
|
||||
cnr:institutes @=@ __CDS089__ @=@ ISPRI - Istituto sperimentale di studi socio - economici sull'innovazione e le politiche della ricerca
|
||||
cnr:institutes @=@ https://ror.org/051t1q308 @=@ CDS090
|
||||
cnr:institutes @=@ https://ror.org/051t1q308 @=@ ISSIA - Istituto di studi sui sistemi intelligenti per l'automazione
|
||||
cnr:institutes @=@ https://ror.org/05k3cs357 @=@ CDS091
|
||||
cnr:institutes @=@ https://ror.org/05k3cs357 @=@ ISSIRFA - Istituto di studi sui sistemi regionali federali e sulle autonomie \"Massimo Severo Giannini\"
|
||||
cnr:institutes @=@ https://ror.org/02gcxw165 @=@ CDS092
|
||||
cnr:institutes @=@ https://ror.org/02gcxw165 @=@ ISMA - Istituto di Studi sul Mediterraneo Antico
|
||||
cnr:institutes @=@ https://ror.org/05db0es39 @=@ CDS093
|
||||
cnr:institutes @=@ https://ror.org/05db0es39 @=@ ISMed - Istituto di studi sul Mediterraneo
|
||||
cnr:institutes @=@ https://ror.org/029k6t707 @=@ CDS094
|
||||
cnr:institutes @=@ https://ror.org/029k6t707 @=@ ISE - Istituto per lo studio degli ecosistemi
|
||||
cnr:institutes @=@ https://ror.org/00w6r1881 @=@ CDS095
|
||||
cnr:institutes @=@ https://ror.org/00w6r1881 @=@ ISMN - Istituto per lo studio dei materiali nanostrutturati
|
||||
cnr:institutes @=@ https://ror.org/01mfmr054 @=@ CDS096
|
||||
cnr:institutes @=@ https://ror.org/01mfmr054 @=@ ISMAC - Istituto per lo studio delle macromolecole
|
||||
cnr:institutes @=@ https://ror.org/058nrs650 @=@ CDS097
|
||||
cnr:institutes @=@ https://ror.org/058nrs650 @=@ ITM - Istituto per la tecnologia delle membrane
|
||||
cnr:institutes @=@ https://ror.org/0331xj092 @=@ CDS098
|
||||
cnr:institutes @=@ https://ror.org/0331xj092 @=@ ITABC - Istituto per le tecnologie applicate ai beni culturali
|
||||
cnr:institutes @=@ https://ror.org/052q58629 @=@ CDS099
|
||||
cnr:institutes @=@ https://ror.org/052q58629 @=@ ITAE - Istituto di tecnologie avanzate per l'energia \"Nicola Giordano\"
|
||||
cnr:institutes @=@ https://ror.org/04ehykb85 @=@ CDS100
|
||||
cnr:institutes @=@ https://ror.org/04ehykb85 @=@ ITB - Istituto di tecnologie biomediche
|
||||
cnr:institutes @=@ https://ror.org/0221agg28 @=@ CDS101
|
||||
cnr:institutes @=@ https://ror.org/0221agg28 @=@ ITC - Istituto per le tecnologie della costruzione
|
||||
cnr:institutes @=@ https://ror.org/02xz4xc25 @=@ CDS102
|
||||
cnr:institutes @=@ https://ror.org/02xz4xc25 @=@ ITD - Istituto per le tecnologie didattiche
|
||||
cnr:institutes @=@ __CDS103__ @=@ CDS103
|
||||
cnr:institutes @=@ __CDS103__ @=@ STIIMA - Istituto di Sistemi e Tecnologie Industriali Intelligenti per il Manifatturiero Avanzato
|
||||
cnr:institutes @=@ https://ror.org/01as2bh37 @=@ CDS104
|
||||
cnr:institutes @=@ https://ror.org/01as2bh37 @=@ ITTIG - Istituto di teoria e tecniche dell'informazione giuridica
|
||||
cnr:institutes @=@ https://ror.org/01y5w6t76 @=@ CDS105
|
||||
cnr:institutes @=@ https://ror.org/01y5w6t76 @=@ ITOI - Istituto per i trapianti d'organo e immunocitologia
|
||||
cnr:institutes @=@ https://ror.org/04xy2mq71 @=@ CDS106
|
||||
cnr:institutes @=@ https://ror.org/04xy2mq71 @=@ IVALSA - Istituto per la valorizzazione del legno e delle specie arboree
|
||||
cnr:institutes @=@ __CDS107__ @=@ CDS107
|
||||
cnr:institutes @=@ __CDS107__ @=@ IVV - Istituto di virologia vegetale
|
||||
cnr:institutes @=@ https://ror.org/013nxtf56 @=@ CDS108
|
||||
cnr:institutes @=@ https://ror.org/013nxtf56 @=@ IRSIG - Istituto di ricerca sui sistemi giudiziari
|
||||
cnr:institutes @=@ https://ror.org/05rcgef49 @=@ CDS109
|
||||
cnr:institutes @=@ https://ror.org/05rcgef49 @=@ ISC - Istituto dei sistemi complessi
|
||||
cnr:institutes @=@ __CDS110__ @=@ CDS110
|
||||
cnr:institutes @=@ __CDS110__ @=@ INFM - Centro di responsabilità scientifica INFM
|
||||
cnr:institutes @=@ https://ror.org/02dp3a879 @=@ CDS111
|
||||
cnr:institutes @=@ https://ror.org/02dp3a879 @=@ INO - Istituto nazionale di ottica
|
||||
cnr:institutes @=@ __CDS112__ @=@ CDS112
|
||||
cnr:institutes @=@ __CDS112__ @=@ IDAIC - Centro di responsabilità di attività scientifica IDAIC
|
||||
cnr:institutes @=@ https://ror.org/00p03yg71 @=@ CDS113
|
||||
cnr:institutes @=@ https://ror.org/00p03yg71 @=@ SPIN - Istituto superconduttori, materiali innovativi e dispositivi
|
||||
cnr:institutes @=@ https://ror.org/00yfw2296 @=@ CDS114
|
||||
cnr:institutes @=@ https://ror.org/00yfw2296 @=@ IOM - Istituto officina dei materiali
|
||||
cnr:institutes @=@ https://ror.org/0042e5975 @=@ CDS115
|
||||
cnr:institutes @=@ https://ror.org/0042e5975 @=@ NANO - Istituto Nanoscienze
|
||||
cnr:institutes @=@ https://ror.org/03ta8pf33 @=@ CDS116
|
||||
cnr:institutes @=@ https://ror.org/03ta8pf33 @=@ IFT - Istituto di Farmacologia Traslazionale
|
||||
cnr:institutes @=@ https://ror.org/040xhth73 @=@ CDS117
|
||||
cnr:institutes @=@ https://ror.org/040xhth73 @=@ IBCN - Istituto di Biologia Cellulare e Neurobiologia
|
||||
cnr:institutes @=@ https://ror.org/02qnx8e75 @=@ CDS118
|
||||
cnr:institutes @=@ https://ror.org/02qnx8e75 @=@ INM - Istituto di iNgegneria del Mare
|
||||
cnr:institutes @=@ https://ror.org/05nr7xa08 @=@ CDS119
|
||||
cnr:institutes @=@ https://ror.org/05nr7xa08 @=@ IPCB - Istituto per i Polimeri, Compositi e Biomateriali
|
||||
cnr:institutes @=@ https://ror.org/008fjbg42 @=@ CDS121
|
||||
cnr:institutes @=@ https://ror.org/008fjbg42 @=@ IPSP - Istituto per la Protezione Sostenibile delle Piante
|
||||
cnr:institutes @=@ __CDS122__ @=@ CDS122
|
||||
cnr:institutes @=@ __CDS122__ @=@ IRBIM - Istituto per le Risorse Biologiche e le Biotecnologie Marine
|
||||
cnr:institutes @=@ __CDS123__ @=@ CDS123
|
||||
cnr:institutes @=@ __CDS123__ @=@ ISPC - Istituto di Scienze del Patrimonio Culturale
|
||||
cnr:institutes @=@ __CDS124__ @=@ CDS124
|
||||
cnr:institutes @=@ __CDS124__ @=@ IAS - Istituto per lo studio degli impatti Antropici e Sostenibilità in ambiente marino
|
||||
cnr:institutes @=@ __CDS125__ @=@ CDS125
|
||||
cnr:institutes @=@ __CDS125__ @=@ IRET - Istituto di Ricerca sugli Ecosistemi Terrestri
|
||||
cnr:institutes @=@ https://ror.org/03a0vt050 @=@ CDS126
|
||||
cnr:institutes @=@ https://ror.org/03a0vt050 @=@ ISTP - Istituto per la Scienza e Tecnologia dei Plasmi
|
||||
cnr:institutes @=@ __CDS127__ @=@ CDS127
|
||||
cnr:institutes @=@ __CDS127__ @=@ ISP - Istituto di Scienze Polari
|
||||
cnr:institutes @=@ https://ror.org/03byxpq91 @=@ CDS128
|
||||
cnr:institutes @=@ https://ror.org/03byxpq91 @=@ IRIB - Istituto per la Ricerca e l'Innovazione Biomedica
|
||||
cnr:institutes @=@ __CDS129__ @=@ CDS129
|
||||
cnr:institutes @=@ __CDS129__ @=@ IGSG - Istituto di Informatica Giuridica e Sistemi Giudiziari
|
||||
cnr:institutes @=@ __CDS130__ @=@ CDS130
|
||||
cnr:institutes @=@ __CDS130__ @=@ IBBC - Istituto di Biochimica e Biologia Cellulare
|
||||
cnr:institutes @=@ __CDS131__ @=@ CDS131
|
||||
cnr:institutes @=@ __CDS131__ @=@ IBE - Istituto per la BioEconomia
|
||||
cnr:institutes @=@ https://ror.org/0263zy895 @=@ CDS132
|
||||
cnr:institutes @=@ https://ror.org/0263zy895 @=@ SCITEC - Istituto di Scienze e Tecnologie Chimiche \"Giulio Natta\"
|
||||
cnr:institutes @=@ __CDS133__ @=@ CDS133
|
||||
cnr:institutes @=@ __CDS133__ @=@ STEMS - Istituto di Scienze e Tecnologie per l'Energia e la Mobilità Sostenibili
|
|
@ -1077,4 +1077,137 @@ dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/DATASET/IS_SUPPLEMENTED
|
|||
dnet:topic_types @=@ dnet:topic_types @=@ ENRICH/MISSING/AUTHOR/ORCID @=@ An Open Researcher and Contributor ID (ORCID) that can be associated to an author of your publications
|
||||
dnet:review_levels @=@ dnet:review_levels @=@ 0000 @=@ Unknown
|
||||
dnet:review_levels @=@ dnet:review_levels @=@ 0002 @=@ nonPeerReviewed
|
||||
dnet:review_levels @=@ dnet:review_levels @=@ 0001 @=@ peerReviewed
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00brf2d87 @=@ https://ror.org/00brf2d87
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/006qkqr45 @=@ https://ror.org/006qkqr45
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/054ye0e45 @=@ https://ror.org/054ye0e45
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00ygy3d85 @=@ https://ror.org/00ygy3d85
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/000sy1f36 @=@ https://ror.org/000sy1f36
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00x5wpm25 @=@ https://ror.org/00x5wpm25
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03eqeqg74 @=@ https://ror.org/03eqeqg74
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/041xzk838 @=@ https://ror.org/041xzk838
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00s2j5046 @=@ https://ror.org/00s2j5046
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS010__ @=@ __CDS010__
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS011__ @=@ __CDS011__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02e5sbe24 @=@ https://ror.org/02e5sbe24
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01nyatq71 @=@ https://ror.org/01nyatq71
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01dy2q607 @=@ https://ror.org/01dy2q607
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05nzf7q96 @=@ https://ror.org/05nzf7q96
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05m1yqp60 @=@ https://ror.org/05m1yqp60
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03rqtqb02 @=@ https://ror.org/03rqtqb02
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/04r5fge26 @=@ https://ror.org/04r5fge26
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03wyf0g15 @=@ https://ror.org/03wyf0g15
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02fkw1114 @=@ https://ror.org/02fkw1114
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/0141vn777 @=@ https://ror.org/0141vn777
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS022__ @=@ __CDS022__
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS023__ @=@ __CDS023__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00be3zh53 @=@ https://ror.org/00be3zh53
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS025__ @=@ __CDS025__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02ynrme92 @=@ https://ror.org/02ynrme92
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05wba8r86 @=@ https://ror.org/05wba8r86
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03z58xd74 @=@ https://ror.org/03z58xd74
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00n4jbh84 @=@ https://ror.org/00n4jbh84
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/04sn06036 @=@ https://ror.org/04sn06036
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01rg40y89 @=@ https://ror.org/01rg40y89
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00dqega85 @=@ https://ror.org/00dqega85
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02n2bgz18 @=@ https://ror.org/02n2bgz18
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS034__ @=@ __CDS034__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01kdj2848 @=@ https://ror.org/01kdj2848
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/049ebw417 @=@ https://ror.org/049ebw417
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01f5tnx94 @=@ https://ror.org/01f5tnx94
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS038__ @=@ __CDS038__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/04hadk112 @=@ https://ror.org/04hadk112
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03qpd8w66 @=@ https://ror.org/03qpd8w66
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01gtsa866 @=@ https://ror.org/01gtsa866
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00ytw6m58 @=@ https://ror.org/00ytw6m58
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/015bmra78 @=@ https://ror.org/015bmra78
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02gdcn153 @=@ https://ror.org/02gdcn153
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS045__ @=@ __CDS045__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05hky6p02 @=@ https://ror.org/05hky6p02
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/011n2hw53 @=@ https://ror.org/011n2hw53
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/028g3pe33 @=@ https://ror.org/028g3pe33
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS049__ @=@ __CDS049__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03m0n3c07 @=@ https://ror.org/03m0n3c07
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS051__ @=@ __CDS051__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00z8ws214 @=@ https://ror.org/00z8ws214
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS053__ @=@ __CDS053__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00bc51d88 @=@ https://ror.org/00bc51d88
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/024ye7w89 @=@ https://ror.org/024ye7w89
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS056__ @=@ __CDS056__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05vk2g845 @=@ https://ror.org/05vk2g845
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02qwy8e97 @=@ https://ror.org/02qwy8e97
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS059__ @=@ __CDS059__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02dr63s31 @=@ https://ror.org/02dr63s31
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/0240rwx68 @=@ https://ror.org/0240rwx68
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02rzxrg25 @=@ https://ror.org/02rzxrg25
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05patmk97 @=@ https://ror.org/05patmk97
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS064__ @=@ __CDS064__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/029st3z03 @=@ https://ror.org/029st3z03
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/0040zx077 @=@ https://ror.org/0040zx077
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/044bfsy89 @=@ https://ror.org/044bfsy89
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01n1ayq61 @=@ https://ror.org/01n1ayq61
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02db0kh50 @=@ https://ror.org/02db0kh50
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05813wx75 @=@ https://ror.org/05813wx75
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/04vnwke91 @=@ https://ror.org/04vnwke91
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02wxw4x45 @=@ https://ror.org/02wxw4x45
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01j6drw72 @=@ https://ror.org/01j6drw72
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05kacka20 @=@ https://ror.org/05kacka20
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00n8ttd98 @=@ https://ror.org/00n8ttd98
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/0013zhk30 @=@ https://ror.org/0013zhk30
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03x7xkr71 @=@ https://ror.org/03x7xkr71
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05w9g2j85 @=@ https://ror.org/05w9g2j85
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/032tyv240 @=@ https://ror.org/032tyv240
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02hdf6119 @=@ https://ror.org/02hdf6119
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01yg57d71 @=@ https://ror.org/01yg57d71
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/021z1mz76 @=@ https://ror.org/021z1mz76
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01wqae691 @=@ https://ror.org/01wqae691
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS084__ @=@ __CDS084__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00awwz417 @=@ https://ror.org/00awwz417
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03a111314 @=@ https://ror.org/03a111314
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01zz9wh30 @=@ https://ror.org/01zz9wh30
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/035y5td47 @=@ https://ror.org/035y5td47
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS089__ @=@ __CDS089__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/051t1q308 @=@ https://ror.org/051t1q308
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05k3cs357 @=@ https://ror.org/05k3cs357
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02gcxw165 @=@ https://ror.org/02gcxw165
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05db0es39 @=@ https://ror.org/05db0es39
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/029k6t707 @=@ https://ror.org/029k6t707
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00w6r1881 @=@ https://ror.org/00w6r1881
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01mfmr054 @=@ https://ror.org/01mfmr054
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/058nrs650 @=@ https://ror.org/058nrs650
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/0331xj092 @=@ https://ror.org/0331xj092
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/052q58629 @=@ https://ror.org/052q58629
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/04ehykb85 @=@ https://ror.org/04ehykb85
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/0221agg28 @=@ https://ror.org/0221agg28
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02xz4xc25 @=@ https://ror.org/02xz4xc25
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS103__ @=@ __CDS103__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01as2bh37 @=@ https://ror.org/01as2bh37
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/01y5w6t76 @=@ https://ror.org/01y5w6t76
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/04xy2mq71 @=@ https://ror.org/04xy2mq71
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS107__ @=@ __CDS107__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/013nxtf56 @=@ https://ror.org/013nxtf56
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05rcgef49 @=@ https://ror.org/05rcgef49
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS110__ @=@ __CDS110__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02dp3a879 @=@ https://ror.org/02dp3a879
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS112__ @=@ __CDS112__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00p03yg71 @=@ https://ror.org/00p03yg71
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/00yfw2296 @=@ https://ror.org/00yfw2296
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/0042e5975 @=@ https://ror.org/0042e5975
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03ta8pf33 @=@ https://ror.org/03ta8pf33
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/040xhth73 @=@ https://ror.org/040xhth73
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/02qnx8e75 @=@ https://ror.org/02qnx8e75
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/05nr7xa08 @=@ https://ror.org/05nr7xa08
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/008fjbg42 @=@ https://ror.org/008fjbg42
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS122__ @=@ __CDS122__
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS123__ @=@ __CDS123__
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS124__ @=@ __CDS124__
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS125__ @=@ __CDS125__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03a0vt050 @=@ https://ror.org/03a0vt050
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS127__ @=@ __CDS127__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/03byxpq91 @=@ https://ror.org/03byxpq91
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS129__ @=@ __CDS129__
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS130__ @=@ __CDS130__
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS131__ @=@ __CDS131__
|
||||
cnr:institutes @=@ cnr:institutes @=@ https://ror.org/0263zy895 @=@ https://ror.org/0263zy895
|
||||
cnr:institutes @=@ cnr:institutes @=@ __CDS133__ @=@ __CDS133__
|
||||
|
||||
|
|
|
@ -2,11 +2,14 @@ package eu.dnetlib.dhp.datacite
|
|||
|
||||
import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature}
|
||||
import eu.dnetlib.dhp.aggregation.AbstractVocabularyTest
|
||||
import eu.dnetlib.dhp.schema.oaf.Oaf
|
||||
import eu.dnetlib.dhp.schema.oaf.{Dataset => OafDataset, _}
|
||||
import org.apache.commons.io.FileUtils
|
||||
import org.apache.spark.SparkConf
|
||||
import org.apache.spark.sql.functions.{col, count}
|
||||
import org.apache.spark.sql.{Dataset, Encoder, Encoders, SparkSession}
|
||||
import org.json4s.DefaultFormats
|
||||
import org.json4s.JsonAST.{JField, JObject, JString}
|
||||
import org.json4s.jackson.JsonMethods.parse
|
||||
import org.junit.jupiter.api.Assertions._
|
||||
import org.junit.jupiter.api.extension.ExtendWith
|
||||
import org.junit.jupiter.api.{AfterEach, BeforeEach, Test}
|
||||
|
@ -70,17 +73,15 @@ class DataciteToOAFTest extends AbstractVocabularyTest {
|
|||
|
||||
assertEquals(100, nativeSize)
|
||||
|
||||
spark.read.load(targetPath).printSchema();
|
||||
|
||||
- val result: Dataset[Oaf] = spark.read.load(targetPath).as[Oaf]
|
||||
+ val result: Dataset[String] =
|
||||
+ spark.read.text(targetPath).as[String].map(DataciteUtilityTest.convertToOAF)(Encoders.STRING)
|
||||
|
||||
result
|
||||
.map(s => s.getClass.getSimpleName)
|
||||
.groupBy(col("value").alias("class"))
|
||||
.agg(count("value").alias("Total"))
|
||||
.show(false)
|
||||
|
||||
- val t = spark.read.load(targetPath).count()
|
||||
+ val t = spark.read.text(targetPath).as[String].count()
|
||||
|
||||
assertTrue(t > 0)
|
||||
|
||||
|
|
|
@ -0,0 +1,30 @@
|
|||
package eu.dnetlib.dhp.datacite
|
||||
|
||||
import org.json4s.DefaultFormats
|
||||
import org.json4s.JsonAST.{JField, JObject, JString}
|
||||
import org.json4s.jackson.JsonMethods.parse
|
||||
|
||||
object DataciteUtilityTest {
|
||||
|
||||
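/** Classifies a serialized OAF JSON record for test assertions: returns "Relation" when a
  * top-level "source" field is present, otherwise the lower-cased classname of the first
  * instance type (assumes every non-relation record carries at least one instance).
  */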
def convertToOAF(input: String): String = {
|
||||
implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats
|
||||
lazy val json = parse(input)
|
||||
|
||||
val isRelation: String = (json \\ "source").extractOrElse("NULL")
|
||||
|
||||
if (isRelation != "NULL") {
|
||||
return "Relation"
|
||||
}
|
||||
|
||||
val iType: List[String] = for {
|
||||
JObject(instance) <- json \\ "instance"
|
||||
JField("instancetype", JObject(instancetype)) <- instance
|
||||
JField("classname", JString(classname)) <- instancetype
|
||||
|
||||
} yield classname
|
||||
|
||||
val l: String = iType.head.toLowerCase()
|
||||
l
|
||||
}
|
||||
|
||||
}
|
|
@ -2,9 +2,10 @@ package eu.dnetlib.dhp.sx.bio
|
|||
|
||||
import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper, SerializationFeature}
|
||||
import eu.dnetlib.dhp.aggregation.AbstractVocabularyTest
|
||||
- import eu.dnetlib.dhp.schema.oaf.{Oaf, Relation, Result}
|
||||
import eu.dnetlib.dhp.schema.oaf.utils.PidType
|
||||
+ import eu.dnetlib.dhp.schema.oaf.{Oaf, Publication, Relation, Result}
|
||||
import eu.dnetlib.dhp.sx.bio.BioDBToOAF.ScholixResolved
|
||||
- import eu.dnetlib.dhp.sx.bio.pubmed.{PMArticle, PMParser, PubMedToOaf}
|
||||
+ import eu.dnetlib.dhp.sx.bio.pubmed.{PMArticle, PMParser, PMSubject, PubMedToOaf}
|
||||
import org.json4s.DefaultFormats
|
||||
import org.json4s.JsonAST.{JField, JObject, JString}
|
||||
import org.json4s.jackson.JsonMethods.parse
|
||||
|
@ -16,6 +17,7 @@ import org.mockito.junit.jupiter.MockitoExtension
|
|||
import java.io.{BufferedReader, InputStream, InputStreamReader}
|
||||
import java.util.zip.GZIPInputStream
|
||||
import scala.collection.JavaConverters._
|
||||
import scala.collection.mutable.ListBuffer
|
||||
import scala.io.Source
|
||||
import scala.xml.pull.XMLEventReader
|
||||
|
||||
|
@ -74,6 +76,95 @@ class BioScholixTest extends AbstractVocabularyTest {
|
|||
|
||||
}
|
||||
|
||||
private def checkPMArticle(article: PMArticle): Unit = {
|
||||
assertNotNull(article.getPmid)
|
||||
assertNotNull(article.getTitle)
|
||||
assertNotNull(article.getAuthors)
|
||||
article.getAuthors.asScala.foreach { a =>
|
||||
assertNotNull(a)
|
||||
assertNotNull(a.getFullName)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
def testParsingPubmedXML(): Unit = {
|
||||
val xml = new XMLEventReader(
|
||||
Source.fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed.xml"))
|
||||
)
|
||||
val parser = new PMParser(xml)
|
||||
parser.foreach(checkPMArticle)
|
||||
}
|
||||
|
||||
private def checkPubmedPublication(o: Oaf): Unit = {
|
||||
assertTrue(o.isInstanceOf[Publication])
|
||||
val p: Publication = o.asInstanceOf[Publication]
|
||||
assertNotNull(p.getId)
|
||||
assertNotNull(p.getTitle)
|
||||
p.getTitle.asScala.foreach(t => assertNotNull(t.getValue))
|
||||
p.getAuthor.asScala.foreach(a => assertNotNull(a.getFullname))
|
||||
assertNotNull(p.getInstance())
|
||||
p.getInstance().asScala.foreach { i =>
|
||||
assertNotNull(i.getCollectedfrom)
|
||||
assertNotNull(i.getPid)
|
||||
assertNotNull(i.getInstancetype)
|
||||
}
|
||||
assertNotNull(p.getOriginalId)
|
||||
p.getOriginalId.asScala.foreach(oId => assertNotNull(oId))
|
||||
|
||||
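// A publication carrying a PMC pid must also expose the legacy "od_______267::" original identifier.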
val hasPMC = p
|
||||
.getInstance()
|
||||
.asScala
|
||||
.exists(i => i.getPid.asScala.exists(pid => pid.getQualifier.getClassid.equalsIgnoreCase(PidType.pmc.toString)))
|
||||
|
||||
if (hasPMC) {
|
||||
assertTrue(p.getOriginalId.asScala.exists(oId => oId.startsWith("od_______267::")))
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
def testPubmedOriginalID(): Unit = {
|
||||
val article: PMArticle = new PMArticle
|
||||
|
||||
article.setPmid("1234")
|
||||
|
||||
article.setTitle("a Title")
|
||||
|
||||
// VERIFY PUBLICATION IS NOT NULL
|
||||
article.getPublicationTypes.add(new PMSubject("article", null, null))
|
||||
var publication = PubMedToOaf.convert(article, vocabularies).asInstanceOf[Publication]
|
||||
assertNotNull(publication)
|
||||
assertEquals("50|pmid________::81dc9bdb52d04dc20036dbd8313ed055", publication.getId)
|
||||
|
||||
// VERIFY THE PUBLICATION ID DOES NOT CHANGE EVEN WHEN THE PMC IDENTIFIER IS SET
|
||||
article.setPmcId("PMC1517292")
|
||||
publication = PubMedToOaf.convert(article, vocabularies).asInstanceOf[Publication]
|
||||
assertNotNull(publication)
|
||||
assertEquals("50|pmid________::81dc9bdb52d04dc20036dbd8313ed055", publication.getId)
|
||||
|
||||
// VERIFY THAT THE ORIGINAL ID GENERATED THE OLD WAY, FROM THE PMC IDENTIFIER, STILL EXISTS
|
||||
|
||||
val oldOpenaireID = "od_______267::0000072375bc0e68fa09d4e6b7658248"
|
||||
|
||||
val hasOldOpenAIREID = publication.getOriginalId.asScala.exists(o => o.equalsIgnoreCase(oldOpenaireID))
|
||||
|
||||
assertTrue(hasOldOpenAIREID)
|
||||
}
|
||||
|
||||
@Test
|
||||
def testPubmedMapping(): Unit = {
|
||||
|
||||
val xml = new XMLEventReader(
|
||||
Source.fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed.xml"))
|
||||
)
|
||||
val parser = new PMParser(xml)
|
||||
val results = ListBuffer[Oaf]()
|
||||
parser.foreach(x => results += PubMedToOaf.convert(x, vocabularies))
|
||||
|
||||
results.foreach(checkPubmedPublication)
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
def testPDBToOAF(): Unit = {
|
||||
|
||||
|
|
|
@ -1443,7 +1443,7 @@
|
|||
17
|
||||
]
|
||||
],
|
||||
"date-time": "2021-05-17T15:08:12Z",
|
||||
"date-time": "3021-05-17T15:08:12Z",
|
||||
"timestamp": 1621264092000
|
||||
},
|
||||
"score": 1.0,
|
||||
|
@ -1461,7 +1461,7 @@
|
|||
22
|
||||
]
|
||||
],
|
||||
"date-time": "2021-05-17T15:08:12Z"
|
||||
"date-time": "3021-05-17T15:08:12Z"
|
||||
},
|
||||
"references-count": 83,
|
||||
"journal-issue": {
|
||||
|
|
|
@ -5,6 +5,7 @@ import static eu.dnetlib.dhp.PropagationConstant.removeOutputDir;
|
|||
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
|
@ -102,6 +103,7 @@ public class SparkBulkTagJob {
|
|||
ResultTagger resultTagger = new ResultTagger();
|
||||
readPath(spark, inputPath, resultClazz)
|
||||
.map(patchResult(), Encoders.bean(resultClazz))
|
||||
.filter(Objects::nonNull)
|
||||
.map(
|
||||
(MapFunction<R, R>) value -> resultTagger
|
||||
.enrichContextCriteria(
|
||||
|
|
|
@ -0,0 +1,29 @@
|
|||
|
||||
package eu.dnetlib.dhp.bulktag.eosc;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
/**
|
||||
* @author miriam.baglioni
|
||||
* @Date 21/07/22
|
||||
*/
|
||||
public class DatasourceMaster implements Serializable {
|
||||
private String datasource;
|
||||
private String master;
|
||||
|
||||
public String getDatasource() {
|
||||
return datasource;
|
||||
}
|
||||
|
||||
public void setDatasource(String datasource) {
|
||||
this.datasource = datasource;
|
||||
}
|
||||
|
||||
public String getMaster() {
|
||||
return master;
|
||||
}
|
||||
|
||||
public void setMaster(String master) {
|
||||
this.master = master;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,136 @@
|
|||
|
||||
package eu.dnetlib.dhp.bulktag.eosc;
|
||||
|
||||
import java.io.BufferedWriter;
|
||||
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStreamWriter;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FSDataOutputStream;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
|
||||
/**
|
||||
* @author miriam.baglioni
|
||||
* @Date 21/07/22
|
||||
*/
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
|
||||
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
|
||||
import eu.dnetlib.dhp.common.DbClient;
|
||||
import eu.dnetlib.dhp.schema.common.ModelSupport;
|
||||
import eu.dnetlib.dhp.schema.common.RelationInverse;
|
||||
import eu.dnetlib.dhp.schema.oaf.Relation;
|
||||
import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
|
||||
|
||||
public class ReadMasterDatasourceFromDB implements Closeable {
|
||||
|
||||
private final DbClient dbClient;
|
||||
private static final Log log = LogFactory.getLog(ReadMasterDatasourceFromDB.class);
|
||||
|
||||
private final BufferedWriter writer;
|
||||
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
|
||||
|
||||
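// Pairs every EOSC datasource id with its deduplicated master id; the FULL JOIN also keeps
// datasources without a dedup record, whose master is then derived from the datasource id itself.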
private static final String QUERY = "SELECT dso.id datasource, d.id master FROM " +
|
||||
"(SELECT id FROM dsm_services WHERE id like 'eosc%') dso " +
|
||||
"FULL JOIN " +
|
||||
"(SELECT id, duplicate FROM dsm_dedup_services WHERE duplicate like 'eosc%')d " +
|
||||
"ON dso.id = d.duplicate";
|
||||

    public static void main(final String[] args) throws Exception {
        final ArgumentApplicationParser parser = new ArgumentApplicationParser(
            IOUtils
                .toString(
                    ReadMasterDatasourceFromDB.class
                        .getResourceAsStream(
                            "/eu/dnetlib/dhp/bulktag/datasourcemaster_parameters.json")));

        parser.parseArgument(args);

        final String dbUrl = parser.get("postgresUrl");
        final String dbUser = parser.get("postgresUser");
        final String dbPassword = parser.get("postgresPassword");
        final String hdfsPath = parser.get("hdfsPath");
        final String hdfsNameNode = parser.get("hdfsNameNode");

        try (
            final ReadMasterDatasourceFromDB rmd = new ReadMasterDatasourceFromDB(hdfsPath, hdfsNameNode, dbUrl, dbUser,
                dbPassword)) {

            log.info("Processing datasources...");
            rmd.execute(QUERY, rmd::datasourceMasterMap);

        }
    }

    public void execute(final String sql, final Function<ResultSet, DatasourceMaster> producer) {

        dbClient.processResults(sql, rs -> writeMap(producer.apply(rs)));
    }

    public DatasourceMaster datasourceMasterMap(ResultSet rs) {
        try {
            DatasourceMaster dm = new DatasourceMaster();
            String datasource = rs.getString("datasource");
            dm.setDatasource(datasource);
            String master = rs.getString("master");
            if (StringUtils.isNotBlank(master))
                dm.setMaster(OafMapperUtils.createOpenaireId(10, master, true));
            else
                dm.setMaster(OafMapperUtils.createOpenaireId(10, datasource, true));
            return dm;

        } catch (final SQLException e) {
            throw new RuntimeException(e);
        }
    }

    @Override
    public void close() throws IOException {
        dbClient.close();
        writer.close();
    }

    public ReadMasterDatasourceFromDB(
        final String hdfsPath, String hdfsNameNode, final String dbUrl, final String dbUser, final String dbPassword)
        throws IOException {

        this.dbClient = new DbClient(dbUrl, dbUser, dbPassword);

        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", hdfsNameNode);

        FileSystem fileSystem = FileSystem.get(conf);
        Path hdfsWritePath = new Path(hdfsPath);
        FSDataOutputStream fsDataOutputStream = null;
        if (fileSystem.exists(hdfsWritePath)) {
            fsDataOutputStream = fileSystem.append(hdfsWritePath);
        } else {
            fsDataOutputStream = fileSystem.create(hdfsWritePath);
        }

        this.writer = new BufferedWriter(new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8));
    }
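    // writeMap serialises each DatasourceMaster as one JSON object per line; this
    // line-oriented JSON is the format SparkEoscBulkTag later reads back from the
    // datasourceMapPath file.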
    protected void writeMap(final DatasourceMaster dm) {
        try {
            writer.write(OBJECT_MAPPER.writeValueAsString(dm));
            writer.newLine();
        } catch (final IOException e) {
            throw new RuntimeException(e);
        }
    }

}

@@ -0,0 +1,170 @@

package eu.dnetlib.dhp.bulktag.eosc;

import static eu.dnetlib.dhp.PropagationConstant.readPath;
import static eu.dnetlib.dhp.PropagationConstant.removeOutputDir;
import static eu.dnetlib.dhp.bulktag.community.TaggingConstants.*;
import static eu.dnetlib.dhp.bulktag.community.TaggingConstants.TAGGING_TRUST;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
import static eu.dnetlib.dhp.schema.common.ModelConstants.DNET_PROVENANCE_ACTIONS;

import java.io.Serializable;
import java.util.*;
import java.util.stream.Collectors;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.ForeachFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.bulktag.SparkBulkTagJob;
import eu.dnetlib.dhp.bulktag.community.*;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.oaf.*;
import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;

/**
 * @author miriam.baglioni
 * @Date 21/07/22
 */
public class SparkEoscBulkTag implements Serializable {

    private static final Logger log = LoggerFactory.getLogger(SparkEoscBulkTag.class);
    public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    public static void main(String[] args) throws Exception {
        String jsonConfiguration = IOUtils
            .toString(
                SparkEoscBulkTag.class
                    .getResourceAsStream(
                        "/eu/dnetlib/dhp/bulktag/input_eosc_bulkTag_parameters.json"));

        final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
        parser.parseArgument(args);

        Boolean isSparkSessionManaged = Optional
            .ofNullable(parser.get("isSparkSessionManaged"))
            .map(Boolean::valueOf)
            .orElse(Boolean.TRUE);
        log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

        final String inputPath = parser.get("sourcePath");
        log.info("inputPath: {}", inputPath);

        final String workingPath = parser.get("workingPath");
        log.info("workingPath: {}", workingPath);

        String datasourceMapPath = parser.get("datasourceMapPath");
        log.info("datasourceMapPath: {}", datasourceMapPath);

        final String resultClassName = parser.get("resultTableName");
        log.info("resultTableName: {}", resultClassName);

        Class<? extends Result> resultClazz = (Class<? extends Result>) Class.forName(resultClassName);

        SparkConf conf = new SparkConf();
        CommunityConfiguration cc;

        runWithSparkSession(
            conf,
            isSparkSessionManaged,
            spark -> {
                removeOutputDir(spark, workingPath);
                execBulkTag(spark, inputPath, workingPath, datasourceMapPath, resultClazz);
            });
    }
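    // execBulkTag first materialises the tagged records under workingPath and then copies
    // them back over inputPath, so the input dataset is updated in place.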
    private static <R extends Result> void execBulkTag(
        SparkSession spark,
        String inputPath,
        String workingPath,
        String datasourceMapPath,
        Class<R> resultClazz) {

        List<String> hostedByList = readPath(spark, datasourceMapPath, DatasourceMaster.class)
            .map((MapFunction<DatasourceMaster, String>) dm -> dm.getMaster(), Encoders.STRING())
            .collectAsList();

        readPath(spark, inputPath, resultClazz)
            .map(patchResult(), Encoders.bean(resultClazz))
            .filter(Objects::nonNull)
            .map(
                (MapFunction<R, R>) value -> enrich(value, hostedByList),
                Encoders.bean(resultClazz))
            .write()
            .mode(SaveMode.Overwrite)
            .option("compression", "gzip")
            .json(workingPath);

        readPath(spark, workingPath, resultClazz)
            .write()
            .mode(SaveMode.Overwrite)
            .option("compression", "gzip")
            .json(inputPath);

    }
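    // enrich adds the "eosc" context to a result when at least one of its instances is
    // hosted by an EOSC datasource (hostedByList) or the result already carries EOSC
    // interoperability guidelines, and only if the "eosc" context is not already present.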
    private static <R extends Result> R enrich(R value, List<String> hostedByList) {
        if (value
            .getInstance()
            .stream()
            .anyMatch(
                i -> (hostedByList.contains(i.getHostedby().getKey())) ||
                    (value.getEoscifguidelines() != null && value.getEoscifguidelines().size() > 0))
            &&
            !value.getContext().stream().anyMatch(c -> c.getId().equals("eosc"))) {
            Context context = new Context();
            context.setId("eosc");
            context
                .setDataInfo(
                    Arrays
                        .asList(
                            OafMapperUtils
                                .dataInfo(
                                    false, BULKTAG_DATA_INFO_TYPE, true, false,
                                    OafMapperUtils
                                        .qualifier(
                                            CLASS_ID_DATASOURCE, CLASS_NAME_BULKTAG_DATASOURCE,
                                            DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS),
                                    TAGGING_TRUST)));
            value.getContext().add(context);

        }
        return value;

    }

    public static <R> Dataset<R> readPath(
        SparkSession spark, String inputPath, Class<R> clazz) {
        return spark
            .read()
            .textFile(inputPath)
            .map((MapFunction<String, R>) value -> OBJECT_MAPPER.readValue(value, clazz), Encoders.bean(clazz));
    }

    // TODO remove this hack as soon as the values fixed by this method are provided as non-null
    private static <R extends Result> MapFunction<R, R> patchResult() {
        return r -> {
            if (r.getDataInfo().getDeletedbyinference() == null) {
                r.getDataInfo().setDeletedbyinference(false);
            }
            if (r.getContext() == null) {
                r.setContext(new ArrayList<>());
            }
            return r;
        };
    }

}

@@ -1,13 +1,10 @@

package eu.dnetlib.dhp.bulktag;
package eu.dnetlib.dhp.bulktag.eosc;

import static eu.dnetlib.dhp.PropagationConstant.readPath;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;

@@ -21,35 +18,15 @@ import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.*;
import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;

public class SparkEoscTag {
    private static final Logger log = LoggerFactory.getLogger(SparkEoscTag.class);
    public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
    public static final Qualifier EOSC_QUALIFIER = OafMapperUtils
        .qualifier(
            "EOSC",
            "European Open Science Cloud",
            ModelConstants.DNET_SUBJECT_TYPOLOGIES, ModelConstants.DNET_SUBJECT_TYPOLOGIES);
    public static final DataInfo EOSC_DATAINFO = OafMapperUtils
        .dataInfo(
            false, "propagation", true, false,
            OafMapperUtils
                .qualifier(
                    "propagation:subject", "Inferred by OpenAIRE",
                    ModelConstants.DNET_PROVENANCE_ACTIONS, ModelConstants.DNET_PROVENANCE_ACTIONS),
            "0.9");
    public final static StructuredProperty EOSC_NOTEBOOK = OafMapperUtils
        .structuredProperty(
            "EOSC::Jupyter Notebook", EOSC_QUALIFIER, EOSC_DATAINFO);
    public final static StructuredProperty EOSC_GALAXY = OafMapperUtils
        .structuredProperty(
            "EOSC::Galaxy Workflow", EOSC_QUALIFIER, EOSC_DATAINFO);
    public final static StructuredProperty EOSC_TWITTER = OafMapperUtils
        .structuredProperty(
            "EOSC::Twitter Data", EOSC_QUALIFIER, EOSC_DATAINFO);
    public static final String EOSC_GALAXY_WORKFLOW = "EOSC::Galaxy Workflow";
    public static final String EOSC_TWITTER_DATA = "EOSC::Twitter Data";
    public static final String EOSC_JUPYTER_NOTEBOOK = "EOSC::Jupyter Notebook";
    public static final String COMPLIES_WITH = "compliesWith";

    public static void main(String[] args) throws Exception {
        String jsonConfiguration = IOUtils

@@ -84,29 +61,35 @@ public class SparkEoscTag {
            });
    }

    public static EoscIfGuidelines newInstance(String code, String label, String url, String semantics) {
        EoscIfGuidelines eig = new EoscIfGuidelines();
        eig.setCode(code);
        eig.setLabel(label);
        eig.setUrl(url);
        eig.setSemanticRelation(semantics);
        return eig;

    }

    private static void execEoscTag(SparkSession spark, String inputPath, String workingPath) {

        readPath(spark, inputPath + "/software", Software.class)
            .map((MapFunction<Software, Software>) s -> {
                List<StructuredProperty> sbject;
                if (!Optional.ofNullable(s.getSubject()).isPresent())
                    s.setSubject(new ArrayList<>());
                sbject = s.getSubject();

                if (containsCriteriaNotebook(s)) {
                    sbject.add(EOSC_NOTEBOOK);
                    if (sbject.stream().anyMatch(sb -> sb.getValue().equals("EOSC Jupyter Notebook"))) {
                        sbject = sbject.stream().map(sb -> {
                            if (sb.getValue().equals("EOSC Jupyter Notebook")) {
                                return null;
                            }
                            return sb;
                        }).filter(Objects::nonNull).collect(Collectors.toList());
                        s.setSubject(sbject);
                    }
                    if (!Optional.ofNullable(s.getEoscifguidelines()).isPresent())
                        s.setEoscifguidelines(new ArrayList<>());
                    addEIG(
                        s.getEoscifguidelines(), EOSC_JUPYTER_NOTEBOOK, EOSC_JUPYTER_NOTEBOOK, "",
                        COMPLIES_WITH);

                }
                if (containsCriteriaGalaxy(s)) {
                    sbject.add(EOSC_GALAXY);
                    if (!Optional.ofNullable(s.getEoscifguidelines()).isPresent())
                        s.setEoscifguidelines(new ArrayList<>());

                    addEIG(
                        s.getEoscifguidelines(), EOSC_GALAXY_WORKFLOW, EOSC_GALAXY_WORKFLOW, "", COMPLIES_WITH);
                }
                return s;
            }, Encoders.bean(Software.class))

@@ -123,15 +106,17 @@ public class SparkEoscTag {

        readPath(spark, inputPath + "/otherresearchproduct", OtherResearchProduct.class)
            .map((MapFunction<OtherResearchProduct, OtherResearchProduct>) orp -> {
                List<StructuredProperty> sbject;
                if (!Optional.ofNullable(orp.getSubject()).isPresent())
                    orp.setSubject(new ArrayList<>());
                sbject = orp.getSubject();

                if (!Optional.ofNullable(orp.getEoscifguidelines()).isPresent())
                    orp.setEoscifguidelines(new ArrayList<>());

                if (containsCriteriaGalaxy(orp)) {
                    sbject.add(EOSC_GALAXY);
                    addEIG(
                        orp.getEoscifguidelines(), EOSC_GALAXY_WORKFLOW, EOSC_GALAXY_WORKFLOW, "",
                        COMPLIES_WITH);
                }
                if (containscriteriaTwitter(orp)) {
                    sbject.add(EOSC_TWITTER);
                    addEIG(orp.getEoscifguidelines(), EOSC_TWITTER_DATA, EOSC_TWITTER_DATA, "", COMPLIES_WITH);
                }
                return orp;
            }, Encoders.bean(OtherResearchProduct.class))

@@ -148,12 +133,11 @@ public class SparkEoscTag {

        readPath(spark, inputPath + "/dataset", Dataset.class)
            .map((MapFunction<Dataset, Dataset>) d -> {
                List<StructuredProperty> sbject;
                if (!Optional.ofNullable(d.getSubject()).isPresent())
                    d.setSubject(new ArrayList<>());
                sbject = d.getSubject();

                if (!Optional.ofNullable(d.getEoscifguidelines()).isPresent())
                    d.setEoscifguidelines(new ArrayList<>());
                if (containscriteriaTwitter(d)) {
                    sbject.add(EOSC_TWITTER);
                    addEIG(d.getEoscifguidelines(), EOSC_TWITTER_DATA, EOSC_TWITTER_DATA, "", COMPLIES_WITH);
                }
                return d;
            }, Encoders.bean(Dataset.class))

@@ -169,6 +153,12 @@ public class SparkEoscTag {
            .json(inputPath + "/dataset");
    }

    private static void addEIG(List<EoscIfGuidelines> eoscifguidelines, String code, String label, String url,
        String sem) {
        if (!eoscifguidelines.stream().anyMatch(eig -> eig.getCode().equals(code)))
            eoscifguidelines.add(newInstance(code, label, url, sem));
    }

    private static boolean containscriteriaTwitter(Result r) {
        Set<String> words = getWordsSP(r.getTitle());
        words.addAll(getWordsF(r.getDescription()));

@@ -177,10 +167,12 @@ public class SparkEoscTag {
            (words.contains("data") || words.contains("dataset")))
            return true;

        if (r.getSubject().stream().anyMatch(sbj -> sbj.getValue().toLowerCase().contains("twitter")) &&
            r.getSubject().stream().anyMatch(sbj -> sbj.getValue().toLowerCase().contains("data")))
            return true;
        return false;
        return Optional
            .ofNullable(r.getSubject())
            .map(
                s -> s.stream().anyMatch(sbj -> sbj.getValue().toLowerCase().contains("twitter")) &&
                    s.stream().anyMatch(sbj -> sbj.getValue().toLowerCase().contains("data")))
            .orElse(false);
    }
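    // In containscriteriaTwitter above and containsCriteriaGalaxy below, the Optional wrapper
    // replaces the previous direct r.getSubject() access, so results with a null subject list
    // no longer trigger a NullPointerException.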
    private static boolean containsCriteriaGalaxy(Result r) {

@@ -190,13 +182,17 @@ public class SparkEoscTag {
            words.contains("workflow"))
            return true;

        if (r.getSubject().stream().anyMatch(sbj -> sbj.getValue().toLowerCase().contains("galaxy")) &&
            r.getSubject().stream().anyMatch(sbj -> sbj.getValue().toLowerCase().contains("workflow")))
            return true;
        return false;
        return Optional
            .ofNullable(r.getSubject())
            .map(
                s -> s.stream().anyMatch(sbj -> sbj.getValue().toLowerCase().contains("galaxy")) &&
                    s.stream().anyMatch(sbj -> sbj.getValue().toLowerCase().contains("workflow")))
            .orElse(false);
    }

    private static boolean containsCriteriaNotebook(Software s) {
        if (!Optional.ofNullable(s.getSubject()).isPresent())
            return false;
        if (s.getSubject().stream().anyMatch(sbj -> sbj.getValue().toLowerCase().contains("jupyter")))
            return true;
        if (s

@@ -212,13 +208,6 @@ public class SparkEoscTag {
        return false;
    }

    private static Set<String> getSubjects(List<StructuredProperty> s) {
        Set<String> subjects = new HashSet<>();
        s.stream().forEach(sbj -> subjects.addAll(Arrays.asList(sbj.getValue().toLowerCase().split(" "))));
        s.stream().forEach(sbj -> subjects.add(sbj.getValue().toLowerCase()));
        return subjects;
    }

    private static Set<String> getWordsSP(List<StructuredProperty> elem) {
        Set<String> words = new HashSet<>();
        Optional

@@ -242,10 +231,7 @@ public class SparkEoscTag {
                    t -> words
                        .addAll(
                            Arrays.asList(t.getValue().toLowerCase().replaceAll("[^a-zA-Z ]", "").split(" ")))));
//        elem
//            .forEach(
//                t -> words.addAll(Arrays.asList(t.getValue().toLowerCase().replaceAll("[^a-zA-Z ]", "").split(" "))));
        return words;

        return words;
    }
}

@@ -0,0 +1,32 @@
[
    {
        "paramName": "p",
        "paramLongName": "hdfsPath",
        "paramDescription": "the path where the sequential file is stored",
        "paramRequired": true
    },
    {
        "paramName": "nn",
        "paramLongName": "hdfsNameNode",
        "paramDescription": "the name node on hdfs",
        "paramRequired": true
    },
    {
        "paramName": "pgurl",
        "paramLongName": "postgresUrl",
        "paramDescription": "postgres url, example: jdbc:postgresql://localhost:5432/testdb",
        "paramRequired": true
    },
    {
        "paramName": "pguser",
        "paramLongName": "postgresUser",
        "paramDescription": "postgres user",
        "paramRequired": false
    },
    {
        "paramName": "pgpasswd",
        "paramLongName": "postgresPassword",
        "paramDescription": "postgres password",
        "paramRequired": false
    }
]

@@ -0,0 +1,34 @@
[
    {
        "paramName": "s",
        "paramLongName": "sourcePath",
        "paramDescription": "the path of the sequential file to read",
        "paramRequired": true
    },
    {
        "paramName": "dmp",
        "paramLongName": "datasourceMapPath",
        "paramDescription": "the path where the datasource-master association has been stored",
        "paramRequired": true
    },
    {
        "paramName": "tn",
        "paramLongName": "resultTableName",
        "paramDescription": "the name of the result table we are currently working on",
        "paramRequired": true
    },
    {
        "paramName": "wp",
        "paramLongName": "workingPath",
        "paramDescription": "the path used to store temporary output files",
        "paramRequired": true
    },
    {
        "paramName": "ssm",
        "paramLongName": "isSparkSessionManaged",
        "paramDescription": "true if the spark session is managed, false otherwise",
        "paramRequired": false
    }
]

@@ -16,6 +16,21 @@
        <name>outputPath</name>
        <description>the output path</description>
    </property>

    <property>
        <name>postgresURL</name>
        <description>the url of the postgres server to query</description>
    </property>
    <property>
        <name>postgresUser</name>
        <description>the username to access the postgres db</description>
    </property>
    <property>
        <name>postgresPassword</name>
        <description>the postgres password</description>
    </property>

</parameters>

<global>

@@ -211,7 +226,7 @@
            <master>yarn-cluster</master>
            <mode>cluster</mode>
            <name>EOSC_tagging</name>
            <class>eu.dnetlib.dhp.bulktag.SparkEoscTag</class>
            <class>eu.dnetlib.dhp.bulktag.eosc.SparkEoscTag</class>
            <jar>dhp-enrichment-${projectVersion}.jar</jar>
            <spark-opts>
                --num-executors=${sparkExecutorNumber}

@@ -226,10 +241,132 @@
            <arg>--sourcePath</arg><arg>${outputPath}</arg>
            <arg>--workingPath</arg><arg>${workingDir}/eoscTag</arg>
        </spark>
        <ok to="End"/>
        <ok to="eosc_get_datasource_master"/>
        <error to="Kill"/>
    </action>

    <action name="eosc_get_datasource_master">
        <java>
            <main-class>eu.dnetlib.dhp.bulktag.eosc.ReadMasterDatasourceFromDB</main-class>
            <arg>--hdfsPath</arg><arg>${workingDir}/datasourcemaster</arg>
            <arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
            <arg>--postgresUrl</arg><arg>${postgresURL}</arg>
            <arg>--postgresUser</arg><arg>${postgresUser}</arg>
            <arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
        </java>
        <ok to="fork_eosc_context_tag"/>
        <error to="Kill"/>
    </action>

    <fork name="fork_eosc_context_tag">
        <path start="eosc_context_tag_publication"/>
        <path start="eosc_context_tag_dataset"/>
        <path start="eosc_context_tag_otherresearchproduct"/>
        <path start="eosc_context_tag_software"/>
    </fork>
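    <!-- The four actions below run in parallel, one per result type; each one reads and
         rewrites its own subpath of ${outputPath} and they join at wait_eosc_context_tag. -->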
    <action name="eosc_context_tag_publication">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn-cluster</master>
            <mode>cluster</mode>
            <name>EOSC_tagging</name>
            <class>eu.dnetlib.dhp.bulktag.eosc.SparkEoscBulkTag</class>
            <jar>dhp-enrichment-${projectVersion}.jar</jar>
            <spark-opts>
                --num-executors=${sparkExecutorNumber}
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${outputPath}/publication</arg>
            <arg>--workingPath</arg><arg>${workingDir}/eoscContextTag/publication</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
            <arg>--datasourceMapPath</arg><arg>${workingDir}/datasourcemaster</arg>
        </spark>
        <ok to="wait_eosc_context_tag"/>
        <error to="Kill"/>
    </action>

    <action name="eosc_context_tag_dataset">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn-cluster</master>
            <mode>cluster</mode>
            <name>EOSC_tagging</name>
            <class>eu.dnetlib.dhp.bulktag.eosc.SparkEoscBulkTag</class>
            <jar>dhp-enrichment-${projectVersion}.jar</jar>
            <spark-opts>
                --num-executors=${sparkExecutorNumber}
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${outputPath}/dataset</arg>
            <arg>--workingPath</arg><arg>${workingDir}/eoscContextTag/dataset</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
            <arg>--datasourceMapPath</arg><arg>${workingDir}/datasourcemaster</arg>
        </spark>
        <ok to="wait_eosc_context_tag"/>
        <error to="Kill"/>
    </action>
    <action name="eosc_context_tag_software">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn-cluster</master>
            <mode>cluster</mode>
            <name>EOSC_tagging</name>
            <class>eu.dnetlib.dhp.bulktag.eosc.SparkEoscBulkTag</class>
            <jar>dhp-enrichment-${projectVersion}.jar</jar>
            <spark-opts>
                --num-executors=${sparkExecutorNumber}
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${outputPath}/software</arg>
            <arg>--workingPath</arg><arg>${workingDir}/eoscContextTag/software</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
            <arg>--datasourceMapPath</arg><arg>${workingDir}/datasourcemaster</arg>
        </spark>
        <ok to="wait_eosc_context_tag"/>
        <error to="Kill"/>
    </action>
    <action name="eosc_context_tag_otherresearchproduct">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn-cluster</master>
            <mode>cluster</mode>
            <name>EOSC_tagging</name>
            <class>eu.dnetlib.dhp.bulktag.eosc.SparkEoscBulkTag</class>
            <jar>dhp-enrichment-${projectVersion}.jar</jar>
            <spark-opts>
                --num-executors=${sparkExecutorNumber}
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${outputPath}/otherresearchproduct</arg>
            <arg>--workingPath</arg><arg>${workingDir}/eoscContextTag/otherresearchproduct</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
            <arg>--datasourceMapPath</arg><arg>${workingDir}/datasourcemaster</arg>
        </spark>
        <ok to="wait_eosc_context_tag"/>
        <error to="Kill"/>
    </action>
    <join name="wait_eosc_context_tag" to="End"/>
    <end name="End"/>

</workflow-app>

@@ -0,0 +1,162 @@

package eu.dnetlib.dhp.bulktag;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * @author miriam.baglioni
 * @Date 22/07/22
 */
import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.bulktag.eosc.SparkEoscBulkTag;
import eu.dnetlib.dhp.schema.oaf.Dataset;
import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct;
import eu.dnetlib.dhp.schema.oaf.Software;
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;

//"50|475c1990cbb2::0fecfb874d9395aa69d2f4d7cd1acbea" has instance hostedby eosc
//"50|475c1990cbb2::3185cd5d8a2b0a06bb9b23ef11748eb1" has instance hostedby eosc
//"50|475c1990cbb2::449f28eefccf9f70c04ad70d61e041c7" has two instance one hostedby eosc
//"50|475c1990cbb2::3894c94123e96df8a21249957cf160cb" has EoscTag
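// SparkEoscBulkTag rewrites its sourcePath in place, so the assertions below re-read
// workingDir/input/dataset after the job instead of a separate output directory.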
public class EOSCContextTaggingTest {
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    private static SparkSession spark;

    private static Path workingDir;

    private static final Logger log = LoggerFactory.getLogger(EOSCContextTaggingTest.class);

    @BeforeAll
    public static void beforeAll() throws IOException {
        workingDir = Files.createTempDirectory(EOSCContextTaggingTest.class.getSimpleName());
        log.info("using work dir {}", workingDir);

        SparkConf conf = new SparkConf();
        conf.setAppName(EOSCContextTaggingTest.class.getSimpleName());

        conf.setMaster("local[*]");
        conf.set("spark.driver.host", "localhost");
        conf.set("hive.metastore.local", "true");
        conf.set("spark.ui.enabled", "false");
        conf.set("spark.sql.warehouse.dir", workingDir.toString());
        conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

        spark = SparkSession
            .builder()
            .appName(EOSCContextTaggingTest.class.getSimpleName())
            .config(conf)
            .getOrCreate();
    }

    @AfterAll
    public static void afterAll() throws IOException {
        FileUtils.deleteDirectory(workingDir.toFile());
        spark.stop();
    }

    @Test
    void EoscContextTagTest() throws Exception {

        spark
            .read()
            .textFile(getClass().getResource("/eu/dnetlib/dhp/bulktag/eosc/dataset/dataset_10.json").getPath())
            .map(
                (MapFunction<String, Dataset>) value -> OBJECT_MAPPER.readValue(value, Dataset.class),
                Encoders.bean(Dataset.class))
            .write()
            .mode(SaveMode.Overwrite)
            .option("compression", "gzip")
            .json(workingDir.toString() + "/input/dataset");

        SparkEoscBulkTag
            .main(
                new String[] {
                    "-isSparkSessionManaged", Boolean.FALSE.toString(),
                    "-sourcePath",
                    workingDir.toString() + "/input/dataset",
                    "-workingPath", workingDir.toString() + "/working/dataset",
                    "-datasourceMapPath",
                    getClass()
                        .getResource("/eu/dnetlib/dhp/bulktag/eosc/datasourceMasterAssociation/datasourceMaster")
                        .getPath(),
                    "-resultTableName", "eu.dnetlib.dhp.schema.oaf.Dataset"
                });

        final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

        JavaRDD<Dataset> tmp = sc
            .textFile(workingDir.toString() + "/input/dataset")
            .map(item -> OBJECT_MAPPER.readValue(item, Dataset.class));

        Assertions.assertEquals(10, tmp.count());

        Assertions
            .assertEquals(
                4,
                tmp
                    .filter(
                        s -> s.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
                    .count());

        Assertions
            .assertEquals(
                1,
                tmp
                    .filter(
                        d -> d.getId().equals("50|475c1990cbb2::0fecfb874d9395aa69d2f4d7cd1acbea")
                            &&
                            d.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
                    .count());
        Assertions
            .assertEquals(
                1,
                tmp
                    .filter(
                        d -> d.getId().equals("50|475c1990cbb2::3185cd5d8a2b0a06bb9b23ef11748eb1")
                            &&
                            d.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
                    .count());

        Assertions
            .assertEquals(
                1,
                tmp
                    .filter(
                        d -> d.getId().equals("50|475c1990cbb2::3894c94123e96df8a21249957cf160cb")
                            &&
                            d.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
                    .count());
    }

}

@@ -1,21 +1,17 @@

package eu.dnetlib.dhp.bulktag;

import static eu.dnetlib.dhp.bulktag.community.TaggingConstants.ZENODO_COMMUNITY_INDICATOR;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;

@@ -27,6 +23,7 @@ import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.bulktag.eosc.SparkEoscTag;
import eu.dnetlib.dhp.schema.oaf.*;

public class EOSCTagJobTest {

@@ -126,10 +123,23 @@ public class EOSCTagJobTest {
            .assertEquals(
                4,
                tmp
                    .filter(s -> s.getEoscifguidelines() != null)
                    .filter(
                        s -> s.getSubject().stream().anyMatch(sbj -> sbj.getValue().equals("EOSC::Jupyter Notebook")))
                        s -> s
                            .getEoscifguidelines()
                            .stream()
                            .anyMatch(eig -> eig.getCode().equals("EOSC::Jupyter Notebook")))
                    .count());

        Assertions
            .assertEquals(
                1, tmp
                    .filter(sw -> sw.getId().equals("50|od______1582::4132f5ec9496f0d6adc7b00a50a56ff4"))
                    .collect()
                    .get(0)
                    .getEoscifguidelines()
                    .size());

        Assertions
            .assertEquals(
                1, tmp

@@ -140,6 +150,16 @@ public class EOSCTagJobTest {
                    .size());
        Assertions
            .assertTrue(
                tmp
                    .filter(sw -> sw.getId().equals("50|od______1582::4132f5ec9496f0d6adc7b00a50a56ff4"))
                    .collect()
                    .get(0)
                    .getEoscifguidelines()
                    .stream()
                    .anyMatch(s -> s.getCode().equals("EOSC::Jupyter Notebook")));

        Assertions
            .assertFalse(
                tmp
                    .filter(sw -> sw.getId().equals("50|od______1582::4132f5ec9496f0d6adc7b00a50a56ff4"))
                    .collect()

@@ -166,16 +186,24 @@ public class EOSCTagJobTest {
                    .stream()
                    .anyMatch(s -> s.getValue().equals("EOSC::Jupyter Notebook")));

        Assertions
            .assertTrue(
                tmp
                    .filter(sw -> sw.getId().equals("50|od______1582::501b25d420f808c8eddcd9b16e917f11"))
                    .collect()
                    .get(0)
                    .getEoscifguidelines() == null);

        Assertions
            .assertEquals(
                9, tmp
                8, tmp
                    .filter(sw -> sw.getId().equals("50|od______1582::581621232a561b7e8b4952b18b8b0e56"))
                    .collect()
                    .get(0)
                    .getSubject()
                    .size());
        Assertions
            .assertTrue(
            .assertFalse(
                tmp
                    .filter(sw -> sw.getId().equals("50|od______1582::581621232a561b7e8b4952b18b8b0e56"))
                    .collect()

@@ -183,6 +211,23 @@ public class EOSCTagJobTest {
                    .getSubject()
                    .stream()
                    .anyMatch(s -> s.getValue().equals("EOSC::Jupyter Notebook")));
        Assertions
            .assertEquals(
                1, tmp
                    .filter(sw -> sw.getId().equals("50|od______1582::581621232a561b7e8b4952b18b8b0e56"))
                    .collect()
                    .get(0)
                    .getEoscifguidelines()
                    .size());
        Assertions
            .assertTrue(
                tmp
                    .filter(sw -> sw.getId().equals("50|od______1582::581621232a561b7e8b4952b18b8b0e56"))
                    .collect()
                    .get(0)
                    .getEoscifguidelines()
                    .stream()
                    .anyMatch(s -> s.getCode().equals("EOSC::Jupyter Notebook")));

        Assertions
            .assertEquals(

@@ -201,17 +246,24 @@ public class EOSCTagJobTest {
                    .getSubject()
                    .stream()
                    .anyMatch(s -> s.getValue().equals("EOSC::Jupyter Notebook")));
        Assertions
            .assertTrue(
                tmp
                    .filter(sw -> sw.getId().equals("50|od______1582::5aec1186054301b66c0c5dc35972a589"))
                    .collect()
                    .get(0)
                    .getEoscifguidelines() == null);

        Assertions
            .assertEquals(
                9, tmp
                8, tmp
                    .filter(sw -> sw.getId().equals("50|od______1582::639909adfad9d708308f2aedb733e4a0"))
                    .collect()
                    .get(0)
                    .getSubject()
                    .size());
        Assertions
            .assertTrue(
            .assertFalse(
                tmp
                    .filter(sw -> sw.getId().equals("50|od______1582::639909adfad9d708308f2aedb733e4a0"))
                    .collect()

@@ -219,14 +271,31 @@ public class EOSCTagJobTest {
                    .getSubject()
                    .stream()
                    .anyMatch(s -> s.getValue().equals("EOSC::Jupyter Notebook")));
        Assertions
            .assertEquals(
                1,
                tmp
                    .filter(sw -> sw.getId().equals("50|od______1582::639909adfad9d708308f2aedb733e4a0"))
                    .collect()
                    .get(0)
                    .getEoscifguidelines()
                    .size());
        Assertions
            .assertTrue(
                tmp
                    .filter(sw -> sw.getId().equals("50|od______1582::639909adfad9d708308f2aedb733e4a0"))
                    .collect()
                    .get(0)
                    .getEoscifguidelines()
                    .stream()
                    .anyMatch(s -> s.getCode().equals("EOSC::Jupyter Notebook")));

        List<StructuredProperty> subjects = tmp
            .filter(sw -> sw.getId().equals("50|od______1582::6e7a9b21a2feef45673890432af34244"))
            .collect()
            .get(0)
            .getSubject();
        Assertions.assertEquals(8, subjects.size());
        Assertions.assertTrue(subjects.stream().anyMatch(s -> s.getValue().equals("EOSC::Jupyter Notebook")));
        Assertions.assertEquals(7, subjects.size());
        Assertions.assertTrue(subjects.stream().anyMatch(s -> s.getValue().equals("jupyter")));
        Assertions.assertTrue(subjects.stream().anyMatch(s -> s.getValue().equals("Modeling and Simulation")));
        Assertions.assertTrue(subjects.stream().anyMatch(s -> s.getValue().equals("structure granulaire")));

@@ -250,6 +319,17 @@ public class EOSCTagJobTest {
                .filter(
                    ds -> ds.getSubject().stream().anyMatch(sbj -> sbj.getValue().equals("EOSC::Jupyter Notebook")))
                .count());
        Assertions
            .assertEquals(
                0, sc
                    .textFile(workingDir.toString() + "/input/dataset")
                    .map(item -> OBJECT_MAPPER.readValue(item, Dataset.class))
                    .filter(
                        ds -> ds
                            .getEoscifguidelines()
                            .stream()
                            .anyMatch(eig -> eig.getCode().equals("EOSC::Jupyter Notebook")))
                    .count());

        Assertions
            .assertEquals(

@@ -264,7 +344,22 @@ public class EOSCTagJobTest {
                .textFile(workingDir.toString() + "/input/otherresearchproduct")
                .map(item -> OBJECT_MAPPER.readValue(item, OtherResearchProduct.class))
                .filter(
                    ds -> ds.getSubject().stream().anyMatch(sbj -> sbj.getValue().equals("EOSC::Jupyter Notebook")))
                    orp -> orp
                        .getSubject()
                        .stream()
                        .anyMatch(sbj -> sbj.getValue().equals("EOSC::Jupyter Notebook")))
                .count());

        Assertions
            .assertEquals(
                0, sc
                    .textFile(workingDir.toString() + "/input/otherresearchproduct")
                    .map(item -> OBJECT_MAPPER.readValue(item, OtherResearchProduct.class))
                    .filter(
                        orp -> orp
                            .getSubject()
                            .stream()
                            .anyMatch(eig -> eig.getValue().equals("EOSC::Jupyter Notebook")))
                    .count());

        // spark.stop();

@@ -326,22 +421,41 @@ public class EOSCTagJobTest {

        Assertions
            .assertEquals(
                1,
                0,
                tmp
                    .filter(
                        s -> s.getSubject().stream().anyMatch(sbj -> sbj.getValue().equals("EOSC::Galaxy Workflow")))
                    .count());
        Assertions
            .assertEquals(
                1,
                tmp
                    .filter(
                        s -> s.getEoscifguidelines() != null)
                    .count());
        Assertions
            .assertEquals(
                1,
                tmp
                    .filter(
                        s -> s.getEoscifguidelines() != null)
                    .filter(
                        s -> s
                            .getEoscifguidelines()
                            .stream()
                            .anyMatch(eig -> eig.getCode().equals("EOSC::Galaxy Workflow")))
                    .count());

        Assertions
            .assertEquals(
                2, tmp
                1, tmp
                    .filter(sw -> sw.getId().equals("50|od______1582::4132f5ec9496f0d6adc7b00a50a56ff4"))
                    .collect()
                    .get(0)
                    .getSubject()
                    .size());
        Assertions
            .assertTrue(
            .assertFalse(
                tmp
                    .filter(sw -> sw.getId().equals("50|od______1582::4132f5ec9496f0d6adc7b00a50a56ff4"))
                    .collect()

@@ -350,6 +464,24 @@ public class EOSCTagJobTest {
                    .stream()
                    .anyMatch(s -> s.getValue().equals("EOSC::Galaxy Workflow")));

        Assertions
            .assertEquals(
                1, tmp
                    .filter(sw -> sw.getId().equals("50|od______1582::4132f5ec9496f0d6adc7b00a50a56ff4"))
                    .collect()
                    .get(0)
                    .getEoscifguidelines()
                    .size());
        Assertions
            .assertTrue(
                tmp
                    .filter(sw -> sw.getId().equals("50|od______1582::4132f5ec9496f0d6adc7b00a50a56ff4"))
                    .collect()
                    .get(0)
                    .getEoscifguidelines()
                    .stream()
                    .anyMatch(eig -> eig.getCode().equals("EOSC::Galaxy Workflow")));

        Assertions
            .assertEquals(
                5, tmp

@@ -385,22 +517,34 @@ public class EOSCTagJobTest {

        Assertions
            .assertEquals(
                1,
                0,
                orp
                    .filter(
                        s -> s.getSubject().stream().anyMatch(sbj -> sbj.getValue().equals("EOSC::Galaxy Workflow")))
                    .count());
        orp.foreach(o -> System.out.println(OBJECT_MAPPER.writeValueAsString(o)));

        Assertions
            .assertEquals(
                3, orp
                1, orp
                    .filter(o -> o.getEoscifguidelines() != null)
                    .filter(
                        o -> o
                            .getEoscifguidelines()
                            .stream()
                            .anyMatch(eig -> eig.getCode().equals("EOSC::Galaxy Workflow")))
                    .count());

        Assertions
            .assertEquals(
                2, orp
                    .filter(sw -> sw.getId().equals("50|od______2017::0750a4d0782265873d669520f5e33c07"))
                    .collect()
                    .get(0)
                    .getSubject()
                    .size());
        Assertions
            .assertTrue(
            .assertFalse(
                orp
                    .filter(sw -> sw.getId().equals("50|od______2017::0750a4d0782265873d669520f5e33c07"))
                    .collect()

@@ -408,6 +552,23 @@ public class EOSCTagJobTest {
                    .getSubject()
                    .stream()
                    .anyMatch(s -> s.getValue().equals("EOSC::Galaxy Workflow")));
        Assertions
            .assertEquals(
                1, orp
                    .filter(sw -> sw.getId().equals("50|od______2017::0750a4d0782265873d669520f5e33c07"))
                    .collect()
                    .get(0)
                    .getEoscifguidelines()
                    .size());
        Assertions
            .assertTrue(
                orp
                    .filter(sw -> sw.getId().equals("50|od______2017::0750a4d0782265873d669520f5e33c07"))
                    .collect()
                    .get(0)
                    .getEoscifguidelines()
                    .stream()
                    .anyMatch(s -> s.getCode().equals("EOSC::Galaxy Workflow")));

        Assertions
            .assertEquals(

@@ -516,10 +677,20 @@ public class EOSCTagJobTest {

        Assertions
            .assertEquals(
                3,
                0,
                orp
                    .filter(s -> s.getSubject().stream().anyMatch(sbj -> sbj.getValue().equals("EOSC::Twitter Data")))
                    .count());
        Assertions
            .assertEquals(
                3,
                orp
                    .filter(
                        s -> s
                            .getEoscifguidelines()
                            .stream()
                            .anyMatch(eig -> eig.getCode().equals("EOSC::Twitter Data")))
                    .count());

        JavaRDD<Dataset> dats = sc
            .textFile(workingDir.toString() + "/input/dataset")

@@ -531,7 +702,11 @@ public class EOSCTagJobTest {
            .assertEquals(
                3,
                dats
                    .filter(s -> s.getSubject().stream().anyMatch(sbj -> sbj.getValue().equals("EOSC::Twitter Data")))
                    .filter(
                        s -> s
                            .getEoscifguidelines()
                            .stream()
                            .anyMatch(eig -> eig.getCode().equals("EOSC::Twitter Data")))
                    .count());

    }

File diff suppressed because one or more lines are too long
|
@ -0,0 +1,318 @@
|
|||
{"datasource":"eosc________::100percentit::100percentit.100_percent_it_trusted_cloud","master":"10|eosc________::7ef2576047f040612b983a27347471fc"}
|
||||
{"datasource":"eosc________::altec::altec.space-vis_adn_service","master":"10|eosc________::2946c48bbcc514ad76bbbf727d5d8fbc"}
|
||||
{"datasource":"eosc________::astron::astron.","master":"10|eosc________::acb262d4bfdeb6aa9b463a4a6d0d662a"}
|
||||
{"datasource":"eosc________::athena::athena.atmo-flud","master":"10|eosc________::ac448975e1d7f8b0266c8bb3b3992029"}
|
||||
{"datasource":"eosc________::athena::athena.uw-map","master":"10|eosc________::5f2a401cf8ce9dc22a3776cea519b594"}
|
||||
{"datasource":"eosc________::athena::athena.verbal_aggression_analyser_va_analyser","master":"10|eosc________::8b26233e89a50e3754972b1341130494"}
|
||||
{"datasource":"eosc________::authenix::authenix.authenix","master":"10|eosc________::3cd84764da5728473593a580efb29a40"}
|
||||
{"datasource":"eosc________::bineo::bineo.cos4bio","master":"10|eosc________::903e0526a6e56eeaf0e4561aa862ecb8"}
|
||||
{"datasource":"eosc________::blue-cloud::blue-cloud.phytoplankton_eovs","master":"10|eosc________::c2438d79b48baf817956f3856877b3b8"}
|
||||
{"datasource":"eosc________::bsc-es::bsc-es.bdrc_-_barcelona_dust_regional_center","master":"10|eosc________::756664ca614118315840eb8e985e4377"}
|
||||
{"datasource":"eosc________::bsc-es::bsc-es.openebench","master":"10|eosc________::69ed72b873b803feed5ba6ae47548419"}
|
||||
{"datasource":"eosc________::capsh::capsh.dissemin","master":"10|eosc________::e81587742e4107ce83723df17c27cb40"}
|
||||
{"datasource":"eosc________::carlzeissm::carlzeissm.aper","master":"10|eosc________::f3beb9ee5ee293b723e2edd6f990fde3"}
|
||||
{"datasource":"eosc________::ccsd::ccsd.episciences","master":"10|eosc________::e1e9de0dbf4bce79c49338d7cf9327e2"}
|
||||
{"datasource":"eosc________::cds::cds.simbad_simbad_astronomical_database_provides_basic_data_cross-identifications_bibliography_and_measurements_for_astronomical_objects_outside_the_solar_system","master":"10|eosc________::a1e41e71453ac32161f4ac3f5c0e0421"}
|
||||
{"datasource":"eosc________::centerdata::centerdata.surveycodingsorg","master":"10|eosc________::72db73ab253727c889905da50f506d10"}
|
||||
{"datasource":"eosc________::cesga::cesga.finisterrae","master":"10|eosc________::6af4303d93f72744cc4c3c815ed2c9a0"}
|
||||
{"datasource":"eosc________::cesnet::cesnet.metacentrum_cloud","master":"10|eosc________::cebfaa2d0b93502d56a8fbeb6b66cfbe"}
|
||||
{"datasource":"eosc________::cesnet::cesnet.object_based_storage","master":"10|eosc________::1c5b55339bb86ff997a256d42d7be4b0"}
|
||||
{"datasource":"eosc________::cesnet::cesnet.umsa_-_untargeted_mass_spectrometry_data_analysis","master":"10|eosc________::d928868211759352cb1604713e0347ec"}
|
||||
{"datasource":"eosc________::cessda-eric::cessda-eric.cessda_data_catalogue","master":"10|fairsharing_::936824c0191953647ec609b4f49bc964"}
|
||||
{"datasource":"eosc________::cessda-eric::cessda-eric.data_management_expert_guide_dmeg","master":"10|eosc________::22c14aaf31fc64424fa97adffe6380b9"}
|
||||
{"datasource":"eosc________::cessda-eric::cessda-eric.elsst__european_language_social_science_thesaurus","master":"10|eosc________::5b30e057381cf0200dc2cdc7b562f570"}
|
||||
{"datasource":"eosc________::cines::cines.etdr","master":"10|eosc________::3b7f7d6aafb0154025330183d59ce670"}
|
||||
{"datasource":"eosc________::clarin-eric::clarin-eric.language_resource_switchboard","master":"10|eosc________::3531aa80dbe2b1018133b510a933de40"}
|
||||
{"datasource":"eosc________::clarin-eric::clarin-eric.virtual_collection_registry","master":"10|eosc________::454e4f7f9f53d9dacf9dc3ba27902f16"}
|
||||
{"datasource":"eosc________::clarin-eric::clarin-eric.virtual_language_observatory","master":"10|eosc________::4db0c877190783461728c6714cb66cbc"}
|
||||
{"datasource":"eosc________::cloudferro::cloudferro.data_collections_catalog","master":"10|eosc________::eba1540eb9e87231fdf366eb23d16c3a"}
|
||||
{"datasource":"eosc________::cloudferro::cloudferro.data_related_services_-_eo_browser","master":"10|eosc________::c24ebda20485c08293b72561ee3c634b"}
|
||||
{"datasource":"eosc________::cloudferro::cloudferro.data_related_services_-_eo_finder","master":"10|eosc________::3d68186239b6c0f0d677ff55d9b549d1"}
|
||||
{"datasource":"eosc________::cloudferro::cloudferro.infrastructure","master":"10|eosc________::ac7e3c0151fa3f11d3a7739dddaa3416"}
|
||||
{"datasource":"eosc________::cmcc::cmcc.enes_data_space","master":"10|eosc________::2925e4df4147819e5b5d2f886f40e3a2"}
|
||||
{"datasource":"eosc________::cnb-csic::cnb-csic.3dbionotes-ws_web_application_to_annotate_biochemical_and_biomedical_information_onto_structural_models","master":"10|eosc________::77fe0a66415f2440ab60d47dcee678a5"}
|
||||
{"datasource":"eosc________::cnb-csic::cnb-csic.scipioncloud","master":"10|eosc________::7f09b7fee99363813f24aca9ebdecf61"}
|
||||
{"datasource":"eosc________::cnr-iia::cnr-iia.geo_dab","master":"10|eosc________::108b0148352c15ee1ce935699e09add3"}
|
||||
{"datasource":"eosc________::collabwith::collabwith.collabwith_marketplace","master":"10|eosc________::894a0ffa7768b228c1b46793670c85e6"}
|
||||
{"datasource":"eosc________::coronis_computing_sl::coronis_computing_sl.uw-mos","master":"10|eosc________::9cbf0a75d817e291771b8bce6440f5f4"}
|
||||
{"datasource":"eosc________::coronis_computing_sl::coronis_computing_sl.vd-maps","master":"10|eosc________::b5af1514b39d8e021554a73076a694d9"}
|
||||
{"datasource":"eosc________::creaf::creaf.nimmbus_geospatial_user_feedback","master":"10|eosc________::86c325db16448760b3390dda7e46631a"}
|
||||
{"datasource":"eosc________::creatis::creatis.virtual_imaging_platform","master":"10|eosc________::01a45ac2677f89414af91e651735846d"}
|
||||
{"datasource":"eosc________::cs_group::cs_group.ai4geo_engine","master":"10|eosc________::c61211295d27e5e08f4c64f3e3098294"}
|
||||
{"datasource":"eosc________::csc-fi::csc-fi.chipster","master":"10|eosc________::61549f785a2c93939be011b0453a6981"}
|
||||
{"datasource":"eosc________::csc-fi::csc-fi.cpouta","master":"10|eosc________::d71c843b4e00eff17db07bf9d10769f9"}
|
||||
{"datasource":"eosc________::csc-fi::csc-fi.csc_epouta","master":"10|eosc________::4493bd6a93e5b8465fda8cf7ab2dfdea"}
|
||||
{"datasource":"eosc________::csc-fi::csc-fi.rahti_container_cloud","master":"10|eosc________::cc60eb9fc76f9598ee581eff0792573b"}
|
||||
{"datasource":"eosc________::cscs::cscs.object_storage","master":"10|eosc________::3da6a817fe85ef43f7d97ef07e467d45"}
|
||||
{"datasource":"eosc________::csi_piemonte::csi_piemonte.nivola2","master":"10|eosc________::ac6483be3e556c8652b8595680795983"}
|
||||
{"datasource":"eosc________::csic::csic.csic_cloud_infrastructure","master":"10|eosc________::05ea2eb193382e22f32b32fbe9a4d961"}
|
||||
{"datasource":"eosc________::cyberbotics::cyberbotics.robotbenchmark","master":"10|eosc________::27ee094c68b7a758ca2915aca6215a1d"}
|
||||
{"datasource":"eosc________::d4science::d4science.alien_and_invasive_species_vre","master":"10|eosc________::b5cff6d55dcf6c20e78a0f1f847b3005"}
|
||||
{"datasource":"eosc________::d4science::d4science.rprototypinglab_virtual_research_environment","master":"10|eosc________::8073ab0dbb22dc3b9f17627a7b25903f"}
|
||||
{"datasource":"eosc________::d4science::d4science.visual_media_service_vre","master":"10|eosc________::eabf459f53c2bfe6247f006fcc0f4db7"}
|
||||
{"datasource":"eosc________::dariah_eric::dariah_eric.dariah-campus","master":"10|eosc________::9c63075d6642a2d269776c2b90c2f976"}
|
||||
{"datasource":"eosc________::dariah_eric::dariah_eric.ssh_open_marketplace","master":"10|eosc________::91fe494a3c21805febb03353152f1212"}
|
||||
{"datasource":"eosc________::datacite::datacite.datacite_doi_registration_service","master":"10|eosc________::c146a470f01ee7ded3b55acda9362e7f"}
|
||||
{"datasource":"eosc________::dcc-uk::dcc-uk.dmponline","master":"10|eosc________::fe480090e0739dab86b24a11177eeffd"}
|
||||
{"datasource":"eosc________::denbi::denbi.cloud","master":"10|eosc________::59399e560967488c0ae0329e0d37f5b4"}
|
||||
{"datasource":"eosc________::desy::desy.pan_data","master":"10|eosc________::52008fe404bf2e939140109162f9233f"}
|
||||
{"datasource":"eosc________::desy::desy.pan_faas","master":"10|eosc________::026939c4b12d7d71e2b05bc5acde804e"}
|
||||
{"datasource":"eosc________::desy::desy.pan_gitlab","master":"10|eosc________::f13cefc9f3207cb82f3285b05f190f78"}
|
||||
{"datasource":"eosc________::desy::desy.pan_notebook","master":"10|eosc________::500fe61cce6562797cd43797aab12be5"}
|
||||
{"datasource":"eosc________::digitalglobe::digitalglobe.earthwatch","master":"10|eosc________::020d905260267066c1926f526bb86f30"}
|
||||
{"datasource":"eosc________::dkrz::dkrz.enes_climate_analytics_service","master":"10|eosc________::1d7a1fea6694d15d9e67f08e1e77082b"}
|
||||
{"datasource":"eosc________::doabf::doabf.operas_certification","master":"10|eosc________::79b9748edeffb872a28660a9d238dcec"}
|
||||
{"datasource":"eosc________::ds-wizard::ds-wizard.data_stewardship_wizard","master":"10|eosc________::fc6bad963e15e218efc62c7befd122af"}
|
||||
{"datasource":"eosc________::egi-fed::egi-fed.check-in","master":"10|eosc________::baa3c497b9499b3d8c87ea8d2b37a44f"}
|
||||
{"datasource":"eosc________::egi-fed::egi-fed.cloud_compute","master":"10|eosc________::b1179384a336d409fc909fe3711d3d1f"}
|
||||
{"datasource":"eosc________::egi-fed::egi-fed.cloud_container_compute","master":"10|eosc________::a66bb1ac56a3bcf2c24b0ef85ed2bdfc"}
|
||||
{"datasource":"eosc________::egi-fed::egi-fed.data_transfer","master":"10|eosc________::6c0bf38e885c42161b88093517f6cd3e"}
|
||||
{"datasource":"eosc________::egi-fed::egi-fed.egi_datahub","master":"10|eosc________::5a260dae80795584ac08df133adb1fad"}
|
||||
{"datasource":"eosc________::egi-fed::egi-fed.fitsm_training","master":"10|eosc________::927b4455c0a21692d2a9f634bccd8309"}
|
||||
{"datasource":"eosc________::egi-fed::egi-fed.high-throughput_compute","master":"10|eosc________::e27ec11ac7b7d6ffbbce668b7d1f81d5"}
|
||||
{"datasource":"eosc________::egi-fed::egi-fed.iso_27001_training","master":"10|eosc________::98a6655b6421166c5c29baa2f5815de3"}
|
||||
{"datasource":"eosc________::egi-fed::egi-fed.notebook","master":"10|eosc________::1d37909a6a31147a09ee9f2e579a6706"}
|
||||
{"datasource":"eosc________::egi-fed::egi-fed.online_storage","master":"10|eosc________::d8b94284582d3e2185a782ae2ba42186"}
|
||||
{"datasource":"eosc________::egi-fed::egi-fed.training_infrastructure","master":"10|eosc________::38cdb8e44638f2e561c466f0dd26cf96"}
|
||||
{"datasource":"eosc________::egi-fed::egi-fed.workload_manager","master":"10|eosc________::ff515071cd88afb40599edcb6637f47e"}
|
||||
{"datasource":"eosc________::ehri::ehri.begrenzte_flucht","master":"10|eosc________::01d1445605fc1d25e6a7f21ba995d724"}
|
||||
{"datasource":"eosc________::ehri::ehri.diplomatic_reports","master":"10|eosc________::11714353d2ed069ca30b177d4b4d9e0f"}
|
||||
{"datasource":"eosc________::ehri::ehri.early_holocaust_testimony","master":"10|eosc________::0a4974b0bb295b98f88cb7c793f91c17"}
|
||||
{"datasource":"eosc________::ehri::ehri.ehri_document_blog","master":"10|eosc________::fb9291f8dac099986eafe957b169ed97"}
|
||||
{"datasource":"eosc________::ehri::ehri.international_research_portal_for_records_related_to_nazi-era_cultural_property","master":"10|eosc________::01c5b10e57f9cbb4f3125f427375487e"}
|
||||
{"datasource":"eosc________::ehri::ehri.the_ehri_portal","master":"10|eosc________::6ad4d5352fd192b5fecd76bbd7a7e8b7"}
|
||||
{"datasource":"eosc________::eiscat::eiscat.eiscat_data_access_portal","master":"10|eosc________::0f06a55c8333ae4d197c1d263b2be6ba"}
{"datasource":"eosc________::elixir-italy::elixir-italy.laniakea_recas","master":"10|eosc________::01e84abe377339ea57ed521ac39130e9"}
{"datasource":"eosc________::elixir-uk::elixir-uk.cyverse_uk","master":"10|eosc________::6a6a05847befec6587bef7673112f5e5"}
{"datasource":"eosc________::elixir-uk::elixir-uk.workflowhub","master":"10|fairsharing_::c8cd63e1bf13c5016881652983fb615a"}
{"datasource":"eosc________::elsevier::elsevier.digital_commons","master":"10|eosc________::67d38b6a1f43184676b113369554676b"}
{"datasource":"eosc________::embl-ebi::embl-ebi.embassy_cloud","master":"10|eosc________::7f8b24797312b851916ee1be0f836de6"}
{"datasource":"eosc________::embl-ebi::embl-ebi.identifiersorg","master":"10|eosc________::564e9f467aad251143e12e2e6ec19768"}
{"datasource":"eosc________::embl-ebi::embl-ebi.identifiersorg_central_registry","master":"10|eosc________::441caf7eaa4a6602aceae36b2697b924"}
{"datasource":"eosc________::embl-ebi::embl-ebi.identifiersorg_resolution_services","master":"10|eosc________::8df6273a1cb2289dbbe3a4b5fe05aa53"}
{"datasource":"eosc________::emso_eric::emso_eric.emso_eric_data_portal","master":"10|eosc________::94a41630bd9ddea4a88ec0bfba1b9d95"}
{"datasource":"eosc________::enermaps::enermaps.enermaps_data_management_tool","master":"10|eosc________::11496ee8a69b4b955200da7f2c12fe3b"}
{"datasource":"eosc________::enhancer::enhancer.openrdmeu","master":"10|eosc________::04820bece2545235144903dec056bcbd"}
{"datasource":"eosc________::enhancer::enhancer.swiss_escience_grid_certificates","master":"10|eosc________::4968516eb3b1ad6d883e74a84827e963"}
{"datasource":"eosc________::eodc::eodc.data_catalogue_service","master":"10|eosc________::21c44a2b6946e02300dbe36a8edec650"}
{"datasource":"eosc________::eodc::eodc.jupyterhub_for_global_copernicus_data","master":"10|eosc________::f99ccd68bf3de6a0a3b0db3441a41bbd"}
{"datasource":"eosc________::eosc-dih::eosc-dih.piloting_and_co-design_of_the_business_pilots","master":"10|eosc________::178f3e4832afe9e477d761d2f3d95f85"}
{"datasource":"eosc________::eox::eox.edc_eoxhub_workspace","master":"10|eosc________::d71468878e069cf484fc988d276c6d9a"}
{"datasource":"eosc________::esa-int::esa-int.geoss_web_portal","master":"10|eosc________::d7bac1ce234c20e3ab43a74eefa34782"}
{"datasource":"eosc________::esrf::esrf.the_european_synchrotron_radiation_facility_data_portal","master":"10|fairsharing_::2996962656838a97af4c5f926fe6f1b0"}
{"datasource":"eosc________::ess::ess.pan-learning-org","master":"10|eosc________::1298286d3a7cc48fa525b118218c7836"}
{"datasource":"eosc________::ess_eric::ess_eric.european_social_survey_ess_as_a_service","master":"10|eosc________::faa60b95b602690861be9305812a5c07"}
{"datasource":"eosc________::eudat::eudat.b2access","master":"10|eosc________::4dee0695b946b545dc8d52c56598fbbf"}
{"datasource":"eosc________::eudat::eudat.b2drop","master":"10|eosc________::4c6a514f1392ac1d159214e61785849a"}
{"datasource":"eosc________::eudat::eudat.b2find","master":"10|eosc________::6069f46dfcc89ccf8043581c9034558e"}
{"datasource":"eosc________::eudat::eudat.b2handle","master":"10|eosc________::a23be7f6265fd1ad957eed16b5c8bdc4"}
{"datasource":"eosc________::eudat::eudat.b2note","master":"10|eosc________::dfd1d6816b4182e25e84f6cf10d108ed"}
{"datasource":"eosc________::eudat::eudat.b2safe","master":"10|re3data_____::a632666349a0bb9a36096c9e152d34cc"}
{"datasource":"eosc________::eudat::eudat.b2share","master":"10|eosc________::f959324bdb00f052d547b95da205062f"}
{"datasource":"eosc________::eurac::eurac.edp-portal_-_metadata_catalogue_of_eurac_research","master":"10|eosc________::274d73061a925a29d8743b3e1022d0dc"}
{"datasource":"eosc________::europeana::europeana.europeana_apis","master":"10|eosc________::91de8c90ebde3dc1c8d41f339fe3fac7"}
{"datasource":"eosc________::exoscale::exoscale.european_cloud_hosting","master":"10|eosc________::12b7e6fef784084b817a42f2990fe3f2"}
{"datasource":"eosc________::expertai::expertai.document_enrichment_api","master":"10|eosc________::6812b902471f12506c8e6441195aff57"}
{"datasource":"eosc________::expertai::expertai.recommender_api","master":"10|eosc________::c40634543c1217686f0a8f5e8592d100"}
{"datasource":"eosc________::expertai::expertai.search_api","master":"10|eosc________::79440bc8082949f56cbabef796cec7f1"}
{"datasource":"eosc________::fairdi::fairdi.nomad_repository","master":"10|eosc________::b9000c95a6fde9930ae74f4071e14cb2"}
{"datasource":"eosc________::figshare::figshare.figshare","master":"10|eosc________::5e6bd062c6b85e2d176b2e61636b8971"}
{"datasource":"eosc________::forschungsdaten::forschungsdaten.forschungsdateninfo","master":"10|eosc________::c9185fdb68af7d515e56054da546bc94"}
{"datasource":"eosc________::forth::forth.openbioeu","master":"10|eosc________::2db71171816e994877fb960b9fcd89f2"}
{"datasource":"eosc________::fssda::fssda.data_service_portal_aila","master":"10|eosc________::ef1f75ea6d244563bc6cfb0c3d3affa4"}
{"datasource":"eosc________::fssda::fssda.kuha2_metadata_server","master":"10|eosc________::b6af28d7c292dbbe816cd0d6a9a66f16"}
{"datasource":"eosc________::gbif-es::gbif-es.collections_registry","master":"10|eosc________::ac6da0cfbd07f8605c57a799c41dc947"}
{"datasource":"eosc________::gbif-es::gbif-es.e-Learning_platform","master":"10|eosc________::9059ca88ca8292881ffba9ad8d943d04"}
{"datasource":"eosc________::gbif-es::gbif-es.images_portal","master":"10|eosc________::6991e5dd230956156129669934798cd8"}
{"datasource":"eosc________::gbif-es::gbif-es.occurrence_records","master":"10|eosc________::948a9a53e2a9c94d32f99785eccff662"}
{"datasource":"eosc________::gbif-es::gbif-es.regions_module","master":"10|eosc________::11189c308854c8d8113161edc7fbd3de"}
{"datasource":"eosc________::gbif-es::gbif-es.spatial_portal","master":"10|eosc________::665f73f5e4b6a3693fec9426a6ce6ae8"}
{"datasource":"eosc________::gbif-es::gbif-es.species_portal","master":"10|eosc________::9fe2f2ccb3d17452bd6e7424f60340ce"}
{"datasource":"eosc________::gbif::gbif.gbif_species_occurrence_data","master":"10|fairsharing_::6e5025ccc7d638ae4e724da8938450a6"}
{"datasource":"eosc________::gbif_portugal::gbif_portugal.gbif_portugal_occurrence_records","master":"10|eosc________::fcd4f4efdecb4e675fdee043043f69fc"}
{"datasource":"eosc________::gcc_umcg::gcc_umcg.molgenis","master":"10|eosc________::7f255ebbb3715f258e8d7c470209e675"}
{"datasource":"eosc________::geant::geant.clouds_service_infrastructure_as_a_service","master":"10|eosc________::7debc69506a8019515d350707e8c82d7"}
{"datasource":"eosc________::geant::geant.edugain","master":"10|eosc________::3ded12106e7e870242f7ec39345b3b97"}
{"datasource":"eosc________::geant::geant.edumeet_-_webbased_videoconferencing_platform","master":"10|eosc________::dcf8b262f7f61d44eedf409a29d30abc"}
{"datasource":"eosc________::geant::geant.eduroam","master":"10|eosc________::e7fd04aab1f224aaa2b5d3478694748b"}
{"datasource":"eosc________::geant::geant.eduteams","master":"10|eosc________::f3b04fa1e741f17a842fcbea35e04318"}
{"datasource":"eosc________::geant::geant.eduvpn_-_access_your_institutes_network_or_the_internet_using_an_encrypted_connection","master":"10|eosc________::aeb7c573f2742ec5ef8b7332b6b614cb"}
{"datasource":"eosc________::geant::geant.inacademia","master":"10|eosc________::26cb3be539a5bbb25533d3b1bdb9d6aa"}
{"datasource":"eosc________::geant::geant.ip","master":"10|eosc________::59cd8dbce2703f4eea69a54a959aae89"}
{"datasource":"eosc________::geant::geant.l3vpn","master":"10|eosc________::1e70cff61071ce42baffa6dafaf3165e"}
{"datasource":"eosc________::geant::geant.lambda","master":"10|eosc________::20a8114b376bf4c455c034b7b4513805"}
{"datasource":"eosc________::geant::geant.mdvpn","master":"10|eosc________::54fbf0ac4e42a2ce51e400d9783b51ba"}
{"datasource":"eosc________::geant::geant.open","master":"10|eosc________::9ae24d8c63e9ff986fbd20705b334919"}
{"datasource":"eosc________::geant::geant.perfsonar","master":"10|eosc________::1bdda4f743377914fabd0f365a8b6ee2"}
{"datasource":"eosc________::geant::geant.plus","master":"10|eosc________::eef45e860d52aff4932f254599d5b713"}
{"datasource":"eosc________::geant::geant.transits_training","master":"10|eosc________::831e2b596060c60d7d4bc79c200a2254"}
{"datasource":"eosc________::geant::geant.trusted_certificate_service","master":"10|eosc________::30817adfb6c625d7fd36b657e2fabc74"}
{"datasource":"eosc________::geant::geant.wifimon","master":"10|eosc________::6116f3b14f34658593529f6810068c4e"}
{"datasource":"eosc________::genias::genias.e-irg_knowledge_base","master":"10|eosc________::ddc5ab67fed353917716eb2d5c86ce68"}
{"datasource":"eosc________::gesis::gesis.doi_registration_service","master":"10|eosc________::71f37a7ebd8495a59c46e637ee5463da"}
{"datasource":"eosc________::grnet::grnet.agora_resource_portfolio_management_tool","master":"10|eosc________::461aa754c52b7eed605f9e0955470de5"}
{"datasource":"eosc________::grnet::grnet.argo_monitoring_engine","master":"10|eosc________::e91a3b4dfb62113b9b67b0ac97e566b4"}
{"datasource":"eosc________::grnet::grnet.aris","master":"10|eosc________::6b381464ec768e3cf55ccacdb00b5988"}
{"datasource":"eosc________::grnet::grnet.aris_-_archival_service","master":"10|eosc________::32158f91e33cf6fb6c63561cbc7ffd24"}
{"datasource":"eosc________::grnet::grnet.ni4os-europe_login","master":"10|eosc________::aeaa8f7fc2948930bfa4f970cd96837e"}
{"datasource":"eosc________::grnet::grnet.ni4os-europe_repository_service","master":"10|eosc________::d6933cb7acd6fa7a2f7a42562c432fb5"}
{"datasource":"eosc________::grycap::grycap.elastic_cloud_compute_cluster","master":"10|eosc________::c6d3c380ce5499d8d20cc9bbeb3b43ff"}
{"datasource":"eosc________::grycap::grycap.infrastructure_manager","master":"10|eosc________::e8a2eeb06a205c3299af49f5c233ce16"}
{"datasource":"eosc________::grycap::grycap.saps_surface_energy_balance_automated_processing_service","master":"10|eosc________::a7ae875b2487576c35f1bc8e1c857c14"}
{"datasource":"eosc________::hn::hn.isidore","master":"10|re3data_____::fabe5c1aaa2e2d4c847e01647b87bf60"}
{"datasource":"eosc________::hostkey::hostkey.gpu_servers_grant_program","master":"10|eosc________::d45f87107eb536b4be97e112fac15787"}
{"datasource":"eosc________::icos_eric::icos_eric.data_discovery_and_access_portal","master":"10|eosc________::84ada2e91828ce72fa6d02736cdd90f1"}
{"datasource":"eosc________::ifca-csic::ifca-csic.deepaas_training_facility","master":"10|eosc________::5414e2342e67d64b11b835e7fd58869d"}
{"datasource":"eosc________::ifca-csic::ifca-csic.ifca-csic_cloud_infrastructure","master":"10|eosc________::838e5c334e8115e4831d5f21435aa19b"}
{"datasource":"eosc________::ifca-csic::ifca-csic.plant_classification","master":"10|eosc________::32c26f83acaef8d89cc6c7a2f8abd198"}
{"datasource":"eosc________::ifca-csic::ifca-csic.remote_monitoring_and_smart_sensing","master":"10|eosc________::0335d29ec68ef9ebad8326cba79455f2"}
{"datasource":"eosc________::ifin-hh::ifin-hh.cloudifin","master":"10|eosc________::04d791df0b61b0f5060f241c70924991"}
{"datasource":"eosc________::iisas::iisas.dynamic_dns_service","master":"10|eosc________::2381e3b55d048130f2dffd437123d501"}
{"datasource":"eosc________::iisas::iisas.fedcloudclient_egi_fedcloud_client","master":"10|eosc________::3668885b6512a039673b9f4638c88600"}
{"datasource":"eosc________::iisas::iisas.modelling_service_for_water_supply_systems","master":"10|eosc________::b1d6d2cebddf52f6647102a30690fba9"}
{"datasource":"eosc________::ill::ill.ill_data_portal","master":"10|eosc________::714498cf1efec13c2206db4b1e4f1c30"}
{"datasource":"eosc________::ill::ill.panosc_software_catalogue","master":"10|eosc________::bc63c5a78abd38a7d9df043e0853a9ce"}
{"datasource":"eosc________::inaf::inaf.space-ml_caesar_service","master":"10|eosc________::ba42c5e4332ff16c6cd28573012bc2f9"}
{"datasource":"eosc________::inaf::inaf.space-vis_vialactea_service","master":"10|eosc________::ce2ca563bceae686b763326ed53e7b54"}
{"datasource":"eosc________::infn::infn.dynamic_on_demand_analysis_service","master":"10|eosc________::f884894e05c5a54646f0b5715e5495d6"}
{"datasource":"eosc________::infn::infn.fgsg_science_software_on_demand","master":"10|eosc________::452af4e76a64b6ee7e4bdc86527687f7"}
{"datasource":"eosc________::infn::infn.indigo_identity_and_access_management","master":"10|eosc________::d23115c40a4e256725f140330d001861"}
{"datasource":"eosc________::infn::infn.infn-cloud_object_storage_dice","master":"10|eosc________::fe0c28e8657cb84e3b775156106c03d1"}
{"datasource":"eosc________::infn::infn.paas_orchestrator","master":"10|eosc________::146240bb16057a93e11631edee570f76"}
{"datasource":"eosc________::infrafrontier::infrafrontier.training_in_mouse_functional_genomics","master":"10|eosc________::64d6597d10f4e617152f4a612a87eaba"}
{"datasource":"eosc________::inria::inria.software_heritage_archive","master":"10|fairsharing_::2c758933af02c0b301906f2819ae1268"}
{"datasource":"eosc________::jelastic::jelastic.platform-as-a-service","master":"10|eosc________::bfcae4ab00df41a3c43efbb879586e8f"}
{"datasource":"eosc________::kit::kit.eosc-performance","master":"10|eosc________::e52ab75587c1dd98db80568197f04586"}
{"datasource":"eosc________::kit::kit.o3as_ozone_assessment","master":"10|eosc________::aaf27a5f35a790617247abecd84b100f"}
{"datasource":"eosc________::komanord::komanord.guardomic","master":"10|eosc________::b1e06c9d2c472e9441ee72e83a934d40"}
{"datasource":"eosc________::lago::lago.onedatasim","master":"10|eosc________::2b2163e8b82320fed69a017a3e5fb657"}
{"datasource":"eosc________::lifewatch-eric::lifewatch-eric.plants_identification_app","master":"10|eosc________::6fc6ed0894391496d3c4967d45933d1a"}
{"datasource":"eosc________::lindatclariah-cz::lindatclariah-cz.elixirfm","master":"10|eosc________::6dd7c323776a028cef0619cb34bdf48c"}
{"datasource":"eosc________::lindatclariah-cz::lindatclariah-cz.ker_-_keyword_extractor","master":"10|eosc________::09915f038900aa43cb0c76aa89f10cda"}
{"datasource":"eosc________::lindatclariah-cz::lindatclariah-cz.lindatclariah-cz_repository","master":"10|eosc________::3daee6a29fb1d9a0f624cdd5973c33ea"}
{"datasource":"eosc________::lindatclariah-cz::lindatclariah-cz.machine_translation","master":"10|eosc________::3ae4551729381cfd03c433fb0de0c971"}
{"datasource":"eosc________::lindatclariah-cz::lindatclariah-cz.morphodita","master":"10|eosc________::f2ceebdc1a41d65504ff27f7297c833b"}
{"datasource":"eosc________::lindatclariah-cz::lindatclariah-cz.nametag","master":"10|eosc________::71e3226e7a868e2215335ffb29073285"}
{"datasource":"eosc________::lindatclariah-cz::lindatclariah-cz.udpipe_tool_for_lemmatization_morphological_analysis_pos_tagging_and_dependency_parsing_in_multiple_languages","master":"10|eosc________::2dfc64c2951d9be3f1e2b576633ea425"}
{"datasource":"eosc________::lnec::lnec-pt.opencoasts_portal","master":"10|eosc________::7e99655aeda0b5f06efb3eea424dff54"}
{"datasource":"eosc________::lnec::lnec.worsica_-_water_monitoring_sentinel_cloud_platform","master":"10|eosc________::c2f55ab774c3cbbd9a330eebaa74dc36"}
{"datasource":"eosc________::materialscloud::materialscloud.aiiDA_lab","master":"10|eosc________::dfd970a812cf2e0298eb28c681bc109f"}
{"datasource":"eosc________::materialscloud::materialscloud.materials_cloud_archive","master":"10|fairsharing_::a431d70133ef6cf688bc4f6093922b48"}
{"datasource":"eosc________::meeo::meeo.adam_platform","master":"10|eosc________::b17fedb87dd9985b6a5e51db593446d6"}
{"datasource":"eosc________::meeo::meeo.adam_space","master":"10|eosc________::24bfbca4cf4fedc5a4a662fe67a30d7e"}
{"datasource":"eosc________::mobile_observation_integration_service::mobile_observation_integration_service.dark_sky_meter_datasource","master":"10|eosc________::160638e73224aeb7e4f98fd237672919"}
{"datasource":"eosc________::msw::msw.polaris_os","master":"10|eosc________::12348ba5b2c5902fd400cb3f1ab773ee"}
{"datasource":"eosc________::obp::obp.thoth","master":"10|eosc________::680198ec3f51a744de8a7603d542a0e1"}
{"datasource":"eosc________::openaire::openaire.amnesia","master":"10|eosc________::ac57e2dd5b3ee01909d7a592523bb96f"}
{"datasource":"eosc________::openaire::openaire.argos","master":"10|eosc________::92145beb3257af0510ee61ef10d16870"}
{"datasource":"eosc________::openaire::openaire.broker","master":"10|eosc________::c8c6e8d211d6df4ee8a187fa1134bd92"}
{"datasource":"eosc________::openaire::openaire.data_provider_dashboard","master":"10|eosc________::809d4c77a7acf9ac0cc2990d4264ae51"}
{"datasource":"eosc________::openaire::openaire.digital_humanities_and_cultural_heritage_openaire_community_gateway","master":"10|eosc________::b9110e9735dd467abc969fe8e2f1efa3"}
{"datasource":"eosc________::openaire::openaire.discovery_portal","master":"10|eosc________::992052173b689c8cea94e8e8d99f0238"}
{"datasource":"eosc________::openaire::openaire.european_marine_science_openaire_dashboard","master":"10|eosc________::950a99851df85c90ec2e933e1d55e164"}
{"datasource":"eosc________::openaire::openaire.funder_dashboard","master":"10|eosc________::196eea80ab9d73766cd2e8b6ab85872f"}
{"datasource":"eosc________::openaire::openaire.graph","master":"10|eosc________::c122caed52a88b57732b814a74141000"}
{"datasource":"eosc________::openaire::openaire.greek_sustainable_development_solutions_network_sdsn_openaire_dashboard","master":"10|eosc________::8100e41e3a5b18170bc5ede2cc393331"}
{"datasource":"eosc________::openaire::openaire.inference","master":"10|eosc________::c491811e9a6afa69cdcab0f92fca6f7b"}
{"datasource":"eosc________::openaire::openaire.neuroinformatics_openaire_dashboard","master":"10|eosc________::6e3adcce4d0d4229a9749584dfd5e7a8"}
{"datasource":"eosc________::openaire::openaire.open_science_helpdesk","master":"10|eosc________::d66db88d4c6c354fe7ebcd4c3dce334e"}
{"datasource":"eosc________::openaire::openaire.open_science_observatory","master":"10|eosc________::441ee64860eb79808b7cf0bb08262be6"}
{"datasource":"eosc________::openaire::openaire.open_science_training","master":"10|eosc________::99847506cdff50afa4945d60a9661ea3"}
{"datasource":"eosc________::openaire::openaire.openaire_login","master":"10|eosc________::818973a9375c0fa545499e1bb9ad0ab2"}
{"datasource":"eosc________::openaire::openaire.openapc","master":"10|eosc________::a28cc193bc938573e892b8aad0017702"}
{"datasource":"eosc________::openaire::openaire.research_community_dashboard","master":"10|eosc________::e1a866322f76407fb161a253dc5b539c"}
{"datasource":"eosc________::openaire::openaire.scholexplorer","master":"10|eosc________::6b34adede04121175566ef8c70f1e520"}
{"datasource":"eosc________::openaire::openaire.technical_support_towards_openaire_compliance","master":"10|eosc________::cdb8e94b386f9b6780a47194bd1bc7f7"}
{"datasource":"eosc________::openaire::openaire.topos_observatory_for_organisations","master":"10|eosc________::a7d2b95257273b5ea3f3a23fd8a60d48"}
{"datasource":"eosc________::openaire::openaire.usage_statistics","master":"10|eosc________::8aa345dc7321fc97906bf4c193a05a8f"}
{"datasource":"eosc________::openaire::openaire.validator","master":"10|eosc________::f2c13efbaa2a33af3e4e6a54805ac379"}
{"datasource":"eosc________::openaire::openaire.zenodo","master":"10|opendoar____::358aee4cc897452c00244351e4d91f69"}
{"datasource":"eosc________::openbiomaps::openbiomaps.openbiomaps","master":"10|eosc________::32edf5a4edbdea0899d6ba588d083efd"}
{"datasource":"eosc________::openedition::openedition.operas_research_for_society","master":"10|eosc________::2cdf4f57007b990b7ad7a884796f9b15"}
{"datasource":"eosc________::openknowledgemaps::openknowledgemaps.open_knowledge_maps","master":"10|eosc________::f3819d0f8e8bf57d383b23d31a3c0099"}
{"datasource":"eosc________::openminted::openminted.builder_of_tdm_applications","master":"10|eosc________::fdd26c19dd490260bc6c48b5813f4ac3"}
{"datasource":"eosc________::openminted::openminted.catalogue_of_ancillary_resources","master":"10|eosc________::ab4e37e85a1975b204b66683ed3888a8"}
{"datasource":"eosc________::openminted::openminted.catalogue_of_corpora","master":"10|eosc________::2cf744a594ea30fd31e976bffa8f2b71"}
{"datasource":"eosc________::openminted::openminted.catalogue_of_tdm_applications","master":"10|eosc________::ef5f343c5cf11fa2d40407ec308bb34a"}
{"datasource":"eosc________::openminted::openminted.catalogue_of_tdm_components","master":"10|eosc________::4275243a94677f19a5b74e5afb1f94cf"}
{"datasource":"eosc________::openminted::openminted.consulting_on_licences_for_tdm","master":"10|eosc________::522000b4c90b209aa7be961449ca910f"}
{"datasource":"eosc________::openminted::openminted.corpus_builder_for_scholarly_works","master":"10|eosc________::c64725d47af63bc2114b4214b684a392"}
{"datasource":"eosc________::openminted::openminted.support_and_training","master":"10|eosc________::84501ff99e5e429f5f083ab8ca0be7e4"}
{"datasource":"eosc________::openminted::openminted.tdm_applications_executor","master":"10|eosc________::e9ae655ce2ff1eaa19d0b3475ce5e660"}
{"datasource":"eosc________::operas::operas.gotriple_discovery_platform","master":"10|eosc________::f687e24dc56aaeeb561c95865a5071cc"}
{"datasource":"eosc________::operas::operas.operas_metrics_service","master":"10|eosc________::5960e1289f623625210f720c6173592d"}
{"datasource":"eosc________::oslo_university::oslo_university.services_for_sensitive_data_tsd","master":"10|eosc________::743b01351510f88e24be1c700c581f68"}
{"datasource":"eosc________::osmooc::osmooc.open_science_mooc","master":"10|eosc________::e101101e8653b6607a3ad9fea3b7d1fe"}
{"datasource":"eosc________::oxford_e-research_centre::oxford_e-research_centre.fairsharing","master":"10|openaire____::bf5a61cc330e21ffa90eed3eb1533466"}
{"datasource":"eosc________::phenomenal::phenomenal.phenomenal","master":"10|eosc________::79e19b14aeee0d94e9a79110a6e6ad32"}
{"datasource":"eosc________::plantnet::plantnet.plntnet_identification_service","master":"10|eosc________::5ce89743eafdd8578591d84150f547e4"}
{"datasource":"eosc________::prace::prace.application_enabling_support","master":"10|eosc________::c87fd74ed685337fdbcff504373fc513"}
{"datasource":"eosc________::prace::prace.code_vault","master":"10|eosc________::dbab7889c81b59ec753040a762f6569a"}
{"datasource":"eosc________::prace::prace.deci_access","master":"10|eosc________::c7cedb82b1beea5382601d48807212aa"}
{"datasource":"eosc________::prace::prace.mooc","master":"10|eosc________::d6ff8167d31dccebe33a272513422b53"}
{"datasource":"eosc________::prace::prace.patc","master":"10|eosc________::1ab1b123bd559ee7f7c7ec2ee353f0c0"}
{"datasource":"eosc________::prace::prace.preparatory_access","master":"10|eosc________::39430adf529f1ab9e33da444b3708fcf"}
{"datasource":"eosc________::prace::prace.project_access","master":"10|eosc________::b58e957946983b686c76ee19dfab8d70"}
{"datasource":"eosc________::prace::prace.ptc","master":"10|eosc________::b3ca18e8884bfe2422d3723313fef79c"}
{"datasource":"eosc________::prace::prace.seasonal_schools_and_international_summer_school","master":"10|eosc________::590c71318d9d94c32981e3195567d546"}
{"datasource":"eosc________::prace::prace.shape","master":"10|eosc________::38b5a26f74e4808270a2d4f305d2f3a5"}
{"datasource":"eosc________::prace::prace.training_portal","master":"10|eosc________::25966a269ab2343ac9c4d982c341d87f"}
{"datasource":"eosc________::predictia::predictia.climadjust","master":"10|eosc________::14743eb22da3524893784faf409aac70"}
{"datasource":"eosc________::psi::psi.psi_public_data_repository","master":"10|re3data_____::1e55174ff77ed2d804871281201dbb50"}
{"datasource":"eosc________::psi::psi.remote_desktop_service","master":"10|eosc________::c82e26eb6e65d008de03b349dffc11fc"}
{"datasource":"eosc________::psnc::psnc.rohub","master":"10|eosc________::c87f08707b5235172e85b374e39a82dc"}
{"datasource":"eosc________::psnc::psnc.symbiote","master":"10|eosc________::ef0cd965a0d0a3df80ecfae4b3b08aad"}
{"datasource":"eosc________::rasdaman::rasdaman.datacube","master":"10|eosc________::bb1678f7b15d8c15fde6e240a4f95f93"}
{"datasource":"eosc________::rbi::rbi.dariah_science_gateway","master":"10|eosc________::b51b448421d926293b3781f4ac90f4f4"}
{"datasource":"eosc________::readcoop::readcoop.transkribus","master":"10|eosc________::a80411026809e6eaa896439e1b9764f4"}
{"datasource":"eosc________::rli::rli.open_energy_platform","master":"10|fairsharing_::0cbed40c0d920b94126eaf5e707be1f5"}
{"datasource":"eosc________::ror-org::ror-org.identifier","master":"10|eosc________::6fe92c2346db22322ddf6b677d449b0e"}
{"datasource":"eosc________::sciences_po::sciences_po.ethnic_and_migrant_minority_survey_registry","master":"10|eosc________::0cde986dc2bf015912e407f0f83ee402"}
{"datasource":"eosc________::sciences_po::sciences_po.wpss_for_ess","master":"10|eosc________::9a5bb11c495443aad944b04f5fcb5c07"}
{"datasource":"eosc________::scigne::scigne.cloud_compute","master":"10|eosc________::7c63e3284c36b5977c553192dce506b3"}
{"datasource":"eosc________::scipedia::scipedia.scipedia","master":"10|eosc________::850abcddc76069f2c3c1cf77ad4beec9"}
{"datasource":"eosc________::scipedia::scipedia.topos_for_individuals","master":"10|eosc________::e6214b58f39a25b53eecda340f95ee7b"}
{"datasource":"eosc________::seadatanet::seadatanet.doi_minting_service","master":"10|eosc________::f87f72147a3c82c4f77684e40101e90e"}
{"datasource":"eosc________::seadatanet::seadatanet.european_directory_of_marine_environmental_data_edmed","master":"10|eosc________::d79706389f0b864306feb47aac1f5766"}
{"datasource":"eosc________::seadatanet::seadatanet.european_directory_of_marine_environmental_research_projects","master":"10|eosc________::baa9d2d6cdd8507fcbf76242e4c25d76"}
{"datasource":"eosc________::seadatanet::seadatanet.european_directory_of_marine_organisations_edmo","master":"10|eosc________::5d23c66c26e0df209fc415c1e9ad0316"}
{"datasource":"eosc________::seadatanet::seadatanet.european_directory_of_the_cruise_summary_reports_csr","master":"10|eosc________::fd70912c66037dc11f710587e281eeaf"}
{"datasource":"eosc________::seadatanet::seadatanet.european_directory_of_the_initial_ocean-observing_systems_edios","master":"10|eosc________::846016e987d1feaf2a36083f88dba1f2"}
{"datasource":"eosc________::seadatanet::seadatanet.seadatanet_cdi","master":"10|eosc________::36cd158d6b1bbdbfb443c68b8da00335"}
{"datasource":"eosc________::seadatanet::seadatanet.vocabulary_services_-_underpinned_by_the_nerc_vocabulary_server_nvs","master":"10|eosc________::4416d18ec7a57e553979fbfa4d862483"}
{"datasource":"eosc________::sinergise::sinergise.sentinel_hub","master":"10|eosc________::d36ae944fa207461bcb7b2b3a6c94de8"}
{"datasource":"eosc________::sixsq::sixsq.nuvla_multi-cloud_application_management_platform","master":"10|eosc________::38438cc3190a3815359efb53b9dd98eb"}
{"datasource":"eosc________::sks::sks.digital_production_for_conferences_workshops_roundtables_and_other_academic_and_professional_events","master":"10|eosc________::f6b51bef4a5f1478e980673339f2b2f3"}
{"datasource":"eosc________::smartsmear::smartsmear.smartsmear","master":"10|eosc________::d17a9325ca64ffad59e04659ed5404f7"}
{"datasource":"eosc________::sobigdata::sobigdata.tagme","master":"10|eosc________::0c3b8b80d9d6d38effd28bfa6a140a12"}
{"datasource":"eosc________::suite5::suite5.furniture_enterprise_analytics","master":"10|eosc________::29ed60070bd91bdc19c9f278b104465c"}
{"datasource":"eosc________::switch::switch.switchengines","master":"10|eosc________::d4143918a810115206640cfeb11e0ba6"}
{"datasource":"eosc________::t-systems::t-systems.open_telekom_cloud","master":"10|eosc________::c489ef6564a47922359f7b833919d642"}
{"datasource":"eosc________::terradue::terradue.eo_services_for_earthquake_response_and_landslides_analysis","master":"10|eosc________::ab3140d145deb5fdb02eeefbc5ebc471"}
{"datasource":"eosc________::tib::tib.open_research_knowledge_graph_orkg","master":"10|eosc________::ed6bd695c7a99297f360bc2fc915be90"}
{"datasource":"eosc________::ubora::ubora.ubora","master":"10|eosc________::bacf05aff1c6dcf3133a0352d5eb14c4"}
{"datasource":"eosc________::ubora::ubora.ubora_e-platform","master":"10|eosc________::947fde33605ba61216a07135ee1551f2"}
{"datasource":"eosc________::ugr-es::ugr-es.glacier_lagoons_of_sierra_nevada","master":"10|eosc________::8a966c0efca298ad5ec130d323c29935"}
{"datasource":"eosc________::uit::uit.dataverseno","master":"10|eosc________::92b76aa81a5b8443fcf17d3ae3c34211"}
{"datasource":"eosc________::uit::uit.the_troms_repository_of_language_and_linguistics_trolling","master":"10|fairsharing_::a36b0dcd1e6384abc0e1867860ad3ee3"}
{"datasource":"eosc________::ukaea::ukaea.prominence","master":"10|eosc________::06ce999c7cf77ea5a65f87bb563cd625"}
{"datasource":"eosc________::ukri_-_stfc::ukri_-_stfc.cvmfs_test","master":"10|eosc________::53aaa0a24d0edc47c23e722135c29dde"}
{"datasource":"eosc________::ukri_-_stfc::ukri_-_stfc.rucio","master":"10|eosc________::c19a8251c6bf563365c555572ace903e"}
{"datasource":"eosc________::uni-freiburg::uni-freiburg.european_galaxy_server","master":"10|eosc________::cc00fc2385475b80accec001dfb85efb"}
{"datasource":"eosc________::unibo::unibo.opencitations","master":"10|eosc________::573c29ecaf76ab961743bfc8a7d911ec"}
{"datasource":"eosc________::unifl::unifl.snap4city","master":"10|eosc________::9a55c40c3c082b7a8352ecbc56a87996"}
{"datasource":"eosc________::unige::unige.astronomical_online_data_analysis_astrooda","master":"10|eosc________::63f6119d3170cccf979daada3c5b524e"}
{"datasource":"eosc________::unitartu::unitartu.ut.rocket","master":"10|eosc________::da3450589a9d56212963b20cf729974c"}
{"datasource":"eosc________::upv-es::upv-es.lemonade","master":"10|eosc________::afdd227beada491f77d7944d7a0eafc9"}
{"datasource":"eosc________::vamdc::vamdc.portal","master":"10|eosc________::4dab2bb6e9a9ad223cd63c62c2ea804e"}
{"datasource":"eosc________::vamdc::vamdc.query_store","master":"10|eosc________::33f18bfe544c3c84ac28be6a3292d166"}
{"datasource":"eosc________::vamdc::vamdc.species_database","master":"10|eosc________::ae3587682dec5663a1b3b625036d15d0"}
{"datasource":"eosc________::vilnius-university::vilnius-university.the_national_open_access_research_data_archive_midas","master":"10|eosc________::4987ee0d071f68cf88f6b1a834b6733f"}
{"datasource":"eosc________::wenmr::wenmr.amber-based_portal_server_for_nmr_structures_amps-nmr","master":"10|eosc________::c6cca9747ef3ce296bd626bcbc4e480a"}
{"datasource":"eosc________::wenmr::wenmr.disvis_web_portal","master":"10|eosc________::2539ec693b683284c4e243b969ae3fc0"}
{"datasource":"eosc________::wenmr::wenmr.fanten_finding_anisotropy_tensor","master":"10|eosc________::99c793e3f3b856c48eaaa36682038b28"}
{"datasource":"eosc________::wenmr::wenmr.haddock24_web_portal","master":"10|eosc________::0f198f6a0885105809f420be23614be3"}
{"datasource":"eosc________::wenmr::wenmr.metalpdb","master":"10|eosc________::84676bc3d2ce17de70309dc58f428296"}
{"datasource":"eosc________::wenmr::wenmr.pdb-tools_web","master":"10|eosc________::b37eed45624ac30f3476f71640e59a61"}
{"datasource":"eosc________::wenmr::wenmr.powerfit_web_portal","master":"10|eosc________::93d4d621ed1da378c0e7dc891cefc007"}
{"datasource":"eosc________::wenmr::wenmr.spoton","master":"10|eosc________::76e7e0552f9c6b89db94b31ddc366b9f"}
@@ -1,11 +1,11 @@
|
|||
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"pubsrepository::journal","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"extraInfo":[],"id":"10|issn___print::a7a2010e75d849442790955162ef4e42","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"knowledgegraph":false,"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
|
||||
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"crissystem","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"NARCIS"},"extraInfo":[],"id":"10|eurocrisdris::fe4903425d9040f680d8610d9079ea14","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"knowledgegraph":false,"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
|
||||
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"pubsrepository::institutional","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"BELARUS"},"extraInfo":[],"id":"10|opendoar____::fd272fe04b7d4e68effd01bddcc6bb34","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"knowledgegraph":false,"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
|
||||
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"pubsrepository::institutional","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"UNIGE"},"extraInfo":[],"id":"10|opendoar____::f0dd4a99fba6075a9494772b58f95280","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"knowledgegraph":false,"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
|
||||
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"crissystem","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"TUHH"},"extraInfo":[],"id":"10|eurocrisdris::9ae43d14471c4b33661fedda6f06b539","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"knowledgegraph":false,"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
|
||||
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"pubsrepository::journal","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"extraInfo":[],"id":"10|issn___print::a7a2010e75d849442790955162ef4e43","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"knowledgegraph":false,"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"pubsrepository::journal","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"extraInfo":[],"id":"10|issn___print::a7a2010e75d849442790955162ef4e44","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"knowledgegraph":false,"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"pubsrepository::journal","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"extraInfo":[],"id":"10|issn___print::a7a2010e75d849442790955162ef4e45","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"knowledgegraph":false,"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"pubsrepository::journal","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"extraInfo":[],"id":"10|issn___print::a7a2010e75d849442790955162ef4e46","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"knowledgegraph":false,"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"pubsrepository::journal","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"extraInfo":[],"id":"10|issn___print::a7a2010e75d849442790955162ef4e47","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"knowledgegraph":false,"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"pubsrepository::journal","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"extraInfo":[],"id":"10|issn___print::a7a2010e75d849442790955162ef4e48","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"knowledgegraph":false,"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"pubsrepository::journal","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"extraInfo":[],"id":"10|issn___print::a7a2010e75d849442790955162ef4e42","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"crissystem","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"NARCIS"},"extraInfo":[],"id":"10|eurocrisdris::fe4903425d9040f680d8610d9079ea14","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"pubsrepository::institutional","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"BELARUS"},"extraInfo":[],"id":"10|opendoar____::fd272fe04b7d4e68effd01bddcc6bb34","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"pubsrepository::institutional","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"UNIGE"},"extraInfo":[],"id":"10|opendoar____::f0dd4a99fba6075a9494772b58f95280","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"crissystem","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"TUHH"},"extraInfo":[],"id":"10|eurocrisdris::9ae43d14471c4b33661fedda6f06b539","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"pubsrepository::journal","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"extraInfo":[],"id":"10|issn___print::a7a2010e75d849442790955162ef4e43","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"pubsrepository::journal","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"extraInfo":[],"id":"10|issn___print::a7a2010e75d849442790955162ef4e44","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"pubsrepository::journal","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"extraInfo":[],"id":"10|issn___print::a7a2010e75d849442790955162ef4e45","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"pubsrepository::journal","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"extraInfo":[],"id":"10|issn___print::a7a2010e75d849442790955162ef4e46","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"pubsrepository::journal","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"extraInfo":[],"id":"10|issn___print::a7a2010e75d849442790955162ef4e47","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
{"accessinfopackage":[],"collectedfrom":[{"key":"10|openaire____::081b82f96300b6a6e3d282bad31cb6e2","value":"Crossref"}],"contentpolicies":[{"classid":"Journal articles","classname":"Journal articles","schemeid":"eosc:contentpolicies","schemename":"eosc:contentpolicies"}],"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"dataprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"datasourcetype":{"classid":"pubsrepository::journal","classname":"Journal","schemeid":"dnet:datasource_typologies","schemename":"dnet:datasource_typologies"},"datasourcetypeui":{"classid":"Journal archive","classname":"Journal archive","schemeid":"dnet:datasource_typologies_ui","schemename":"dnet:datasource_typologies_ui"},"dateofcollection":"2020-07-10","englishname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"extraInfo":[],"id":"10|issn___print::a7a2010e75d849442790955162ef4e48","journal":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"issnPrinted":"2447-0546","name":"Estudos Portugueses"},"lastupdatetimestamp":1646230297782,"latitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"longitude":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"namespaceprefix":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"jrnl24470546"},"odcontenttypes":[],"odlanguages":[],"odnumberofitems":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"0.0"},"officialname":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":"Estudos Portugueses"},"openairecompatibility":{"classid":"hostedBy","classname":"collected from a compatible 
aggregator","schemeid":"dnet:datasourceCompatibilityLevel","schemename":"dnet:datasourceCompatibilityLevel"},"originalId":["issn___print::2447-0546"],"pid":[],"policies":[],"serviceprovider":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false},"subjects":[],"thematic":false,"versioning":{"dataInfo":{"deletedbyinference":false,"inferred":false,"invisible":false,"provenanceaction":{"classid":"sysimport:crosswalk:entityregistry","classname":"Harvested","schemeid":"dnet:provenanceActions","schemename":"dnet:provenanceActions"},"trust":"0.900"},"value":false}}
@ -4,7 +4,6 @@ package eu.dnetlib.dhp.oa.graph.clean;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

@ -21,15 +20,8 @@ import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.Context;
import eu.dnetlib.dhp.schema.oaf.Oaf;
import eu.dnetlib.dhp.schema.oaf.OafEntity;
import eu.dnetlib.dhp.schema.oaf.Result;
import eu.dnetlib.dhp.utils.ISLookupClientFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;

public class CleanContextSparkJob implements Serializable {

    private static final Logger log = LoggerFactory.getLogger(CleanContextSparkJob.class);

@ -1,58 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump;

import java.util.Map;

import com.google.common.collect.Maps;

import eu.dnetlib.dhp.schema.common.ModelConstants;

public class Constants {

    protected static final Map<String, String> accessRightsCoarMap = Maps.newHashMap();
    protected static final Map<String, String> coarCodeLabelMap = Maps.newHashMap();

    public static final String INFERRED = "Inferred by OpenAIRE";
    public static final String CABF2 = "c_abf2";

    public static final String HARVESTED = "Harvested";
    public static final String DEFAULT_TRUST = "0.9";
    public static final String USER_CLAIM = "Linked by user";

    public static final String COAR_ACCESS_RIGHT_SCHEMA = "http://vocabularies.coar-repositories.org/documentation/access_rights/";

    public static final String ZENODO_COMMUNITY_PREFIX = "https://zenodo.org/communities/";

    public static final String RESEARCH_COMMUNITY = "Research Community";

    public static final String RESEARCH_INFRASTRUCTURE = "Research Infrastructure/Initiative";

    static {
        accessRightsCoarMap.put(ModelConstants.ACCESS_RIGHT_OPEN, CABF2);
        accessRightsCoarMap.put("RESTRICTED", "c_16ec");
        accessRightsCoarMap.put("OPEN SOURCE", CABF2);
        accessRightsCoarMap.put(ModelConstants.ACCESS_RIGHT_CLOSED, "c_14cb");
        accessRightsCoarMap.put(ModelConstants.ACCESS_RIGHT_EMBARGO, "c_f1cf");
    }

    static {
        coarCodeLabelMap.put(CABF2, ModelConstants.ACCESS_RIGHT_OPEN);
        coarCodeLabelMap.put("c_16ec", "RESTRICTED");
        coarCodeLabelMap.put("c_14cb", ModelConstants.ACCESS_RIGHT_CLOSED);
        coarCodeLabelMap.put("c_f1cf", "EMBARGO");
    }

    public enum DUMPTYPE {
        COMPLETE("complete"), COMMUNITY("community"), FUNDER("funder");

        private final String type;

        DUMPTYPE(String type) {
            this.type = type;
        }

        public String getType() {
            return type;
        }
    }
}

@ -1,107 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
import eu.dnetlib.dhp.oa.graph.dump.exceptions.NoAvailableEntityTypeException;
import eu.dnetlib.dhp.schema.oaf.*;

/**
 * It fires the execution of the actual dump for result entities. If the dump is for RC/RI products, it checks, for
 * each result, its membership in at least one RC/RI before asking for its mapping.
 */
public class DumpProducts implements Serializable {

    public void run(Boolean isSparkSessionManaged, String inputPath, String outputPath, String communityMapPath,
        Class<? extends OafEntity> inputClazz,
        Class<? extends eu.dnetlib.dhp.schema.dump.oaf.Result> outputClazz,
        String dumpType) {

        SparkConf conf = new SparkConf();

        runWithSparkSession(
            conf,
            isSparkSessionManaged,
            spark -> {
                Utils.removeOutputDir(spark, outputPath);
                execDump(
                    spark, inputPath, outputPath, communityMapPath, inputClazz, outputClazz, dumpType);
            });
    }

    public static <I extends OafEntity, O extends eu.dnetlib.dhp.schema.dump.oaf.Result> void execDump(
        SparkSession spark,
        String inputPath,
        String outputPath,
        String communityMapPath,
        Class<I> inputClazz,
        Class<O> outputClazz,
        String dumpType) {

        CommunityMap communityMap = Utils.getCommunityMap(spark, communityMapPath);

        Utils
            .readPath(spark, inputPath, inputClazz)
            .map((MapFunction<I, O>) value -> execMap(value, communityMap, dumpType), Encoders.bean(outputClazz))
            .filter((FilterFunction<O>) value -> value != null)
            .write()
            .mode(SaveMode.Overwrite)
            .option("compression", "gzip")
            .json(outputPath);

    }

    private static <I extends OafEntity, O extends eu.dnetlib.dhp.schema.dump.oaf.Result> O execMap(I value,
        CommunityMap communityMap,
        String dumpType) throws NoAvailableEntityTypeException {

        Optional<DataInfo> odInfo = Optional.ofNullable(value.getDataInfo());
        if (odInfo.isPresent()) {
            if (odInfo.get().getDeletedbyinference() || odInfo.get().getInvisible()) {
                return null;
            }
        } else {
            return null;
        }

        if (Constants.DUMPTYPE.COMMUNITY.getType().equals(dumpType)) {
            Set<String> communities = communityMap.keySet();

            Optional<List<Context>> inputContext = Optional
                .ofNullable(((eu.dnetlib.dhp.schema.oaf.Result) value).getContext());
            if (!inputContext.isPresent()) {
                return null;
            }
            List<String> toDumpFor = inputContext.get().stream().map(c -> {
                if (communities.contains(c.getId())) {
                    return c.getId();
                }
                if (c.getId().contains("::") && communities.contains(c.getId().substring(0, c.getId().indexOf("::")))) {
                    return c.getId().substring(0, c.getId().indexOf("::"));
                }
                return null;
            }).filter(Objects::nonNull).collect(Collectors.toList());
            if (toDumpFor.isEmpty()) {
                return null;
            }
        }

        return (O) ResultMapper.map(value, communityMap, dumpType);

    }
}
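For orientation, a minimal sketch of how the class above is typically invoked; the paths below are hypothetical placeholders, not values taken from the actual workflow definitions:

    // Hedged sketch: all paths and the community map location are made-up examples.
    DumpProducts dump = new DumpProducts();
    dump
        .run(
            Boolean.TRUE, // let runWithSparkSession create and close the session
            "/tmp/graph/publication", // hypothetical input, one JSON record per line
            "/tmp/dump/publication", // hypothetical output path
            "/tmp/communityMapPath/communitymap.json", // hypothetical map written by SaveCommunityMap
            Publication.class, // eu.dnetlib.dhp.schema.oaf.Publication
            CommunityResult.class, // eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult
            Constants.DUMPTYPE.COMMUNITY.getType());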
@ -1,74 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump;

import java.io.IOException;
import java.io.Serializable;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.MakeTarArchive;

public class MakeTar implements Serializable {

    private static final Logger log = LoggerFactory.getLogger(MakeTar.class);

    public static void main(String[] args) throws Exception {
        String jsonConfiguration = IOUtils
            .toString(
                MakeTar.class
                    .getResourceAsStream(
                        "/eu/dnetlib/dhp/oa/graph/dump/input_maketar_parameters.json"));

        final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
        parser.parseArgument(args);

        final String outputPath = parser.get("hdfsPath");
        log.info("hdfsPath: {}", outputPath);

        final String hdfsNameNode = parser.get("nameNode");
        log.info("nameNode: {}", hdfsNameNode);

        final String inputPath = parser.get("sourcePath");
        log.info("input path : {}", inputPath);

        final int gBperSplit = Optional
            .ofNullable(parser.get("splitSize"))
            .map(Integer::valueOf)
            .orElse(10);

        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", hdfsNameNode);

        FileSystem fileSystem = FileSystem.get(conf);

        makeTArArchive(fileSystem, inputPath, outputPath, gBperSplit);

    }

    public static void makeTArArchive(FileSystem fileSystem, String inputPath, String outputPath, int gBperSplit)
        throws IOException {

        RemoteIterator<LocatedFileStatus> dirIterator = fileSystem.listLocatedStatus(new Path(inputPath));

        while (dirIterator.hasNext()) {
            LocatedFileStatus fileStatus = dirIterator.next();

            Path p = fileStatus.getPath();
            String pathString = p.toString();
            String entity = pathString.substring(pathString.lastIndexOf("/") + 1);

            MakeTarArchive.tarMaxSize(fileSystem, pathString, outputPath + "/" + entity, entity, gBperSplit);
        }

    }

}
@ -1,74 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump;

import java.io.StringReader;
import java.util.List;

import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Element;
import org.dom4j.io.SAXReader;
import org.xml.sax.SAXException;

import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;

public class QueryInformationSystem {

    private ISLookUpService isLookUp;

    private static final String XQUERY_ALL = "for $x in collection('/db/DRIVER/ContextDSResources/ContextDSResourceType') "
        +
        " where $x//CONFIGURATION/context[./@type='community' or ./@type='ri'] " +
        " and ($x//context/param[./@name = 'status']/text() = 'all') "
        +
        " return " +
        "<community> " +
        "{$x//CONFIGURATION/context/@id}" +
        "{$x//CONFIGURATION/context/@label}" +
        "</community>";

    private static final String XQUERY_CI = "for $x in collection('/db/DRIVER/ContextDSResources/ContextDSResourceType') "
        +
        " where $x//CONFIGURATION/context[./@type='community' or ./@type='ri'] " +
        " and $x//CONFIGURATION/context[./@id=%s] "
        +
        " return " +
        "<community> " +
        "{$x//CONFIGURATION/context/@id}" +
        "{$x//CONFIGURATION/context/@label}" +
        "</community>";

    public CommunityMap getCommunityMap(boolean singleCommunity, String communityId)
        throws ISLookUpException, DocumentException, SAXException {
        if (singleCommunity)
            return getMap(isLookUp.quickSearchProfile(XQUERY_CI.replace("%s", "'" + communityId + "'")));
        return getMap(isLookUp.quickSearchProfile(XQUERY_ALL));

    }

    public ISLookUpService getIsLookUp() {
        return isLookUp;
    }

    public void setIsLookUp(ISLookUpService isLookUpService) {
        this.isLookUp = isLookUpService;
    }

    private CommunityMap getMap(List<String> communityMap) throws DocumentException, SAXException {
        final CommunityMap map = new CommunityMap();

        for (String xml : communityMap) {
            final Document doc;
            final SAXReader reader = new SAXReader();
            reader.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
            doc = reader.read(new StringReader(xml));
            Element root = doc.getRootElement();
            map.put(root.attribute("id").getValue(), root.attribute("label").getValue());
        }

        return map;
    }

}
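A minimal sketch of how QueryInformationSystem is meant to be wired; the lookup URL and community id are placeholders, and exception handling is elided:

    QueryInformationSystem queryInformationSystem = new QueryInformationSystem();
    queryInformationSystem.setIsLookUp(Utils.getIsLookUpService("http://<is-host>/services/isLookUp")); // placeholder URL
    // singleCommunity = false runs XQUERY_ALL and returns every community/RI as an id -> label entry
    CommunityMap communityMap = queryInformationSystem.getCommunityMap(false, null);
    // singleCommunity = true runs XQUERY_CI for a single community id
    CommunityMap single = queryInformationSystem.getCommunityMap(true, "dh-ch"); // hypothetical community id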
@ -1,672 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump;

import java.io.Serializable;
import java.util.*;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;

import eu.dnetlib.dhp.oa.graph.dump.exceptions.NoAvailableEntityTypeException;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.dump.oaf.*;
import eu.dnetlib.dhp.schema.dump.oaf.AccessRight;
import eu.dnetlib.dhp.schema.dump.oaf.Author;
import eu.dnetlib.dhp.schema.dump.oaf.GeoLocation;
import eu.dnetlib.dhp.schema.dump.oaf.Instance;
import eu.dnetlib.dhp.schema.dump.oaf.Measure;
import eu.dnetlib.dhp.schema.dump.oaf.OpenAccessRoute;
import eu.dnetlib.dhp.schema.dump.oaf.Result;
import eu.dnetlib.dhp.schema.dump.oaf.community.CfHbKeyValue;
import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityInstance;
import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
import eu.dnetlib.dhp.schema.dump.oaf.community.Context;
import eu.dnetlib.dhp.schema.dump.oaf.graph.GraphResult;
import eu.dnetlib.dhp.schema.oaf.*;

public class ResultMapper implements Serializable {

    public static <E extends eu.dnetlib.dhp.schema.oaf.OafEntity> Result map(
        E in, Map<String, String> communityMap, String dumpType) throws NoAvailableEntityTypeException {

        Result out;
        if (Constants.DUMPTYPE.COMPLETE.getType().equals(dumpType)) {
            out = new GraphResult();
        } else {
            out = new CommunityResult();
        }

        eu.dnetlib.dhp.schema.oaf.Result input = (eu.dnetlib.dhp.schema.oaf.Result) in;
        Optional<eu.dnetlib.dhp.schema.oaf.Qualifier> ort = Optional.ofNullable(input.getResulttype());
        if (ort.isPresent()) {
            try {

                addTypeSpecificInformation(out, input, ort);

                Optional
                    .ofNullable(input.getAuthor())
                    .ifPresent(
                        ats -> out.setAuthor(ats.stream().map(ResultMapper::getAuthor).collect(Collectors.toList())));

                // I do not map Access Right UNKNOWN or OTHER

                Optional<eu.dnetlib.dhp.schema.oaf.Qualifier> oar = Optional.ofNullable(input.getBestaccessright());
                if (oar.isPresent() && Constants.accessRightsCoarMap.containsKey(oar.get().getClassid())) {
                    String code = Constants.accessRightsCoarMap.get(oar.get().getClassid());
                    out
                        .setBestaccessright(
                            BestAccessRight
                                .newInstance(
                                    code,
                                    Constants.coarCodeLabelMap.get(code),
                                    Constants.COAR_ACCESS_RIGHT_SCHEMA));
                }

                final List<String> contributorList = new ArrayList<>();
                Optional
                    .ofNullable(input.getContributor())
                    .ifPresent(value -> value.stream().forEach(c -> contributorList.add(c.getValue())));
                out.setContributor(contributorList);

                Optional
                    .ofNullable(input.getCountry())
                    .ifPresent(
                        value -> out
                            .setCountry(
                                value
                                    .stream()
                                    .map(
                                        c -> {
                                            if (c.getClassid().equals((ModelConstants.UNKNOWN))) {
                                                return null;
                                            }
                                            ResultCountry country = new ResultCountry();
                                            country.setCode(c.getClassid());
                                            country.setLabel(c.getClassname());
                                            Optional
                                                .ofNullable(c.getDataInfo())
                                                .ifPresent(
                                                    provenance -> country
                                                        .setProvenance(
                                                            Provenance
                                                                .newInstance(
                                                                    provenance
                                                                        .getProvenanceaction()
                                                                        .getClassname(),
                                                                    c.getDataInfo().getTrust())));
                                            return country;
                                        })
                                    .filter(Objects::nonNull)
                                    .collect(Collectors.toList())));

                final List<String> coverageList = new ArrayList<>();
                Optional
                    .ofNullable(input.getCoverage())
                    .ifPresent(value -> value.stream().forEach(c -> coverageList.add(c.getValue())));
                out.setCoverage(coverageList);

                out.setDateofcollection(input.getDateofcollection());

                final List<String> descriptionList = new ArrayList<>();
                Optional
                    .ofNullable(input.getDescription())
                    .ifPresent(value -> value.forEach(d -> descriptionList.add(d.getValue())));
                out.setDescription(descriptionList);
                Optional<Field<String>> oStr = Optional.ofNullable(input.getEmbargoenddate());
                if (oStr.isPresent()) {
                    out.setEmbargoenddate(oStr.get().getValue());
                }

                final List<String> formatList = new ArrayList<>();
                Optional
                    .ofNullable(input.getFormat())
                    .ifPresent(value -> value.stream().forEach(f -> formatList.add(f.getValue())));
                out.setFormat(formatList);
                out.setId(input.getId());
                out.setOriginalId(new ArrayList<>());
                Optional
                    .ofNullable(input.getOriginalId())
                    .ifPresent(
                        v -> out
                            .setOriginalId(
                                input
                                    .getOriginalId()
                                    .stream()
                                    .filter(s -> !s.startsWith("50|"))
                                    .collect(Collectors.toList())));

                Optional<List<eu.dnetlib.dhp.schema.oaf.Instance>> oInst = Optional
                    .ofNullable(input.getInstance());

                if (oInst.isPresent()) {
                    if (Constants.DUMPTYPE.COMPLETE.getType().equals(dumpType)) {
                        ((GraphResult) out)
                            .setInstance(
                                oInst.get().stream().map(ResultMapper::getGraphInstance).collect(Collectors.toList()));
                    } else {
                        ((CommunityResult) out)
                            .setInstance(
                                oInst
                                    .get()
                                    .stream()
                                    .map(ResultMapper::getCommunityInstance)
                                    .collect(Collectors.toList()));
                    }
                }

                Optional<eu.dnetlib.dhp.schema.oaf.Qualifier> oL = Optional.ofNullable(input.getLanguage());
                if (oL.isPresent()) {
                    eu.dnetlib.dhp.schema.oaf.Qualifier language = oL.get();
                    out.setLanguage(Language.newInstance(language.getClassid(), language.getClassname()));
                }
                Optional<Long> oLong = Optional.ofNullable(input.getLastupdatetimestamp());
                if (oLong.isPresent()) {
                    out.setLastupdatetimestamp(oLong.get());
                }
                Optional<List<StructuredProperty>> otitle = Optional.ofNullable(input.getTitle());
                if (otitle.isPresent()) {
                    List<StructuredProperty> iTitle = otitle
                        .get()
                        .stream()
                        .filter(t -> t.getQualifier().getClassid().equalsIgnoreCase("main title"))
                        .collect(Collectors.toList());
                    if (!iTitle.isEmpty()) {
                        out.setMaintitle(iTitle.get(0).getValue());
                    }

                    iTitle = otitle
                        .get()
                        .stream()
                        .filter(t -> t.getQualifier().getClassid().equalsIgnoreCase("subtitle"))
                        .collect(Collectors.toList());
                    if (!iTitle.isEmpty()) {
                        out.setSubtitle(iTitle.get(0).getValue());
                    }

                }

                Optional
                    .ofNullable(input.getPid())
                    .ifPresent(
                        value -> out
                            .setPid(
                                value
                                    .stream()
                                    .map(
                                        p -> ResultPid
                                            .newInstance(p.getQualifier().getClassid(), p.getValue()))
                                    .collect(Collectors.toList())));

                oStr = Optional.ofNullable(input.getDateofacceptance());
                if (oStr.isPresent()) {
                    out.setPublicationdate(oStr.get().getValue());
                }
                oStr = Optional.ofNullable(input.getPublisher());
                if (oStr.isPresent()) {
                    out.setPublisher(oStr.get().getValue());
                }

                Optional
                    .ofNullable(input.getSource())
                    .ifPresent(
                        value -> out.setSource(value.stream().map(Field::getValue).collect(Collectors.toList())));

                List<Subject> subjectList = new ArrayList<>();
                Optional
                    .ofNullable(input.getSubject())
                    .ifPresent(
                        value -> value
                            .forEach(s -> subjectList.add(getSubject(s))));

                out.setSubjects(subjectList);

                out.setType(input.getResulttype().getClassid());

                if (!Constants.DUMPTYPE.COMPLETE.getType().equals(dumpType)) {
                    ((CommunityResult) out)
                        .setCollectedfrom(
                            input
                                .getCollectedfrom()
                                .stream()
                                .map(cf -> CfHbKeyValue.newInstance(cf.getKey(), cf.getValue()))
                                .collect(Collectors.toList()));

                    Set<String> communities = communityMap.keySet();
                    List<Context> contextList = Optional
                        .ofNullable(
                            input
                                .getContext())
                        .map(
                            value -> value
                                .stream()
                                .map(c -> {
                                    String communityId = c.getId();
                                    if (communityId.contains("::")) {
                                        communityId = communityId.substring(0, communityId.indexOf("::"));
                                    }
                                    if (communities.contains(communityId)) {
                                        Context context = new Context();
                                        context.setCode(communityId);
                                        context.setLabel(communityMap.get(communityId));
                                        Optional<List<DataInfo>> dataInfo = Optional.ofNullable(c.getDataInfo());
                                        if (dataInfo.isPresent()) {
                                            List<Provenance> provenance = new ArrayList<>();
                                            provenance
                                                .addAll(
                                                    dataInfo
                                                        .get()
                                                        .stream()
                                                        .map(
                                                            di -> Optional
                                                                .ofNullable(di.getProvenanceaction())
                                                                .map(
                                                                    provenanceaction -> Provenance
                                                                        .newInstance(
                                                                            provenanceaction.getClassname(),
                                                                            di.getTrust()))
                                                                .orElse(null))
                                                        .filter(Objects::nonNull)
                                                        .collect(Collectors.toSet()));

                                            try {
                                                context.setProvenance(getUniqueProvenance(provenance));
                                            } catch (NoAvailableEntityTypeException e) {
                                                e.printStackTrace();
                                            }
                                        }
                                        return context;
                                    }
                                    return null;
                                })
                                .filter(Objects::nonNull)
                                .collect(Collectors.toList()))
                        .orElse(new ArrayList<>());

                    if (!contextList.isEmpty()) {
                        Set<Integer> hashValue = new HashSet<>();
                        List<Context> remainigContext = new ArrayList<>();
                        contextList.forEach(c -> {
                            if (!hashValue.contains(c.hashCode())) {
                                remainigContext.add(c);
                                hashValue.add(c.hashCode());
                            }
                        });
                        ((CommunityResult) out).setContext(remainigContext);
                    }
                }
            } catch (ClassCastException cce) {
                return out;
            }
        }

        return out;

    }

    private static void addTypeSpecificInformation(Result out, eu.dnetlib.dhp.schema.oaf.Result input,
        Optional<eu.dnetlib.dhp.schema.oaf.Qualifier> ort) throws NoAvailableEntityTypeException {
        switch (ort.get().getClassid()) {
            case "publication":
                Optional<Journal> journal = Optional
                    .ofNullable(((Publication) input).getJournal());
                if (journal.isPresent()) {
                    Journal j = journal.get();
                    Container c = new Container();
                    c.setConferencedate(j.getConferencedate());
                    c.setConferenceplace(j.getConferenceplace());
                    c.setEdition(j.getEdition());
                    c.setEp(j.getEp());
                    c.setIss(j.getIss());
                    c.setIssnLinking(j.getIssnLinking());
                    c.setIssnOnline(j.getIssnOnline());
                    c.setIssnPrinted(j.getIssnPrinted());
                    c.setName(j.getName());
                    c.setSp(j.getSp());
                    c.setVol(j.getVol());
                    out.setContainer(c);
                    out.setType(ModelConstants.PUBLICATION_DEFAULT_RESULTTYPE.getClassname());
                }
                break;
            case "dataset":
                Dataset id = (Dataset) input;
                Optional.ofNullable(id.getSize()).ifPresent(v -> out.setSize(v.getValue()));
                Optional.ofNullable(id.getVersion()).ifPresent(v -> out.setVersion(v.getValue()));

                out
                    .setGeolocation(
                        Optional
                            .ofNullable(id.getGeolocation())
                            .map(
                                igl -> igl
                                    .stream()
                                    .filter(Objects::nonNull)
                                    .map(gli -> {
                                        GeoLocation gl = new GeoLocation();
                                        gl.setBox(gli.getBox());
                                        gl.setPlace(gli.getPlace());
                                        gl.setPoint(gli.getPoint());
                                        return gl;
                                    })
                                    .collect(Collectors.toList()))
                            .orElse(null));

                out.setType(ModelConstants.DATASET_DEFAULT_RESULTTYPE.getClassname());
                break;
            case "software":

                Software is = (Software) input;
                Optional
                    .ofNullable(is.getCodeRepositoryUrl())
                    .ifPresent(value -> out.setCodeRepositoryUrl(value.getValue()));
                Optional
                    .ofNullable(is.getDocumentationUrl())
                    .ifPresent(
                        value -> out
                            .setDocumentationUrl(
                                value
                                    .stream()
                                    .map(Field::getValue)
                                    .collect(Collectors.toList())));

                Optional
                    .ofNullable(is.getProgrammingLanguage())
                    .ifPresent(value -> out.setProgrammingLanguage(value.getClassid()));

                out.setType(ModelConstants.SOFTWARE_DEFAULT_RESULTTYPE.getClassname());
                break;
            case "other":

                OtherResearchProduct ir = (OtherResearchProduct) input;
                out
                    .setContactgroup(
                        Optional
                            .ofNullable(ir.getContactgroup())
                            .map(value -> value.stream().map(Field::getValue).collect(Collectors.toList()))
                            .orElse(null));

                out
                    .setContactperson(
                        Optional
                            .ofNullable(ir.getContactperson())
                            .map(value -> value.stream().map(Field::getValue).collect(Collectors.toList()))
                            .orElse(null));
                out
                    .setTool(
                        Optional
                            .ofNullable(ir.getTool())
                            .map(value -> value.stream().map(Field::getValue).collect(Collectors.toList()))
                            .orElse(null));

                out.setType(ModelConstants.ORP_DEFAULT_RESULTTYPE.getClassname());

                break;
            default:
                throw new NoAvailableEntityTypeException();
        }
    }

    private static Instance getGraphInstance(eu.dnetlib.dhp.schema.oaf.Instance i) {
        Instance instance = new Instance();

        setCommonValue(i, instance);

        return instance;

    }

    private static CommunityInstance getCommunityInstance(eu.dnetlib.dhp.schema.oaf.Instance i) {
        CommunityInstance instance = new CommunityInstance();

        setCommonValue(i, instance);

        instance
            .setCollectedfrom(
                CfHbKeyValue
                    .newInstance(i.getCollectedfrom().getKey(), i.getCollectedfrom().getValue()));

        instance
            .setHostedby(
                CfHbKeyValue.newInstance(i.getHostedby().getKey(), i.getHostedby().getValue()));

        return instance;

    }

    private static <I extends Instance> void setCommonValue(eu.dnetlib.dhp.schema.oaf.Instance i, I instance) {
        Optional<eu.dnetlib.dhp.schema.oaf.AccessRight> opAr = Optional.ofNullable(i.getAccessright());

        if (opAr.isPresent() && Constants.accessRightsCoarMap.containsKey(opAr.get().getClassid())) {
            String code = Constants.accessRightsCoarMap.get(opAr.get().getClassid());

            instance
                .setAccessright(
                    AccessRight
                        .newInstance(
                            code,
                            Constants.coarCodeLabelMap.get(code),
                            Constants.COAR_ACCESS_RIGHT_SCHEMA));

            Optional<List<eu.dnetlib.dhp.schema.oaf.Measure>> mes = Optional.ofNullable(i.getMeasures());
            if (mes.isPresent()) {
                List<Measure> measure = new ArrayList<>();
                mes
                    .get()
                    .forEach(
                        m -> m.getUnit().forEach(u -> measure.add(Measure.newInstance(m.getId(), u.getValue()))));
                instance.setMeasures(measure);
            }

            if (opAr.get().getOpenAccessRoute() != null) {
                switch (opAr.get().getOpenAccessRoute()) {
                    case hybrid:
                        instance.getAccessright().setOpenAccessRoute(OpenAccessRoute.hybrid);
                        break;
                    case gold:
                        instance.getAccessright().setOpenAccessRoute(OpenAccessRoute.gold);
                        break;
                    case green:
                        instance.getAccessright().setOpenAccessRoute(OpenAccessRoute.green);
                        break;
                    case bronze:
                        instance.getAccessright().setOpenAccessRoute(OpenAccessRoute.bronze);
                        break;

                }
            }

        }

        Optional
            .ofNullable(i.getPid())
            .ifPresent(
                pid -> instance
                    .setPid(
                        pid
                            .stream()
                            .map(p -> ResultPid.newInstance(p.getQualifier().getClassid(), p.getValue()))
                            .collect(Collectors.toList())));

        Optional
            .ofNullable(i.getAlternateIdentifier())
            .ifPresent(
                ai -> instance
                    .setAlternateIdentifier(
                        ai
                            .stream()
                            .map(p -> AlternateIdentifier.newInstance(p.getQualifier().getClassid(), p.getValue()))
                            .collect(Collectors.toList())));

        Optional
            .ofNullable(i.getLicense())
            .ifPresent(value -> instance.setLicense(value.getValue()));
        Optional
            .ofNullable(i.getDateofacceptance())
            .ifPresent(value -> instance.setPublicationdate(value.getValue()));
        Optional
            .ofNullable(i.getRefereed())
            .ifPresent(value -> instance.setRefereed(value.getClassname()));
        Optional
            .ofNullable(i.getInstancetype())
            .ifPresent(value -> instance.setType(value.getClassname()));
        Optional.ofNullable(i.getUrl()).ifPresent(value -> instance.setUrl(value));
        Optional<Field<String>> oPca = Optional.ofNullable(i.getProcessingchargeamount());
        Optional<Field<String>> oPcc = Optional.ofNullable(i.getProcessingchargecurrency());
        if (oPca.isPresent() && oPcc.isPresent()) {
            Field<String> pca = oPca.get();
            Field<String> pcc = oPcc.get();
            if (!pca.getValue().trim().equals("") && !pcc.getValue().trim().equals("")) {
                APC apc = new APC();
                apc.setCurrency(oPcc.get().getValue());
                apc.setAmount(oPca.get().getValue());
                instance.setArticleprocessingcharge(apc);
            }

        }
        Optional.ofNullable(i.getUrl()).ifPresent(instance::setUrl);

    }

    private static List<Provenance> getUniqueProvenance(List<Provenance> provenance)
        throws NoAvailableEntityTypeException {
        Provenance iProv = new Provenance();

        Provenance hProv = new Provenance();
        Provenance lProv = new Provenance();

        for (Provenance p : provenance) {
            switch (p.getProvenance()) {
                case Constants.HARVESTED:
                    hProv = getHighestTrust(hProv, p);
                    break;
                case Constants.INFERRED:
                    iProv = getHighestTrust(iProv, p);
                    // To be removed as soon as the new beta run has been done:
                    // this fixes the issue of the trust not being set during bulktagging
                    if (StringUtils.isEmpty(iProv.getTrust())) {
                        iProv.setTrust(Constants.DEFAULT_TRUST);
                    }
                    break;
                case Constants.USER_CLAIM:
                    lProv = getHighestTrust(lProv, p);
                    break;
                default:
                    throw new NoAvailableEntityTypeException();
            }

        }

        return Arrays
            .asList(iProv, hProv, lProv)
            .stream()
            .filter(p -> !StringUtils.isEmpty(p.getProvenance()))
            .collect(Collectors.toList());

    }

    private static Provenance getHighestTrust(Provenance hProv, Provenance p) {
        if (StringUtils.isNoneEmpty(hProv.getTrust(), p.getTrust()))
            return hProv.getTrust().compareTo(p.getTrust()) > 0 ? hProv : p;

        return (StringUtils.isEmpty(p.getTrust()) && !StringUtils.isEmpty(hProv.getTrust())) ? hProv : p;

    }

    private static Subject getSubject(StructuredProperty s) {
        Subject subject = new Subject();
        subject.setSubject(SubjectSchemeValue.newInstance(s.getQualifier().getClassid(), s.getValue()));
        Optional<DataInfo> di = Optional.ofNullable(s.getDataInfo());
        if (di.isPresent()) {
            Provenance p = new Provenance();
            p.setProvenance(di.get().getProvenanceaction().getClassname());
            p.setTrust(di.get().getTrust());
            subject.setProvenance(p);
        }

        return subject;
    }

    private static Author getAuthor(eu.dnetlib.dhp.schema.oaf.Author oa) {
        Author a = new Author();
        a.setFullname(oa.getFullname());
        a.setName(oa.getName());
        a.setSurname(oa.getSurname());
        a.setRank(oa.getRank());

        Optional<List<StructuredProperty>> oPids = Optional
            .ofNullable(oa.getPid());
        if (oPids.isPresent()) {
            AuthorPid pid = getOrcid(oPids.get());
            if (pid != null) {
                a.setPid(pid);
            }
        }

        return a;
    }

    private static AuthorPid getAuthorPid(StructuredProperty pid) {
        Optional<DataInfo> di = Optional.ofNullable(pid.getDataInfo());
        if (di.isPresent()) {
            return AuthorPid
                .newInstance(
                    AuthorPidSchemeValue
                        .newInstance(
                            pid.getQualifier().getClassid(),
                            pid.getValue()),
                    Provenance
                        .newInstance(
                            di.get().getProvenanceaction().getClassname(),
                            di.get().getTrust()));
        } else {
            return AuthorPid
                .newInstance(
                    AuthorPidSchemeValue
                        .newInstance(
                            pid.getQualifier().getClassid(),
                            pid.getValue())

                );
        }
    }

    private static AuthorPid getOrcid(List<StructuredProperty> p) {
        List<StructuredProperty> pidList = p.stream().map(pid -> {
            if (pid.getQualifier().getClassid().equals(ModelConstants.ORCID) ||
                (pid.getQualifier().getClassid().equals(ModelConstants.ORCID_PENDING))) {
                return pid;
            }
            return null;
        }).filter(Objects::nonNull).collect(Collectors.toList());

        if (pidList.size() == 1) {
            return getAuthorPid(pidList.get(0));
        }

        List<StructuredProperty> orcid = pidList
            .stream()
            .filter(
                ap -> ap
                    .getQualifier()
                    .getClassid()
                    .equals(ModelConstants.ORCID))
            .collect(Collectors.toList());
        if (orcid.size() == 1) {
            return getAuthorPid(orcid.get(0));
        }
        orcid = pidList
            .stream()
            .filter(
                ap -> ap
                    .getQualifier()
                    .getClassid()
                    .equals(ModelConstants.ORCID_PENDING))
            .collect(Collectors.toList());
        if (orcid.size() == 1) {
            return getAuthorPid(orcid.get(0));
        }

        return null;
    }

}
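To make the dispatch above concrete, a small hedged sketch of calling the mapper directly; the input record and the community map entry are hypothetical:

    Map<String, String> communityMap = new HashMap<>();
    communityMap.put("dh-ch", "Digital Humanities and Cultural Heritage"); // hypothetical entry

    eu.dnetlib.dhp.schema.oaf.Publication input = new eu.dnetlib.dhp.schema.oaf.Publication();
    // map(...) throws NoAvailableEntityTypeException when the result type is not recognised;
    // DUMPTYPE.COMPLETE yields a GraphResult, any other dump type a CommunityResult.
    Result out = ResultMapper.map(input, communityMap, Constants.DUMPTYPE.COMMUNITY.getType());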
@ -1,99 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump;

import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.dom4j.DocumentException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;

/**
 * This class connects with the IS related to the isLookUpUrl received as a parameter. It saves the information about
 * the context that will guide the dump of the results. The information saved is a HashMap: the key is the id of a
 * community - research infrastructure/initiative, the value is the label of the research community - research
 * infrastructure/initiative.
 */
public class SaveCommunityMap implements Serializable {

    private static final Logger log = LoggerFactory.getLogger(SaveCommunityMap.class);
    private final transient QueryInformationSystem queryInformationSystem;

    private final transient BufferedWriter writer;

    public SaveCommunityMap(String hdfsPath, String hdfsNameNode, String isLookUpUrl) throws IOException {
        final Configuration conf = new Configuration();
        conf.set("fs.defaultFS", hdfsNameNode);
        FileSystem fileSystem = FileSystem.get(conf);
        Path hdfsWritePath = new Path(hdfsPath);

        if (fileSystem.exists(hdfsWritePath)) {
            fileSystem.delete(hdfsWritePath, true);
        }

        queryInformationSystem = new QueryInformationSystem();
        queryInformationSystem.setIsLookUp(Utils.getIsLookUpService(isLookUpUrl));

        FSDataOutputStream fos = fileSystem.create(hdfsWritePath);
        writer = new BufferedWriter(new OutputStreamWriter(fos, StandardCharsets.UTF_8));
    }

    public static void main(String[] args) throws Exception {
        String jsonConfiguration = IOUtils
            .toString(
                SaveCommunityMap.class
                    .getResourceAsStream(
                        "/eu/dnetlib/dhp/oa/graph/dump/input_cm_parameters.json"));

        final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
        parser.parseArgument(args);

        final String nameNode = parser.get("nameNode");
        log.info("nameNode: {}", nameNode);

        final String outputPath = parser.get("outputPath");
        log.info("outputPath: {}", outputPath);

        final String isLookUpUrl = parser.get("isLookUpUrl");
        log.info("isLookUpUrl: {}", isLookUpUrl);

        final Boolean singleCommunity = Optional
            .ofNullable(parser.get("singleDeposition"))
            .map(Boolean::valueOf)
            .orElse(false);

        final String community_id = Optional.ofNullable(parser.get("communityId")).orElse(null);

        final SaveCommunityMap scm = new SaveCommunityMap(outputPath, nameNode, isLookUpUrl);

        scm.saveCommunityMap(singleCommunity, community_id);

    }

    private void saveCommunityMap(boolean singleCommunity, String communityId)
        throws ISLookUpException, IOException, DocumentException, SAXException {
        final String communityMapString = Utils.OBJECT_MAPPER
            .writeValueAsString(queryInformationSystem.getCommunityMap(singleCommunity, communityId));
        log.info("communityMap {} ", communityMapString);
        writer
            .write(
                communityMapString);
        writer.close();
    }

}
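The saved artifact is simply the CommunityMap serialized as a single JSON object; a sketch reading it back with the helper from Utils, with hypothetical paths and exception handling elided:

    Configuration conf = new Configuration();
    conf.set("fs.defaultFS", "hdfs://nameservice1"); // hypothetical name node
    FileSystem fileSystem = FileSystem.get(conf);
    CommunityMap communityMap = Utils.readCommunityMap(fileSystem, "/tmp/communityMapPath/communitymap.json"); // hypothetical path
    communityMap.forEach((id, label) -> System.out.println(id + " -> " + label)); // id -> label pairs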
@ -1,100 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump;

import java.io.Serializable;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.api.MissingConceptDoiException;
import eu.dnetlib.dhp.common.api.ZenodoAPIClient;
import eu.dnetlib.dhp.oa.graph.dump.exceptions.NoAvailableEntityTypeException;

public class SendToZenodoHDFS implements Serializable {

    private static final String NEW = "new"; // to be used for a brand new deposition in zenodo
    private static final String VERSION = "version"; // to be used to upload a new version of a published deposition
    private static final String UPDATE = "update"; // to upload content to an open deposition not published

    public static void main(final String[] args) throws Exception, MissingConceptDoiException {
        final ArgumentApplicationParser parser = new ArgumentApplicationParser(
            IOUtils
                .toString(
                    SendToZenodoHDFS.class
                        .getResourceAsStream(
                            "/eu/dnetlib/dhp/oa/graph/dump/upload_zenodo.json")));

        parser.parseArgument(args);

        final String hdfsPath = parser.get("hdfsPath");
        final String hdfsNameNode = parser.get("nameNode");
        final String access_token = parser.get("accessToken");
        final String connection_url = parser.get("connectionUrl");
        final String metadata = parser.get("metadata");
        final String depositionType = parser.get("depositionType");
        final String concept_rec_id = Optional
            .ofNullable(parser.get("conceptRecordId"))
            .orElse(null);
        final Boolean publish = Optional
            .ofNullable(parser.get("publish"))
            .map(Boolean::valueOf)
            .orElse(false);

        final String depositionId = Optional.ofNullable(parser.get("depositionId")).orElse(null);

        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", hdfsNameNode);

        FileSystem fileSystem = FileSystem.get(conf);

        RemoteIterator<LocatedFileStatus> fileStatusListIterator = fileSystem
            .listFiles(
                new Path(hdfsPath), true);
        ZenodoAPIClient zenodoApiClient = new ZenodoAPIClient(connection_url, access_token);
        switch (depositionType) {
            case NEW:
                zenodoApiClient.newDeposition();
                break;
            case VERSION:
                if (concept_rec_id == null) {
                    throw new MissingConceptDoiException("No concept record id has been provided");
                }
                zenodoApiClient.newVersion(concept_rec_id);
                break;
            case UPDATE:
                if (depositionId == null) {
                    throw new MissingConceptDoiException("No deposition id has been provided");
                }
                zenodoApiClient.uploadOpenDeposition(depositionId);
                break;
            default:
                throw new NoAvailableEntityTypeException();
        }

        while (fileStatusListIterator.hasNext()) {
            LocatedFileStatus fileStatus = fileStatusListIterator.next();

            Path p = fileStatus.getPath();
            String pString = p.toString();
            if (!pString.endsWith("_SUCCESS")) {
                String name = pString.substring(pString.lastIndexOf("/") + 1);

                FSDataInputStream inputStream = fileSystem.open(p);
                zenodoApiClient.uploadIS(inputStream, name, fileStatus.getLen());

            }

        }
        if (!metadata.equals("")) {
            zenodoApiClient.sendMretadata(metadata);
        }

        if (Boolean.TRUE.equals(publish)) {
            zenodoApiClient.publish();
        }
    }

}
@ -1,77 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;

import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
import eu.dnetlib.dhp.oa.graph.dump.complete.Constants;
import eu.dnetlib.dhp.utils.DHPUtils;
import eu.dnetlib.dhp.utils.ISLookupClientFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;

public class Utils {
    public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    private Utils() {
    }

    public static void removeOutputDir(SparkSession spark, String path) {
        HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration());
    }

    public static <R> Dataset<R> readPath(
        SparkSession spark, String inputPath, Class<R> clazz) {
        return spark
            .read()
            .textFile(inputPath)
            .map((MapFunction<String, R>) value -> OBJECT_MAPPER.readValue(value, clazz), Encoders.bean(clazz));
    }

    public static ISLookUpService getIsLookUpService(String isLookUpUrl) {
        return ISLookupClientFactory.getLookUpService(isLookUpUrl);
    }

    public static String getContextId(String id) {

        return String
            .format(
                "%s|%s::%s", Constants.CONTEXT_ID, Constants.CONTEXT_NS_PREFIX,
                DHPUtils.md5(id));
    }

    public static CommunityMap getCommunityMap(SparkSession spark, String communityMapPath) {

        return new Gson().fromJson(spark.read().textFile(communityMapPath).collectAsList().get(0), CommunityMap.class);

    }

    public static CommunityMap readCommunityMap(FileSystem fileSystem, String communityMapPath) throws IOException {
        BufferedReader br = new BufferedReader(new InputStreamReader(fileSystem.open(new Path(communityMapPath))));
        StringBuilder sb = new StringBuilder();
        try {
            String line;
            while ((line = br.readLine()) != null) {
                sb.append(line);
            }
        } finally {
            br.close();

        }

        return new Gson().fromJson(sb.toString(), CommunityMap.class);
    }

}
@ -1,8 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.community;

import java.io.Serializable;
import java.util.HashMap;

public class CommunityMap extends HashMap<String, String> implements Serializable {
}
@ -1,81 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.community;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.util.Optional;
import java.util.stream.Collectors;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
import eu.dnetlib.dhp.schema.dump.oaf.community.Context;

/**
 * This class splits the dumped results according to the research community - research initiative/infrastructure they
 * are related to. The information about the community is found in the element "context.id" of the result. Since a
 * context found in a result may refer to entities other than communities, a community map is provided to guide the
 * splitting process. Note the repartition(1) just before the results related to a community are written: this is a
 * choice imposed by uploading constraints (just one file for each community). As soon as a better solution is in
 * place, the repartition should be removed.
 */
public class CommunitySplit implements Serializable {
|
||||
|
||||
public void run(Boolean isSparkSessionManaged, String inputPath, String outputPath, String communityMapPath) {
|
||||
SparkConf conf = new SparkConf();
|
||||
runWithSparkSession(
|
||||
conf,
|
||||
isSparkSessionManaged,
|
||||
spark -> {
|
||||
Utils.removeOutputDir(spark, outputPath);
|
||||
CommunityMap communityMap = Utils.getCommunityMap(spark, communityMapPath);
|
||||
execSplit(spark, inputPath, outputPath, communityMap);
|
||||
});
|
||||
}
|
||||
|
||||
private static void execSplit(SparkSession spark, String inputPath, String outputPath,
|
||||
CommunityMap communities) {
|
||||
|
||||
Dataset<CommunityResult> result = Utils
|
||||
.readPath(spark, inputPath + "/publication", CommunityResult.class)
|
||||
.union(Utils.readPath(spark, inputPath + "/dataset", CommunityResult.class))
|
||||
.union(Utils.readPath(spark, inputPath + "/orp", CommunityResult.class))
|
||||
.union(Utils.readPath(spark, inputPath + "/software", CommunityResult.class));
|
||||
|
||||
communities
|
||||
.keySet()
|
||||
.stream()
|
||||
.forEach(c -> printResult(c, result, outputPath + "/" + communities.get(c).replace(" ", "_")));
|
||||
|
||||
}
|
||||
|
||||
private static void printResult(String c, Dataset<CommunityResult> result, String outputPath) {
|
||||
Dataset<CommunityResult> communityProducts = result
|
||||
.filter((FilterFunction<CommunityResult>) r -> containsCommunity(r, c));
|
||||
|
||||
communityProducts
|
||||
.write()
|
||||
.option("compression", "gzip")
|
||||
.mode(SaveMode.Overwrite)
|
||||
.json(outputPath);
|
||||
|
||||
}
|
||||
|
||||
private static boolean containsCommunity(CommunityResult r, String c) {
|
||||
if (Optional.ofNullable(r.getContext()).isPresent()) {
|
||||
return r
|
||||
.getContext()
|
||||
.stream()
|
||||
.map(Context::getCode)
|
||||
.collect(Collectors.toList())
|
||||
.contains(c);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
|
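A hedged invocation sketch for the splitter (all paths are placeholders; the input directory is expected to hold the publication/dataset/orp/software subdirectories read by execSplit, and the community map is the one-line JSON consumed by Utils.getCommunityMap):

import eu.dnetlib.dhp.oa.graph.dump.community.CommunitySplit;

public class SplitForCommunitySketch {
	public static void main(String[] args) {
		// placeholder HDFS paths, for illustration only
		new CommunitySplit()
			.run(Boolean.TRUE, "/tmp/dump/ext", "/tmp/dump/communities", "/tmp/dump/communityMap");
	}
}
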
@@ -1,28 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.community;

import java.io.Serializable;
import java.util.List;

import eu.dnetlib.dhp.schema.dump.oaf.community.Project;

public class ResultProject implements Serializable {
	private String resultId;
	private List<Project> projectsList;

	public String getResultId() {
		return resultId;
	}

	public void setResultId(String resultId) {
		this.resultId = resultId;
	}

	public List<Project> getProjectsList() {
		return projectsList;
	}

	public void setProjectsList(List<Project> projectsList) {
		this.projectsList = projectsList;
	}
}

@@ -1,67 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.community;

import java.io.Serializable;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.DumpProducts;
import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
import eu.dnetlib.dhp.schema.oaf.Result;

/**
 * Spark action to trigger the dump of results associated with a research community or research
 * initiative/infrastructure. The actual dump is performed via the class DumpProducts, which is also used for the
 * entire graph dump.
 */
public class SparkDumpCommunityProducts implements Serializable {

	private static final Logger log = LoggerFactory.getLogger(SparkDumpCommunityProducts.class);

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				SparkDumpCommunityProducts.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/dump/input_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		final String resultClassName = parser.get("resultTableName");
		log.info("resultTableName: {}", resultClassName);

		String communityMapPath = parser.get("communityMapPath");

		final String dumpType = Optional
			.ofNullable(parser.get("dumpType"))
			.map(String::valueOf)
			.orElse("community");

		Class<? extends Result> inputClazz = (Class<? extends Result>) Class.forName(resultClassName);

		DumpProducts dump = new DumpProducts();

		dump
			.run(
				isSparkSessionManaged, inputPath, outputPath, communityMapPath, inputClazz, CommunityResult.class,
				dumpType);
	}

}

@@ -1,191 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.community;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.io.StringReader;
import java.util.*;
import java.util.stream.Collectors;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.api.java.function.MapGroupsFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Node;
import org.dom4j.io.SAXReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.dump.oaf.Provenance;
import eu.dnetlib.dhp.schema.dump.oaf.community.Funder;
import eu.dnetlib.dhp.schema.dump.oaf.community.Project;
import eu.dnetlib.dhp.schema.dump.oaf.community.Validated;
import eu.dnetlib.dhp.schema.oaf.DataInfo;
import eu.dnetlib.dhp.schema.oaf.Field;
import eu.dnetlib.dhp.schema.oaf.Relation;
import scala.Tuple2;

/**
 * Preparation of the project information to be added to the dumped results. For each result associated to at least
 * one project, an instance of the ResultProject class is serialized. ResultProject contains the resultId and the list
 * of Projects (as in eu.dnetlib.dhp.schema.dump.oaf.community.Project) it is associated to.
 */
public class SparkPrepareResultProject implements Serializable {
	private static final Logger log = LoggerFactory.getLogger(SparkPrepareResultProject.class);

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				SparkPrepareResultProject.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/dump/project_prep_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		SparkConf conf = new SparkConf();

		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				Utils.removeOutputDir(spark, outputPath);
				prepareResultProjectList(spark, inputPath, outputPath);
			});
	}

	private static void prepareResultProjectList(SparkSession spark, String inputPath, String outputPath) {
		Dataset<Relation> relation = Utils
			.readPath(spark, inputPath + "/relation", Relation.class)
			.filter(
				"dataInfo.deletedbyinference = false and lower(relClass) = '"
					+ ModelConstants.IS_PRODUCED_BY.toLowerCase() + "'");
		Dataset<eu.dnetlib.dhp.schema.oaf.Project> projects = Utils
			.readPath(spark, inputPath + "/project", eu.dnetlib.dhp.schema.oaf.Project.class);

		projects
			.joinWith(relation, projects.col("id").equalTo(relation.col("target")), "inner")
			.groupByKey(
				(MapFunction<Tuple2<eu.dnetlib.dhp.schema.oaf.Project, Relation>, String>) value -> value
					._2()
					.getSource(),
				Encoders.STRING())
			.mapGroups(
				(MapGroupsFunction<String, Tuple2<eu.dnetlib.dhp.schema.oaf.Project, Relation>, ResultProject>) (s,
					it) -> {
					Set<String> projectSet = new HashSet<>();
					Tuple2<eu.dnetlib.dhp.schema.oaf.Project, Relation> first = it.next();
					ResultProject rp = new ResultProject();
					rp.setResultId(s);
					eu.dnetlib.dhp.schema.oaf.Project p = first._1();
					projectSet.add(p.getId());
					Project ps = getProject(p, first._2);

					List<Project> projList = new ArrayList<>();
					projList.add(ps);
					rp.setProjectsList(projList);
					it.forEachRemaining(c -> {
						eu.dnetlib.dhp.schema.oaf.Project op = c._1();
						if (!projectSet.contains(op.getId())) {
							projList.add(getProject(op, c._2));
							projectSet.add(op.getId());
						}
					});
					return rp;
				}, Encoders.bean(ResultProject.class))
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(outputPath);
	}

	private static Project getProject(eu.dnetlib.dhp.schema.oaf.Project op, Relation relation) {
		Project p = Project
			.newInstance(
				op.getId(),
				op.getCode().getValue(),
				Optional
					.ofNullable(op.getAcronym())
					.map(Field::getValue)
					.orElse(null),
				Optional
					.ofNullable(op.getTitle())
					.map(Field::getValue)
					.orElse(null),
				Optional
					.ofNullable(op.getFundingtree())
					.map(value -> {
						List<Funder> tmp = value
							.stream()
							.map(ft -> getFunder(ft.getValue()))
							.collect(Collectors.toList());
						if (!tmp.isEmpty()) {
							return tmp.get(0);
						} else {
							return null;
						}
					})
					.orElse(null));

		Optional<DataInfo> di = Optional.ofNullable(op.getDataInfo());
		Provenance provenance = new Provenance();
		if (di.isPresent()) {
			provenance.setProvenance(di.get().getProvenanceaction().getClassname());
			provenance.setTrust(di.get().getTrust());
			p.setProvenance(provenance);
		}
		if (Boolean.TRUE.equals(relation.getValidated())) {
			p.setValidated(Validated.newInstance(relation.getValidated(), relation.getValidationDate()));
		}
		return p;
	}

	private static Funder getFunder(String fundingtree) {
		final Funder f = new Funder();
		final Document doc;
		try {
			final SAXReader reader = new SAXReader();
			// hardening against XXE: doctype declarations are not allowed in funding trees
			reader.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
			doc = reader.read(new StringReader(fundingtree));
			f.setShortName(((Node) (doc.selectNodes("//funder/shortname").get(0))).getText());
			f.setName(((Node) (doc.selectNodes("//funder/name").get(0))).getText());
			f.setJurisdiction(((Node) (doc.selectNodes("//funder/jurisdiction").get(0))).getText());
			for (Object o : doc.selectNodes("//funding_level_0")) {
				List<Node> node = ((Node) o).selectNodes("./name");
				f.setFundingStream((node.get(0)).getText());
			}

			return f;
		} catch (DocumentException | SAXException e) {
			throw new IllegalArgumentException(e);
		}
	}
}

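For reference, the shape of the funding tree getFunder parses, exercised on an invented fragment; only the element names are taken from the XPath expressions above, the values are illustrative:

import java.io.StringReader;

import org.dom4j.Document;
import org.dom4j.Node;
import org.dom4j.io.SAXReader;

public class FundingTreeSketch {
	public static void main(String[] args) throws Exception {
		// invented fragment mirroring the //funder and //funding_level_0 paths used above
		String fundingtree = "<fundingtree><funder><shortname>EC</shortname>"
			+ "<name>European Commission</name><jurisdiction>EU</jurisdiction></funder>"
			+ "<funding_level_0><name>H2020</name></funding_level_0></fundingtree>";
		Document doc = new SAXReader().read(new StringReader(fundingtree));
		System.out.println(((Node) doc.selectNodes("//funder/shortname").get(0)).getText()); // EC
		System.out.println(((Node) doc.selectNodes("//funding_level_0/name").get(0)).getText()); // H2020
	}
}
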
@@ -1,50 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.community;

import java.io.Serializable;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;

/**
 * Spark job to trigger the split of results associated with a research community or research
 * initiative/infrastructure. The actual split is performed by the class CommunitySplit.
 */
public class SparkSplitForCommunity implements Serializable {

	private static final Logger log = LoggerFactory.getLogger(SparkSplitForCommunity.class);

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				SparkSplitForCommunity.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/dump/split_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		final String communityMapPath = parser.get("communityMapPath");

		CommunitySplit split = new CommunitySplit();
		split.run(isSparkSessionManaged, inputPath, outputPath, communityMapPath);
	}

}

@@ -1,90 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.community;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
import scala.Tuple2;

public class SparkUpdateProjectInfo implements Serializable {

	private static final Logger log = LoggerFactory.getLogger(SparkUpdateProjectInfo.class);
	public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				SparkUpdateProjectInfo.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/dump/project_input_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		final String preparedInfoPath = parser.get("preparedInfoPath");
		log.info("preparedInfoPath: {}", preparedInfoPath);

		SparkConf conf = new SparkConf();

		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				Utils.removeOutputDir(spark, outputPath);
				extend(spark, inputPath, outputPath, preparedInfoPath);
			});
	}

	private static void extend(
		SparkSession spark,
		String inputPath,
		String outputPath,
		String preparedInfoPath) {
		Dataset<CommunityResult> result = Utils.readPath(spark, inputPath, CommunityResult.class);
		Dataset<ResultProject> resultProject = Utils.readPath(spark, preparedInfoPath, ResultProject.class);
		result
			.joinWith(
				resultProject, result.col("id").equalTo(resultProject.col("resultId")),
				"left")
			.map((MapFunction<Tuple2<CommunityResult, ResultProject>, CommunityResult>) value -> {
				CommunityResult r = value._1();
				Optional.ofNullable(value._2()).ifPresent(rp -> r.setProjects(rp.getProjectsList()));
				return r;
			}, Encoders.bean(CommunityResult.class))
			.write()
			.option("compression", "gzip")
			.mode(SaveMode.Append)
			.json(outputPath);
	}

}

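The "left" join in extend keeps results that have no matching ResultProject, which is why the map step guards value._2() with Optional. A standalone sketch of the same pattern, with invented bean types standing in for CommunityResult and ResultProject:

import java.util.Arrays;
import java.util.List;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

import scala.Tuple2;

public class LeftJoinSketch {
	public static void main(String[] args) {
		SparkSession spark = SparkSession.builder().master("local").appName("sketch").getOrCreate();
		Dataset<Rec> results = spark
			.createDataset(Arrays.asList(new Rec("r1"), new Rec("r2")), Encoders.bean(Rec.class));
		Dataset<Proj> projects = spark
			.createDataset(Arrays.asList(new Proj("r1", "p1")), Encoders.bean(Proj.class));
		List<Tuple2<Rec, Proj>> joined = results
			.joinWith(projects, results.col("id").equalTo(projects.col("resultId")), "left")
			.collectAsList();
		for (Tuple2<Rec, Proj> t : joined) {
			// unmatched rows carry a null _2, hence the Optional.ofNullable above
			System.out.println(t._1().getId() + " -> " + (t._2() == null ? "no project" : t._2().getProjectId()));
		}
		spark.stop();
	}

	public static class Rec implements java.io.Serializable {
		private String id;

		public Rec() {
		}

		public Rec(String id) {
			this.id = id;
		}

		public String getId() {
			return id;
		}

		public void setId(String id) {
			this.id = id;
		}
	}

	public static class Proj implements java.io.Serializable {
		private String resultId;
		private String projectId;

		public Proj() {
		}

		public Proj(String resultId, String projectId) {
			this.resultId = resultId;
			this.projectId = projectId;
		}

		public String getResultId() {
			return resultId;
		}

		public void setResultId(String resultId) {
			this.resultId = resultId;
		}

		public String getProjectId() {
			return projectId;
		}

		public void setProjectId(String projectId) {
			this.projectId = projectId;
		}
	}
}
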
@@ -1,26 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.complete;

import java.io.Serializable;

public class Constants implements Serializable {

	public static final String IS_HOSTED_BY = "isHostedBy";
	public static final String HOSTS = "hosts";

	public static final String IS_FUNDED_BY = "isFundedBy";
	public static final String FUNDS = "funds";

	public static final String FUNDINGS = "fundings";

	public static final String RESULT_ENTITY = "result";
	public static final String DATASOURCE_ENTITY = "datasource";
	public static final String CONTEXT_ENTITY = "context";
	public static final String ORGANIZATION_ENTITY = "organization";
	public static final String PROJECT_ENTITY = "project";

	public static final String CONTEXT_ID = "00";
	public static final String CONTEXT_NS_PREFIX = "context_____";
	public static final String UNKNOWN = "UNKNOWN";

}

@@ -1,84 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.complete;

import java.io.Serializable;
import java.util.List;

/**
 * Deserialization of the context information needed to create Context Entities and the relations between context
 * entities and datasources/projects.
 */
public class ContextInfo implements Serializable {
	private String id;
	private String description;
	private String type;
	private String zenodocommunity;
	private String name;
	private List<String> projectList;
	private List<String> datasourceList;
	private List<String> subject;

	public List<String> getSubject() {
		return subject;
	}

	public void setSubject(List<String> subject) {
		this.subject = subject;
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public String getId() {
		return id;
	}

	public void setId(String id) {
		this.id = id;
	}

	public String getDescription() {
		return description;
	}

	public void setDescription(String description) {
		this.description = description;
	}

	public String getType() {
		return type;
	}

	public void setType(String type) {
		this.type = type;
	}

	public String getZenodocommunity() {
		return zenodocommunity;
	}

	public void setZenodocommunity(String zenodocommunity) {
		this.zenodocommunity = zenodocommunity;
	}

	public List<String> getProjectList() {
		return projectList;
	}

	public void setProjectList(List<String> projectList) {
		this.projectList = projectList;
	}

	public List<String> getDatasourceList() {
		return datasourceList;
	}

	public void setDatasourceList(List<String> datasourceList) {
		this.datasourceList = datasourceList;
	}
}

@@ -1,110 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.complete;

import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.util.function.Consumer;
import java.util.function.Function;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.schema.dump.oaf.graph.ResearchInitiative;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;

/**
 * Writes Context entities on HDFS. It queries the Information System at the lookup url provided as parameter and
 * collects the general information for contexts of type community or ri. The general information is the id of the
 * context, its label, the subjects associated to the context, its zenodo community, description and type. This
 * information is used to create a new Context Entity.
 */
public class CreateContextEntities implements Serializable {

	private static final Logger log = LoggerFactory.getLogger(CreateContextEntities.class);
	private final transient Configuration conf;
	private final transient BufferedWriter writer;

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				CreateContextEntities.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/dump/input_entity_parameter.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		final String hdfsPath = parser.get("hdfsPath");
		log.info("hdfsPath: {}", hdfsPath);

		final String hdfsNameNode = parser.get("nameNode");
		log.info("nameNode: {}", hdfsNameNode);

		final String isLookUpUrl = parser.get("isLookUpUrl");
		log.info("isLookUpUrl: {}", isLookUpUrl);

		final CreateContextEntities cce = new CreateContextEntities(hdfsPath, hdfsNameNode);

		log.info("Processing contexts...");
		cce.execute(Process::getEntity, isLookUpUrl);

		cce.close();
	}

	private void close() throws IOException {
		writer.close();
	}

	public CreateContextEntities(String hdfsPath, String hdfsNameNode) throws IOException {
		this.conf = new Configuration();
		this.conf.set("fs.defaultFS", hdfsNameNode);
		FileSystem fileSystem = FileSystem.get(this.conf);
		Path hdfsWritePath = new Path(hdfsPath);
		FSDataOutputStream fsDataOutputStream;
		// append to the file if it already exists, create it otherwise
		if (fileSystem.exists(hdfsWritePath)) {
			fsDataOutputStream = fileSystem.append(hdfsWritePath);
		} else {
			fsDataOutputStream = fileSystem.create(hdfsWritePath);
		}
		CompressionCodecFactory factory = new CompressionCodecFactory(conf);
		CompressionCodec codec = factory.getCodecByClassName("org.apache.hadoop.io.compress.GzipCodec");

		this.writer = new BufferedWriter(new OutputStreamWriter(codec.createOutputStream(fsDataOutputStream),
			StandardCharsets.UTF_8));
	}

	public <R extends ResearchInitiative> void execute(final Function<ContextInfo, R> producer, String isLookUpUrl)
		throws ISLookUpException {

		QueryInformationSystem queryInformationSystem = new QueryInformationSystem();
		queryInformationSystem.setIsLookUp(Utils.getIsLookUpService(isLookUpUrl));

		final Consumer<ContextInfo> consumer = ci -> writeEntity(producer.apply(ci));

		queryInformationSystem.getContextInformation(consumer);
	}

	protected <R extends ResearchInitiative> void writeEntity(final R r) {
		try {
			writer.write(Utils.OBJECT_MAPPER.writeValueAsString(r));
			writer.newLine();
		} catch (final IOException e) {
			throw new IllegalArgumentException(e);
		}
	}

}

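The constructor's append-or-create plus Gzip-codec pattern, isolated into a runnable sketch; the name node address and output path are placeholders:

import java.io.BufferedWriter;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;

public class GzipHdfsWriterSketch {
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://namenode:8020"); // placeholder name node
		FileSystem fs = FileSystem.get(conf);
		Path out = new Path("/tmp/entities.gz"); // placeholder output path
		// append if the file exists, create otherwise: the same policy as the constructor above
		FSDataOutputStream stream = fs.exists(out) ? fs.append(out) : fs.create(out);
		CompressionCodec codec = new CompressionCodecFactory(conf)
			.getCodecByClassName("org.apache.hadoop.io.compress.GzipCodec");
		try (BufferedWriter writer = new BufferedWriter(
			new OutputStreamWriter(codec.createOutputStream(stream), StandardCharsets.UTF_8))) {
			writer.write("{\"id\":\"00|context_____::abc\"}"); // illustrative record
			writer.newLine();
		}
	}
}
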
@@ -1,128 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.complete;

import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.function.Function;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.oa.graph.dump.exceptions.MyRuntimeException;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.dump.oaf.graph.*;
import eu.dnetlib.dhp.schema.oaf.Datasource;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;

/**
 * Writes the set of new Relations between the contexts and the datasources. At the moment the relation between the
 * context and the project is not created because of the low coverage, in the profiles, of the OpenAIRE ids related to
 * projects.
 */
public class CreateContextRelation implements Serializable {
	private static final Logger log = LoggerFactory.getLogger(CreateContextRelation.class);
	private final transient Configuration conf;
	private final transient BufferedWriter writer;
	private final transient QueryInformationSystem queryInformationSystem;

	private static final String CONTEX_RELATION_DATASOURCE = "contentproviders";
	private static final String CONTEX_RELATION_PROJECT = "projects";

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				Objects
					.requireNonNull(
						CreateContextRelation.class
							.getResourceAsStream(
								"/eu/dnetlib/dhp/oa/graph/dump/input_entity_parameter.json")));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String hdfsPath = parser.get("hdfsPath");
		log.info("hdfsPath: {}", hdfsPath);

		final String hdfsNameNode = parser.get("nameNode");
		log.info("nameNode: {}", hdfsNameNode);

		final String isLookUpUrl = parser.get("isLookUpUrl");
		log.info("isLookUpUrl: {}", isLookUpUrl);

		final CreateContextRelation cce = new CreateContextRelation(hdfsPath, hdfsNameNode, isLookUpUrl);

		log.info("Creating relations for datasources...");
		cce.execute(Process::getRelation, CONTEX_RELATION_DATASOURCE, ModelSupport.getIdPrefix(Datasource.class));

		log.info("Creating relations for projects...");
		cce
			.execute(
				Process::getRelation, CONTEX_RELATION_PROJECT,
				ModelSupport.getIdPrefix(eu.dnetlib.dhp.schema.oaf.Project.class));

		cce.close();
	}

	private void close() throws IOException {
		writer.close();
	}

	public CreateContextRelation(String hdfsPath, String hdfsNameNode, String isLookUpUrl)
		throws IOException, ISLookUpException {
		this.conf = new Configuration();
		this.conf.set("fs.defaultFS", hdfsNameNode);

		queryInformationSystem = new QueryInformationSystem();
		queryInformationSystem.setIsLookUp(Utils.getIsLookUpService(isLookUpUrl));
		queryInformationSystem.execContextRelationQuery();

		FileSystem fileSystem = FileSystem.get(this.conf);
		Path hdfsWritePath = new Path(hdfsPath);
		FSDataOutputStream fsDataOutputStream;
		// append to the file if it already exists, create it otherwise
		if (fileSystem.exists(hdfsWritePath)) {
			fsDataOutputStream = fileSystem.append(hdfsWritePath);
		} else {
			fsDataOutputStream = fileSystem.create(hdfsWritePath);
		}

		this.writer = new BufferedWriter(new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8));
	}

	public void execute(final Function<ContextInfo, List<Relation>> producer, String category, String prefix) {

		final Consumer<ContextInfo> consumer = ci -> producer.apply(ci).forEach(this::writeEntity);

		queryInformationSystem.getContextRelation(consumer, category, prefix);
	}

	protected void writeEntity(final Relation r) {
		try {
			writer.write(Utils.OBJECT_MAPPER.writeValueAsString(r));
			writer.newLine();
		} catch (final Exception e) {
			throw new MyRuntimeException(e);
		}
	}

}

@@ -1,518 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.complete;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.io.StringReader;
import java.util.*;
import java.util.stream.Collectors;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Node;
import org.dom4j.io.SAXReader;

import eu.dnetlib.dhp.oa.graph.dump.DumpProducts;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.dump.oaf.*;
import eu.dnetlib.dhp.schema.dump.oaf.graph.*;
import eu.dnetlib.dhp.schema.dump.oaf.graph.Funder;
import eu.dnetlib.dhp.schema.dump.oaf.graph.Project;
import eu.dnetlib.dhp.schema.oaf.Field;
import eu.dnetlib.dhp.schema.oaf.Journal;
import eu.dnetlib.dhp.schema.oaf.OafEntity;

/**
 * Dumps the entities in the model defined in eu.dnetlib.dhp.schema.dump.oaf.graph. Results are dumped using the same
 * mapper as for eu.dnetlib.dhp.schema.dump.oaf.community, while for the other entities the mapping is defined below.
 */
public class DumpGraphEntities implements Serializable {

	public void run(Boolean isSparkSessionManaged,
		String inputPath,
		String outputPath,
		Class<? extends OafEntity> inputClazz,
		String communityMapPath) {

		SparkConf conf = new SparkConf();

		switch (ModelSupport.idPrefixMap.get(inputClazz)) {
			case "50":
				DumpProducts d = new DumpProducts();
				d
					.run(
						isSparkSessionManaged, inputPath, outputPath, communityMapPath, inputClazz, GraphResult.class,
						eu.dnetlib.dhp.oa.graph.dump.Constants.DUMPTYPE.COMPLETE.getType());
				break;
			case "40":
				runWithSparkSession(
					conf,
					isSparkSessionManaged,
					spark -> {
						Utils.removeOutputDir(spark, outputPath);
						projectMap(spark, inputPath, outputPath, inputClazz);
					});
				break;
			case "20":
				runWithSparkSession(
					conf,
					isSparkSessionManaged,
					spark -> {
						Utils.removeOutputDir(spark, outputPath);
						organizationMap(spark, inputPath, outputPath, inputClazz);
					});
				break;
			case "10":
				runWithSparkSession(
					conf,
					isSparkSessionManaged,
					spark -> {
						Utils.removeOutputDir(spark, outputPath);
						datasourceMap(spark, inputPath, outputPath, inputClazz);
					});
				break;
		}
	}

	private static <E extends OafEntity> void datasourceMap(SparkSession spark, String inputPath, String outputPath,
		Class<E> inputClazz) {
		Utils
			.readPath(spark, inputPath, inputClazz)
			.map(
				(MapFunction<E, Datasource>) d -> mapDatasource((eu.dnetlib.dhp.schema.oaf.Datasource) d),
				Encoders.bean(Datasource.class))
			.filter(Objects::nonNull)
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(outputPath);
	}

	private static <E extends OafEntity> void projectMap(SparkSession spark, String inputPath, String outputPath,
		Class<E> inputClazz) {
		Utils
			.readPath(spark, inputPath, inputClazz)
			.map(
				(MapFunction<E, Project>) p -> mapProject((eu.dnetlib.dhp.schema.oaf.Project) p),
				Encoders.bean(Project.class))
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(outputPath);
	}

	private static Datasource mapDatasource(eu.dnetlib.dhp.schema.oaf.Datasource d) {
		Datasource datasource = new Datasource();

		datasource.setId(d.getId());

		Optional
			.ofNullable(d.getOriginalId())
			.ifPresent(
				oId -> datasource.setOriginalId(oId.stream().filter(Objects::nonNull).collect(Collectors.toList())));

		Optional
			.ofNullable(d.getPid())
			.ifPresent(
				pids -> datasource
					.setPid(
						pids
							.stream()
							.map(p -> DatasourcePid.newInstance(p.getQualifier().getClassid(), p.getValue()))
							.collect(Collectors.toList())));

		Optional
			.ofNullable(d.getDatasourcetype())
			.ifPresent(
				dsType -> datasource
					.setDatasourcetype(DatasourceSchemeValue.newInstance(dsType.getClassid(), dsType.getClassname())));

		Optional
			.ofNullable(d.getOpenairecompatibility())
			.ifPresent(v -> datasource.setOpenairecompatibility(v.getClassname()));

		Optional
			.ofNullable(d.getOfficialname())
			.ifPresent(oname -> datasource.setOfficialname(oname.getValue()));

		Optional
			.ofNullable(d.getEnglishname())
			.ifPresent(ename -> datasource.setEnglishname(ename.getValue()));

		Optional
			.ofNullable(d.getWebsiteurl())
			.ifPresent(wsite -> datasource.setWebsiteurl(wsite.getValue()));

		Optional
			.ofNullable(d.getLogourl())
			.ifPresent(lurl -> datasource.setLogourl(lurl.getValue()));

		Optional
			.ofNullable(d.getDateofvalidation())
			.ifPresent(dval -> datasource.setDateofvalidation(dval.getValue()));

		Optional
			.ofNullable(d.getDescription())
			.ifPresent(dex -> datasource.setDescription(dex.getValue()));

		Optional
			.ofNullable(d.getSubjects())
			.ifPresent(
				sbjs -> datasource.setSubjects(sbjs.stream().map(sbj -> sbj.getValue()).collect(Collectors.toList())));

		Optional
			.ofNullable(d.getOdpolicies())
			.ifPresent(odp -> datasource.setPolicies(Arrays.asList(odp.getValue())));

		Optional
			.ofNullable(d.getOdlanguages())
			.ifPresent(
				langs -> datasource
					.setLanguages(langs.stream().map(lang -> lang.getValue()).collect(Collectors.toList())));

		Optional
			.ofNullable(d.getOdcontenttypes())
			.ifPresent(
				ctypes -> datasource
					.setContenttypes(ctypes.stream().map(ctype -> ctype.getValue()).collect(Collectors.toList())));

		Optional
			.ofNullable(d.getReleasestartdate())
			.ifPresent(rd -> datasource.setReleasestartdate(rd.getValue()));

		Optional
			.ofNullable(d.getReleaseenddate())
			.ifPresent(ed -> datasource.setReleaseenddate(ed.getValue()));

		Optional
			.ofNullable(d.getMissionstatementurl())
			.ifPresent(ms -> datasource.setMissionstatementurl(ms.getValue()));

		Optional
			.ofNullable(d.getDatabaseaccesstype())
			.ifPresent(ar -> datasource.setAccessrights(ar.getValue()));

		Optional
			.ofNullable(d.getDatauploadtype())
			.ifPresent(dut -> datasource.setUploadrights(dut.getValue()));

		Optional
			.ofNullable(d.getDatabaseaccessrestriction())
			.ifPresent(dar -> datasource.setDatabaseaccessrestriction(dar.getValue()));

		Optional
			.ofNullable(d.getDatauploadrestriction())
			.ifPresent(dur -> datasource.setDatauploadrestriction(dur.getValue()));

		Optional
			.ofNullable(d.getVersioning())
			.ifPresent(v -> datasource.setVersioning(v.getValue()));

		Optional
			.ofNullable(d.getCitationguidelineurl())
			.ifPresent(cu -> datasource.setCitationguidelineurl(cu.getValue()));

		Optional
			.ofNullable(d.getPidsystems())
			.ifPresent(ps -> datasource.setPidsystems(ps.getValue()));

		Optional
			.ofNullable(d.getCertificates())
			.ifPresent(c -> datasource.setCertificates(c.getValue()));

		Optional
			.ofNullable(d.getPolicies())
			.ifPresent(ps -> datasource.setPolicies(ps.stream().map(p -> p.getValue()).collect(Collectors.toList())));

		Optional
			.ofNullable(d.getJournal())
			.ifPresent(j -> datasource.setJournal(getContainer(j)));

		return datasource;
	}

	private static Container getContainer(Journal j) {
		Container c = new Container();

		Optional
			.ofNullable(j.getName())
			.ifPresent(n -> c.setName(n));

		Optional
			.ofNullable(j.getIssnPrinted())
			.ifPresent(issnp -> c.setIssnPrinted(issnp));

		Optional
			.ofNullable(j.getIssnOnline())
			.ifPresent(issno -> c.setIssnOnline(issno));

		Optional
			.ofNullable(j.getIssnLinking())
			.ifPresent(isnl -> c.setIssnLinking(isnl));

		Optional
			.ofNullable(j.getEp())
			.ifPresent(ep -> c.setEp(ep));

		Optional
			.ofNullable(j.getIss())
			.ifPresent(iss -> c.setIss(iss));

		Optional
			.ofNullable(j.getSp())
			.ifPresent(sp -> c.setSp(sp));

		Optional
			.ofNullable(j.getVol())
			.ifPresent(vol -> c.setVol(vol));

		Optional
			.ofNullable(j.getEdition())
			.ifPresent(edition -> c.setEdition(edition));

		Optional
			.ofNullable(j.getConferencedate())
			.ifPresent(cdate -> c.setConferencedate(cdate));

		Optional
			.ofNullable(j.getConferenceplace())
			.ifPresent(cplace -> c.setConferenceplace(cplace));

		return c;
	}

	private static Project mapProject(eu.dnetlib.dhp.schema.oaf.Project p) throws DocumentException {
		Project project = new Project();

		Optional
			.ofNullable(p.getId())
			.ifPresent(id -> project.setId(id));

		Optional
			.ofNullable(p.getWebsiteurl())
			.ifPresent(w -> project.setWebsiteurl(w.getValue()));

		Optional
			.ofNullable(p.getCode())
			.ifPresent(code -> project.setCode(code.getValue()));

		Optional
			.ofNullable(p.getAcronym())
			.ifPresent(acronynim -> project.setAcronym(acronynim.getValue()));

		Optional
			.ofNullable(p.getTitle())
			.ifPresent(title -> project.setTitle(title.getValue()));

		Optional
			.ofNullable(p.getStartdate())
			.ifPresent(sdate -> project.setStartdate(sdate.getValue()));

		Optional
			.ofNullable(p.getEnddate())
			.ifPresent(edate -> project.setEnddate(edate.getValue()));

		Optional
			.ofNullable(p.getCallidentifier())
			.ifPresent(cide -> project.setCallidentifier(cide.getValue()));

		Optional
			.ofNullable(p.getKeywords())
			.ifPresent(key -> project.setKeywords(key.getValue()));

		Optional<Field<String>> omandate = Optional.ofNullable(p.getOamandatepublications());
		Optional<Field<String>> oecsc39 = Optional.ofNullable(p.getEcsc39());
		boolean mandate = false;
		if (omandate.isPresent()) {
			if (omandate.get().getValue().equals("true")) {
				mandate = true;
			}
		}
		if (oecsc39.isPresent()) {
			if (oecsc39.get().getValue().equals("true")) {
				mandate = true;
			}
		}

		project.setOpenaccessmandateforpublications(mandate);
		project.setOpenaccessmandatefordataset(false);

		Optional
			.ofNullable(p.getEcarticle29_3())
			.ifPresent(oamandate -> project.setOpenaccessmandatefordataset(oamandate.getValue().equals("true")));

		project
			.setSubject(
				Optional
					.ofNullable(p.getSubjects())
					.map(subjs -> subjs.stream().map(s -> s.getValue()).collect(Collectors.toList()))
					.orElse(new ArrayList<>()));

		Optional
			.ofNullable(p.getSummary())
			.ifPresent(summary -> project.setSummary(summary.getValue()));

		Optional<Float> ofundedamount = Optional.ofNullable(p.getFundedamount());
		Optional<Field<String>> ocurrency = Optional.ofNullable(p.getCurrency());
		Optional<Float> ototalcost = Optional.ofNullable(p.getTotalcost());

		if (ocurrency.isPresent()) {
			if (ofundedamount.isPresent()) {
				if (ototalcost.isPresent()) {
					project
						.setGranted(
							Granted.newInstance(ocurrency.get().getValue(), ototalcost.get(), ofundedamount.get()));
				} else {
					project.setGranted(Granted.newInstance(ocurrency.get().getValue(), ofundedamount.get()));
				}
			}
		}

		project
			.setH2020programme(
				Optional
					.ofNullable(p.getH2020classification())
					.map(
						classification -> classification
							.stream()
							.map(
								c -> Programme
									.newInstance(
										c.getH2020Programme().getCode(), c.getH2020Programme().getDescription()))
							.collect(Collectors.toList()))
					.orElse(new ArrayList<>()));

		Optional<List<Field<String>>> ofundTree = Optional
			.ofNullable(p.getFundingtree());
		List<Funder> funList = new ArrayList<>();
		if (ofundTree.isPresent()) {
			for (Field<String> fundingtree : ofundTree.get()) {
				funList.add(getFunder(fundingtree.getValue()));
			}
		}
		project.setFunding(funList);

		return project;
	}

	public static Funder getFunder(String fundingtree) throws DocumentException {
		Funder f = new Funder();
		final Document doc;

		doc = new SAXReader().read(new StringReader(fundingtree));
		f.setShortName(((org.dom4j.Node) (doc.selectNodes("//funder/shortname").get(0))).getText());
		f.setName(((org.dom4j.Node) (doc.selectNodes("//funder/name").get(0))).getText());
		f.setJurisdiction(((org.dom4j.Node) (doc.selectNodes("//funder/jurisdiction").get(0))).getText());
		// f.setId(((org.dom4j.Node) (doc.selectNodes("//funder/id").get(0))).getText());

		String id = "";
		String description = "";
		// List<Levels> fundings = new ArrayList<>();
		int level = 0;
		// walk the funding levels (funding_level_0, funding_level_1, ...) until one is missing
		List<org.dom4j.Node> nodes = doc.selectNodes("//funding_level_" + level);
		while (nodes.size() > 0) {
			for (org.dom4j.Node n : nodes) {

				List node = n.selectNodes("./id");
				id = ((org.dom4j.Node) node.get(0)).getText();
				id = id.substring(id.indexOf("::") + 2);

				node = n.selectNodes("./description");
				description += ((Node) node.get(0)).getText() + " - ";
			}
			level += 1;
			nodes = doc.selectNodes("//funding_level_" + level);
		}

		if (!id.equals("")) {
			Fundings fundings = new Fundings();
			fundings.setId(id);
			fundings.setDescription(description.substring(0, description.length() - 3).trim());
			f.setFunding_stream(fundings);
		}

		return f;
	}

	private static <E extends OafEntity> void organizationMap(SparkSession spark, String inputPath, String outputPath,
		Class<E> inputClazz) {
		Utils
			.readPath(spark, inputPath, inputClazz)
			.map(
				(MapFunction<E, Organization>) o -> mapOrganization((eu.dnetlib.dhp.schema.oaf.Organization) o),
				Encoders.bean(Organization.class))
			.filter((FilterFunction<Organization>) o -> o != null)
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(outputPath);
	}

	private static eu.dnetlib.dhp.schema.dump.oaf.graph.Organization mapOrganization(
		eu.dnetlib.dhp.schema.oaf.Organization org) {
		if (org.getDataInfo().getDeletedbyinference())
			return null;
		Organization organization = new Organization();

		Optional
			.ofNullable(org.getLegalshortname())
			.ifPresent(value -> organization.setLegalshortname(value.getValue()));

		Optional
			.ofNullable(org.getLegalname())
			.ifPresent(value -> organization.setLegalname(value.getValue()));

		Optional
			.ofNullable(org.getWebsiteurl())
			.ifPresent(value -> organization.setWebsiteurl(value.getValue()));

		Optional
			.ofNullable(org.getAlternativeNames())
			.ifPresent(
				value -> organization
					.setAlternativenames(
						value
							.stream()
							.map(v -> v.getValue())
							.collect(Collectors.toList())));

		Optional
			.ofNullable(org.getCountry())
			.ifPresent(
				value -> {
					if (!value.getClassid().equals(Constants.UNKNOWN)) {
						organization.setCountry(Country.newInstance(value.getClassid(), value.getClassname()));
					}
				});

		Optional
			.ofNullable(org.getId())
			.ifPresent(value -> organization.setId(value));

		Optional
			.ofNullable(org.getPid())
			.ifPresent(
				value -> organization
					.setPid(
						value
							.stream()
							.map(p -> OrganizationPid.newInstance(p.getQualifier().getClassid(), p.getValue()))
							.collect(Collectors.toList())));

		return organization;
	}
}

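For orientation: the switch in run dispatches on the id prefix of the input class, 50 for results, 40 for projects, 20 for organizations, 10 for datasources. A hedged usage sketch for the organization branch (paths are placeholders):

import eu.dnetlib.dhp.oa.graph.dump.complete.DumpGraphEntities;
import eu.dnetlib.dhp.schema.oaf.Organization;

public class DumpOrganizationsSketch {
	public static void main(String[] args) {
		// placeholder HDFS paths; the "20" id prefix routes Organization.class to organizationMap
		new DumpGraphEntities()
			.run(Boolean.TRUE, "/tmp/graph/organization", "/tmp/dump/organization",
				Organization.class, "/tmp/dump/communityMap");
	}
}
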
@@ -1,201 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.complete;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.util.*;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.dump.oaf.Provenance;
import eu.dnetlib.dhp.schema.dump.oaf.graph.Node;
import eu.dnetlib.dhp.schema.dump.oaf.graph.RelType;
import eu.dnetlib.dhp.schema.dump.oaf.graph.Relation;
import eu.dnetlib.dhp.schema.oaf.KeyValue;
import eu.dnetlib.dhp.schema.oaf.Result;

/**
 * Creates new Relations (as in eu.dnetlib.dhp.schema.dump.oaf.graph.Relation) from the information in the entity. The
 * new Relations are created for the datasources in the collectedfrom and hostedby elements and for the contexts
 * related to communities and research initiatives/infrastructures. For collectedfrom elements it creates:
 * datasource -> provides -> result and result -> isProvidedBy -> datasource. For hostedby elements it creates:
 * datasource -> hosts -> result and result -> isHostedBy -> datasource. For context elements it creates:
 * context <-> isRelatedTo <-> result. Note for context: it gets the first provenance in the dataInfo; if more than
 * one is present, the others are not dumped.
 */
public class Extractor implements Serializable {

	public void run(Boolean isSparkSessionManaged,
		String inputPath,
		String outputPath,
		Class<? extends Result> inputClazz,
		String communityMapPath) {

		SparkConf conf = new SparkConf();

		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				Utils.removeOutputDir(spark, outputPath);
				extractRelationResult(
					spark, inputPath, outputPath, inputClazz, Utils.getCommunityMap(spark, communityMapPath));
			});
	}

	private <R extends Result> void extractRelationResult(SparkSession spark,
		String inputPath,
		String outputPath,
		Class<R> inputClazz,
		CommunityMap communityMap) {

		Set<Integer> hashCodes = new HashSet<>();

		Utils
			.readPath(spark, inputPath, inputClazz)
			.flatMap((FlatMapFunction<R, Relation>) value -> {
				List<Relation> relationList = new ArrayList<>();
				extractRelationsFromInstance(hashCodes, value, relationList);
				Set<String> communities = communityMap.keySet();
				Optional
					.ofNullable(value.getContext())
					.ifPresent(contexts -> contexts.forEach(context -> {
						String id = context.getId();
						if (id.contains(":")) {
							id = id.substring(0, id.indexOf(":"));
						}
						if (communities.contains(id)) {
							String contextId = Utils.getContextId(id);
							Provenance provenance = Optional
								.ofNullable(context.getDataInfo())
								.map(
									dinfo -> Optional
										.ofNullable(dinfo.get(0).getProvenanceaction())
										.map(
											paction -> Provenance
												.newInstance(
													paction.getClassid(),
													dinfo.get(0).getTrust()))
										.orElse(null))
								.orElse(null);
							Relation r = getRelation(
								value.getId(), contextId,
								Constants.RESULT_ENTITY,
								Constants.CONTEXT_ENTITY,
								ModelConstants.IS_RELATED_TO, ModelConstants.RELATIONSHIP, provenance);
							if (!hashCodes.contains(r.hashCode())) {
								relationList.add(r);
								hashCodes.add(r.hashCode());
							}
							r = getRelation(
								contextId, value.getId(),
								Constants.CONTEXT_ENTITY,
								Constants.RESULT_ENTITY,
								ModelConstants.IS_RELATED_TO,
								ModelConstants.RELATIONSHIP, provenance);
							if (!hashCodes.contains(r.hashCode())) {
								relationList.add(r);
								hashCodes.add(r.hashCode());
							}
						}
					}));

				return relationList.iterator();
			}, Encoders.bean(Relation.class))
			.write()
			.option("compression", "gzip")
			.mode(SaveMode.Overwrite)
			.json(outputPath);
	}

	private <R extends Result> void extractRelationsFromInstance(Set<Integer> hashCodes, R value,
		List<Relation> relationList) {
		Optional
			.ofNullable(value.getInstance())
			.ifPresent(inst -> inst.forEach(instance -> {
				Optional
					.ofNullable(instance.getCollectedfrom())
					.ifPresent(
						cf -> getRelatioPair(
							value, relationList, cf,
							ModelConstants.IS_PROVIDED_BY, ModelConstants.PROVIDES, hashCodes));
				Optional
					.ofNullable(instance.getHostedby())
					.ifPresent(
						hb -> getRelatioPair(
							value, relationList, hb,
							Constants.IS_HOSTED_BY, Constants.HOSTS, hashCodes));
			}));
	}

	private static <R extends Result> void getRelatioPair(R value, List<Relation> relationList, KeyValue cf,
		String resultDatasource, String datasourceResult,
		Set<Integer> hashCodes) {
		Provenance provenance = Optional
			.ofNullable(cf.getDataInfo())
			.map(
				dinfo -> Optional
					.ofNullable(dinfo.getProvenanceaction())
					.map(
						paction -> Provenance
							.newInstance(
								paction.getClassname(),
								dinfo.getTrust()))
					.orElse(
						Provenance
							.newInstance(
								eu.dnetlib.dhp.oa.graph.dump.Constants.HARVESTED,
								eu.dnetlib.dhp.oa.graph.dump.Constants.DEFAULT_TRUST)))
			.orElse(
				Provenance
					.newInstance(
						eu.dnetlib.dhp.oa.graph.dump.Constants.HARVESTED,
						eu.dnetlib.dhp.oa.graph.dump.Constants.DEFAULT_TRUST));
		Relation r = getRelation(
			value.getId(),
			cf.getKey(), Constants.RESULT_ENTITY, Constants.DATASOURCE_ENTITY,
			resultDatasource, ModelConstants.PROVISION,
			provenance);
		if (!hashCodes.contains(r.hashCode())) {
			relationList.add(r);
			hashCodes.add(r.hashCode());
		}

		r = getRelation(
			cf.getKey(), value.getId(),
			Constants.DATASOURCE_ENTITY, Constants.RESULT_ENTITY,
			datasourceResult, ModelConstants.PROVISION,
			provenance);

		if (!hashCodes.contains(r.hashCode())) {
			relationList.add(r);
			hashCodes.add(r.hashCode());
		}
	}

	private static Relation getRelation(String source, String target, String sourceType, String targetType,
		String relName, String relType, Provenance provenance) {
		Relation r = new Relation();
		r.setSource(Node.newInstance(source, sourceType));
		r.setTarget(Node.newInstance(target, targetType));
		r.setReltype(RelType.newInstance(relName, relType));
		r.setProvenance(provenance);
		return r;
	}
}

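Extractor de-duplicates through a Set of hashCodes rather than a Set of Relation objects; the same guard in isolation, with strings standing in for relations:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class DedupSketch {
	public static void main(String[] args) {
		Set<Integer> hashCodes = new HashSet<>();
		List<String> relations = new ArrayList<>();
		for (String r : new String[] { "ds1->r1", "ds1->r1", "r1->ds1" }) {
			// same policy as Extractor: add only when the hash has not been seen before
			if (!hashCodes.contains(r.hashCode())) {
				relations.add(r);
				hashCodes.add(r.hashCode());
			}
		}
		System.out.println(relations); // [ds1->r1, r1->ds1]
	}
}
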
@@ -1,25 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.complete;

import java.io.Serializable;

public class MergedRels implements Serializable {
	private String organizationId;
	private String representativeId;

	public String getOrganizationId() {
		return organizationId;
	}

	public void setOrganizationId(String organizationId) {
		this.organizationId = organizationId;
	}

	public String getRepresentativeId() {
		return representativeId;
	}

	public void setRepresentativeId(String representativeId) {
		this.representativeId = representativeId;
	}
}

@@ -1,21 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.complete;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

public class OrganizationMap extends HashMap<String, List<String>> {

	public OrganizationMap() {
		super();
	}

	public List<String> get(String key) {

		if (super.get(key) == null) {
			return new ArrayList<>();
		}
		return super.get(key);
	}
}

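The overridden get never returns null, which spares callers a null check; a small sketch (the key and values are invented):

import java.util.Arrays;

import eu.dnetlib.dhp.oa.graph.dump.complete.OrganizationMap;

public class OrganizationMapSketch {
	public static void main(String[] args) {
		OrganizationMap map = new OrganizationMap();
		map.put("openorgs____::1234", Arrays.asList("egi", "dariah")); // invented key and values
		System.out.println(map.get("openorgs____::1234")); // [egi, dariah]
		System.out.println(map.get("missing")); // [] rather than null
	}
}
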
@ -1,99 +0,0 @@
|
|||
|
||||
package eu.dnetlib.dhp.oa.graph.dump.complete;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
import eu.dnetlib.dhp.oa.graph.dump.Constants;
|
||||
import eu.dnetlib.dhp.oa.graph.dump.Utils;
|
||||
import eu.dnetlib.dhp.oa.graph.dump.exceptions.MyRuntimeException;
|
||||
import eu.dnetlib.dhp.schema.common.ModelConstants;
|
||||
import eu.dnetlib.dhp.schema.common.ModelSupport;
|
||||
import eu.dnetlib.dhp.schema.dump.oaf.Provenance;
|
||||
import eu.dnetlib.dhp.schema.dump.oaf.graph.*;
|
||||
|
||||
/**
|
||||
* It process the ContextInfo information to produce a new Context Entity or a set of Relations between the generic
|
||||
* context entity and datasource/projects related to the context.
|
||||
*/
|
||||
public class Process implements Serializable {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static <R extends ResearchInitiative> R getEntity(ContextInfo ci) {
|
||||
try {
|
||||
ResearchInitiative ri;
|
||||
if (ci.getType().equals("community")) {
|
||||
ri = new ResearchCommunity();
|
||||
((ResearchCommunity) ri).setSubject(ci.getSubject());
|
||||
ri.setType(Constants.RESEARCH_COMMUNITY);
|
||||
} else {
|
||||
ri = new ResearchInitiative();
|
||||
ri.setType(Constants.RESEARCH_INFRASTRUCTURE);
|
||||
}
|
||||
ri.setId(Utils.getContextId(ci.getId()));
|
||||
ri.setAcronym(ci.getId());
|
||||
|
||||
ri.setDescription(ci.getDescription());
|
||||
ri.setName(ci.getName());
|
||||
if (StringUtils.isNotEmpty(ci.getZenodocommunity())) {
|
||||
ri.setZenodo_community(Constants.ZENODO_COMMUNITY_PREFIX + ci.getZenodocommunity());
|
||||
}
|
||||
return (R) ri;
|
||||
|
||||
} catch (final Exception e) {
|
||||
throw new MyRuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
public static List<Relation> getRelation(ContextInfo ci) {
|
||||
try {
|
||||
|
||||
List<Relation> relationList = new ArrayList<>();
|
||||
ci
|
||||
.getDatasourceList()
|
||||
.forEach(ds -> {
|
||||
|
||||
String nodeType = ModelSupport.idPrefixEntity.get(ds.substring(0, 2));
|
||||
|
||||
String contextId = Utils.getContextId(ci.getId());
|
||||
relationList
|
||||
.add(
|
||||
Relation
|
||||
.newInstance(
|
||||
Node
|
||||
.newInstance(
|
||||
contextId, eu.dnetlib.dhp.schema.dump.oaf.graph.Constants.CONTEXT_ENTITY),
|
||||
Node.newInstance(ds, nodeType),
|
||||
RelType.newInstance(ModelConstants.IS_RELATED_TO, ModelConstants.RELATIONSHIP),
|
||||
Provenance
|
||||
.newInstance(
|
||||
Constants.USER_CLAIM,
|
||||
Constants.DEFAULT_TRUST)));
|
||||
|
||||
relationList
|
||||
.add(
|
||||
Relation
|
||||
.newInstance(
|
||||
Node.newInstance(ds, nodeType),
|
||||
Node
|
||||
.newInstance(
|
||||
contextId, eu.dnetlib.dhp.schema.dump.oaf.graph.Constants.CONTEXT_ENTITY),
|
||||
RelType.newInstance(ModelConstants.IS_RELATED_TO, ModelConstants.RELATIONSHIP),
|
||||
Provenance
|
||||
.newInstance(
|
||||
Constants.USER_CLAIM,
|
||||
Constants.DEFAULT_TRUST)));
|
||||
|
||||
});
|
||||
|
||||
return relationList;
|
||||
|
||||
} catch (final Exception e) {
|
||||
throw new MyRuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
}
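A hedged usage sketch for Process: the setters are the ones used by QueryInformationSystem below, while all values are invented. getEntity returns a ResearchCommunity for type "community" and a plain ResearchInitiative otherwise; getRelation emits two relations (both directions) per datasource:

	ContextInfo ci = new ContextInfo();
	ci.setId("dh-ch");                      // hypothetical context id
	ci.setName("Digital Humanities");       // hypothetical
	ci.setDescription("Example community"); // hypothetical
	ci.setType("community");
	ci.setZenodocommunity("oac_dh-ch");     // hypothetical
	ci.setSubject(Arrays.asList("history", "archaeology"));

	ResearchCommunity rc = Process.getEntity(ci);
	ci.setDatasourceList(Arrays.asList("10|doajarticles::0001")); // hypothetical datasource id
	List<Relation> rels = Process.getRelation(ci); // two Relations for the single datasource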

@ -1,195 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.complete;

import java.io.StringReader;
import java.util.*;
import java.util.function.Consumer;

import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Element;
import org.dom4j.Node;
import org.dom4j.io.SAXReader;
import org.jetbrains.annotations.NotNull;
import org.xml.sax.SAXException;

import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.utils.DHPUtils;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;

public class QueryInformationSystem {

	private ISLookUpService isLookUp;
	private List<String> contextRelationResult;

	private static final String XQUERY = "for $x in collection('/db/DRIVER/ContextDSResources/ContextDSResourceType') " +
		" where $x//CONFIGURATION/context[./@type='community' or ./@type='ri'] " +
		" and $x//context/param[./@name = 'status']/text() = 'all' " +
		" return " +
		"$x//context";

	private static final String XQUERY_ENTITY = "for $x in collection('/db/DRIVER/ContextDSResources/ContextDSResourceType') " +
		"where $x//context[./@type='community' or ./@type = 'ri'] and $x//context/param[./@name = 'status']/text() = 'all' return " +
		"concat(data($x//context/@id) , '@@', $x//context/param[./@name =\"name\"]/text(), '@@', " +
		"$x//context/param[./@name=\"description\"]/text(), '@@', $x//context/param[./@name = \"subject\"]/text(), '@@', " +
		"$x//context/param[./@name = \"zenodoCommunity\"]/text(), '@@', $x//context/@type)";

	public void getContextInformation(final Consumer<ContextInfo> consumer) throws ISLookUpException {

		isLookUp
			.quickSearchProfile(XQUERY_ENTITY)
			.forEach(c -> {
				ContextInfo cinfo = new ContextInfo();
				String[] cSplit = c.split("@@");
				cinfo.setId(cSplit[0]);
				cinfo.setName(cSplit[1]);
				cinfo.setDescription(cSplit[2]);
				if (!cSplit[3].trim().equals("")) {
					cinfo.setSubject(Arrays.asList(cSplit[3].split(",")));
				}
				cinfo.setZenodocommunity(cSplit[4]);
				cinfo.setType(cSplit[5]);
				consumer.accept(cinfo);
			});

	}

	public List<String> getContextRelationResult() {
		return contextRelationResult;
	}

	public void setContextRelationResult(List<String> contextRelationResult) {
		this.contextRelationResult = contextRelationResult;
	}

	public ISLookUpService getIsLookUp() {
		return isLookUp;
	}

	public void setIsLookUp(ISLookUpService isLookUpService) {
		this.isLookUp = isLookUpService;
	}

	public void execContextRelationQuery() throws ISLookUpException {
		contextRelationResult = isLookUp.quickSearchProfile(XQUERY);

	}

	public void getContextRelation(final Consumer<ContextInfo> consumer, String category, String prefix) {

		contextRelationResult.forEach(xml -> {
			ContextInfo cinfo = new ContextInfo();
			final Document doc;

			try {
				final SAXReader reader = new SAXReader();
				reader.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
				doc = reader.read(new StringReader(xml));
				Element root = doc.getRootElement();
				cinfo.setId(root.attributeValue("id"));

				Iterator<Element> it = root.elementIterator();
				while (it.hasNext()) {
					Element el = it.next();
					if (el.getName().equals("category")) {
						String categoryId = el.attributeValue("id");
						categoryId = categoryId.substring(categoryId.lastIndexOf("::") + 2);
						if (categoryId.equals(category)) {
							cinfo.setDatasourceList(getCategoryList(el, prefix));
						}
					}

				}
				consumer.accept(cinfo);
			} catch (DocumentException | SAXException e) {
				e.printStackTrace();
			}

		});

	}

	@NotNull
	private List<String> getCategoryList(Element el, String prefix) {
		List<String> datasourceList = new ArrayList<>();
		for (Object node : el.selectNodes(".//concept")) {
			String oid = getOpenaireId((Node) node, prefix);
			if (oid != null)
				datasourceList.add(oid);
		}

		return datasourceList;
	}

	private String getOpenaireId(Node el, String prefix) {
		for (Object node : el.selectNodes(".//param")) {
			Node n = (Node) node;
			if (n.valueOf("./@name").equals("openaireId")) {
				return prefix + "|" + n.getText();
			}
		}

		return makeOpenaireId(el, prefix);

	}

	private String makeOpenaireId(Node el, String prefix) {
		if (!prefix.equals(ModelSupport.entityIdPrefix.get("project"))) {
			return null;
		}
		String funder = "";
		String grantId = null;
		String funding = null;
		for (Object node : el.selectNodes(".//param")) {
			Node n = (Node) node;
			switch (n.valueOf("./@name")) {
				case "funding":
					funding = n.getText();
					break;
				case "funder":
					funder = n.getText();
					break;
				case "CD_PROJECT_NUMBER":
					grantId = n.getText();
					break;
				default:
					break;
			}
		}
		String nsp = null;

		switch (funder.toLowerCase()) {
			case "ec":
				if (funding == null) {
					return null;
				}
				if (funding.toLowerCase().contains("h2020")) {
					nsp = "corda__h2020::";
				} else {
					nsp = "corda_______::";
				}
				break;
			case "tubitak":
				nsp = "tubitakf____::";
				break;
			case "dfg":
				nsp = "dfgf________::";
				break;
			default:
				StringBuilder bld = new StringBuilder();
				bld.append(funder.toLowerCase());
				for (int i = funder.length(); i < 12; i++)
					bld.append("_");
				bld.append("::");
				nsp = bld.toString();
		}

		return prefix + "|" + nsp + DHPUtils.md5(grantId);
	}

}
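In makeOpenaireId above, funders without a dedicated branch get a namespace built by padding the lowercased short name with underscores up to 12 characters before appending "::". A worked sketch of just that padding (funder names are examples):

	static String funderNamespace(String funder) {
		StringBuilder bld = new StringBuilder(funder.toLowerCase());
		for (int i = funder.length(); i < 12; i++)
			bld.append("_");
		return bld.append("::").toString();
	}
	// funderNamespace("NSF")  -> "nsf_________::"
	// funderNamespace("snsf") -> "snsf________::"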

@ -1,122 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.complete;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.schema.dump.oaf.graph.GraphResult;
import eu.dnetlib.dhp.schema.dump.oaf.graph.Relation;

/**
 * Reads all the entities of the same type (Relation / Results) and saves them in the same folder
 */
public class SparkCollectAndSave implements Serializable {

	private static final Logger log = LoggerFactory.getLogger(SparkCollectAndSave.class);

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				SparkCollectAndSave.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/dump/input_collect_and_save.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		final Boolean aggregateResult = Optional
			.ofNullable(parser.get("resultAggregation"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);

		SparkConf conf = new SparkConf();

		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				Utils.removeOutputDir(spark, outputPath + "/result");
				run(spark, inputPath, outputPath, aggregateResult);

			});

	}

	private static void run(SparkSession spark, String inputPath, String outputPath, boolean aggregate) {
		if (aggregate) {
			Utils
				.readPath(spark, inputPath + "/result/publication", GraphResult.class)
				.union(Utils.readPath(spark, inputPath + "/result/dataset", GraphResult.class))
				.union(Utils.readPath(spark, inputPath + "/result/otherresearchproduct", GraphResult.class))
				.union(Utils.readPath(spark, inputPath + "/result/software", GraphResult.class))
				.write()
				.option("compression", "gzip")
				.mode(SaveMode.Overwrite)
				.json(outputPath + "/result");
		} else {
			write(
				Utils.readPath(spark, inputPath + "/result/publication", GraphResult.class),
				outputPath + "/publication");
			write(
				Utils.readPath(spark, inputPath + "/result/dataset", GraphResult.class),
				outputPath + "/dataset");
			write(
				Utils.readPath(spark, inputPath + "/result/otherresearchproduct", GraphResult.class),
				outputPath + "/otheresearchproduct");
			write(
				Utils.readPath(spark, inputPath + "/result/software", GraphResult.class),
				outputPath + "/software");

		}

		Utils
			.readPath(spark, inputPath + "/relation/publication", Relation.class)
			.union(Utils.readPath(spark, inputPath + "/relation/dataset", Relation.class))
			.union(Utils.readPath(spark, inputPath + "/relation/orp", Relation.class))
			.union(Utils.readPath(spark, inputPath + "/relation/software", Relation.class))
			.union(Utils.readPath(spark, inputPath + "/relation/contextOrg", Relation.class))
			.union(Utils.readPath(spark, inputPath + "/relation/context", Relation.class))
			.union(Utils.readPath(spark, inputPath + "/relation/relation", Relation.class))
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(outputPath + "/relation");

	}

	private static void write(Dataset<GraphResult> dataSet, String outputPath) {
		dataSet
			.write()
			.option("compression", "gzip")
			.mode(SaveMode.Overwrite)
			.json(outputPath);
	}
}
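Utils.readPath is called throughout these jobs but its body is not part of this diff. A minimal sketch consistent with the call sites (a typed Dataset built from JSON text files); this is an assumption about its shape, not the actual implementation:

	public static <R> Dataset<R> readPath(SparkSession spark, String inputPath, Class<R> clazz) {
		// assumed: one JSON object per line, deserialized with Jackson
		return spark
			.read()
			.textFile(inputPath)
			.map(
				(MapFunction<String, R>) value -> new ObjectMapper().readValue(value, clazz),
				Encoders.bean(clazz));
	}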

@ -1,54 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.complete;

import java.io.Serializable;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.schema.oaf.OafEntity;

/**
 * Spark Job that fires the dump for the entities
 */
public class SparkDumpEntitiesJob implements Serializable {
	private static final Logger log = LoggerFactory.getLogger(SparkDumpEntitiesJob.class);

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				SparkDumpEntitiesJob.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/dump/input_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		final String resultClassName = parser.get("resultTableName");
		log.info("resultTableName: {}", resultClassName);

		final String communityMapPath = parser.get("communityMapPath");

		Class<? extends OafEntity> inputClazz = (Class<? extends OafEntity>) Class.forName(resultClassName);

		DumpGraphEntities dg = new DumpGraphEntities();
		dg.run(isSparkSessionManaged, inputPath, outputPath, inputClazz, communityMapPath);

	}

}

@ -1,132 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.complete;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.util.*;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.dump.oaf.Provenance;
import eu.dnetlib.dhp.schema.dump.oaf.graph.Node;
import eu.dnetlib.dhp.schema.dump.oaf.graph.RelType;
import eu.dnetlib.dhp.schema.oaf.DataInfo;
import eu.dnetlib.dhp.schema.oaf.Relation;

/**
 * Dumps eu.dnetlib.dhp.schema.oaf.Relation into eu.dnetlib.dhp.schema.dump.oaf.graph.Relation
 */
public class SparkDumpRelationJob implements Serializable {

	private static final Logger log = LoggerFactory.getLogger(SparkDumpRelationJob.class);

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				SparkDumpRelationJob.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/dump/input_relationdump_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		Optional<String> rs = Optional.ofNullable(parser.get("removeSet"));
		final Set<String> removeSet = new HashSet<>();
		if (rs.isPresent()) {
			Collections.addAll(removeSet, rs.get().split(";"));
		}

		SparkConf conf = new SparkConf();

		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				Utils.removeOutputDir(spark, outputPath);
				dumpRelation(spark, inputPath, outputPath, removeSet);

			});

	}

	private static void dumpRelation(SparkSession spark, String inputPath, String outputPath, Set<String> removeSet) {
		Dataset<Relation> relations = Utils.readPath(spark, inputPath, Relation.class);
		relations
			.filter((FilterFunction<Relation>) r -> !removeSet.contains(r.getRelClass()))
			.map((MapFunction<Relation, eu.dnetlib.dhp.schema.dump.oaf.graph.Relation>) relation -> {
				eu.dnetlib.dhp.schema.dump.oaf.graph.Relation relNew = new eu.dnetlib.dhp.schema.dump.oaf.graph.Relation();
				relNew
					.setSource(
						Node
							.newInstance(
								relation.getSource(),
								ModelSupport.idPrefixEntity.get(relation.getSource().substring(0, 2))));

				relNew
					.setTarget(
						Node
							.newInstance(
								relation.getTarget(),
								ModelSupport.idPrefixEntity.get(relation.getTarget().substring(0, 2))));

				relNew
					.setReltype(
						RelType
							.newInstance(
								relation.getRelClass(),
								relation.getSubRelType()));

				Optional<DataInfo> odInfo = Optional.ofNullable(relation.getDataInfo());
				if (odInfo.isPresent()) {
					DataInfo dInfo = odInfo.get();
					if (Optional.ofNullable(dInfo.getProvenanceaction()).isPresent() &&
						Optional.ofNullable(dInfo.getProvenanceaction().getClassname()).isPresent()) {
						relNew
							.setProvenance(
								Provenance
									.newInstance(
										dInfo.getProvenanceaction().getClassname(),
										dInfo.getTrust()));
					}
				}
				if (Boolean.TRUE.equals(relation.getValidated())) {
					relNew.setValidated(relation.getValidated());
					relNew.setValidationDate(relation.getValidationDate());
				}

				return relNew;

			}, Encoders.bean(eu.dnetlib.dhp.schema.dump.oaf.graph.Relation.class))
			.write()
			.option("compression", "gzip")
			.mode(SaveMode.Overwrite)
			.json(outputPath);

	}

}
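The removeSet argument is a ";"-separated list of relation classes to exclude from the dump, applied by the FilterFunction above. A sketch of the semantics (relation class names are examples):

	Set<String> removeSet = new HashSet<>(Arrays.asList("merges;isMergedIn".split(";")));
	// removeSet.contains("merges")       -> true  (relation dropped)
	// removeSet.contains("isProducedBy") -> false (relation kept)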

@ -1,54 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.complete;

import java.io.Serializable;
import java.util.*;

import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.schema.oaf.Result;

/**
 * Spark job that fires the extraction of relations from entities
 */
public class SparkExtractRelationFromEntities implements Serializable {
	private static final Logger log = LoggerFactory.getLogger(SparkExtractRelationFromEntities.class);

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				SparkExtractRelationFromEntities.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/dump/input_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		final String resultClassName = parser.get("resultTableName");
		log.info("resultTableName: {}", resultClassName);

		final String communityMapPath = parser.get("communityMapPath");

		Class<? extends Result> inputClazz = (Class<? extends Result>) Class.forName(resultClassName);

		Extractor extractor = new Extractor();
		extractor.run(isSparkSessionManaged, inputPath, outputPath, inputClazz, communityMapPath);

	}

}

@ -1,176 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.complete;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.util.*;
import java.util.function.Consumer;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.*;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.gson.Gson;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.dump.oaf.Provenance;
import eu.dnetlib.dhp.schema.dump.oaf.graph.Node;
import eu.dnetlib.dhp.schema.dump.oaf.graph.RelType;
import eu.dnetlib.dhp.schema.oaf.Relation;

/**
 * Creates new Relations between Context Entities and Organizations whose products are associated to the context. It
 * produces relations such as: organization <-> isRelatedTo <-> context
 */
public class SparkOrganizationRelation implements Serializable {
	private static final Logger log = LoggerFactory.getLogger(SparkOrganizationRelation.class);

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				SparkOrganizationRelation.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/dump/input_organization_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		final OrganizationMap organizationMap = new Gson()
			.fromJson(parser.get("organizationCommunityMap"), OrganizationMap.class);
		final String serializedOrganizationMap = new Gson().toJson(organizationMap);
		log.info("organization map: {}", serializedOrganizationMap);

		final String communityMapPath = parser.get("communityMapPath");
		log.info("communityMapPath: {}", communityMapPath);

		SparkConf conf = new SparkConf();

		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				Utils.removeOutputDir(spark, outputPath);
				extractRelation(spark, inputPath, organizationMap, outputPath, communityMapPath);

			});

	}

	private static void extractRelation(SparkSession spark, String inputPath, OrganizationMap organizationMap,
		String outputPath, String communityMapPath) {

		CommunityMap communityMap = Utils.getCommunityMap(spark, communityMapPath);

		Dataset<Relation> relationDataset = Utils.readPath(spark, inputPath, Relation.class);

		relationDataset.createOrReplaceTempView("relation");

		List<eu.dnetlib.dhp.schema.dump.oaf.graph.Relation> relList = new ArrayList<>();

		Dataset<MergedRels> mergedRelsDataset = spark
			.sql(
				"SELECT target organizationId, source representativeId " +
					"FROM relation " +
					"WHERE datainfo.deletedbyinference = false " +
					"AND relclass = 'merges' " +
					"AND substr(source, 1, 2) = '20'")
			.as(Encoders.bean(MergedRels.class));

		mergedRelsDataset.map((MapFunction<MergedRels, MergedRels>) mergedRels -> {
			if (organizationMap.containsKey(mergedRels.getOrganizationId())) {
				return mergedRels;
			}
			return null;
		}, Encoders.bean(MergedRels.class))
			.filter(Objects::nonNull)
			.collectAsList()
			.forEach(getMergedRelsConsumer(organizationMap, relList, communityMap));

		organizationMap
			.keySet()
			.forEach(
				oId -> organizationMap
					.get(oId)
					.forEach(community -> {
						if (communityMap.containsKey(community)) {
							addRelations(relList, community, oId);
						}
					}));

		spark
			.createDataset(relList, Encoders.bean(eu.dnetlib.dhp.schema.dump.oaf.graph.Relation.class))
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(outputPath);
	}

	@NotNull
	private static Consumer<MergedRels> getMergedRelsConsumer(OrganizationMap organizationMap,
		List<eu.dnetlib.dhp.schema.dump.oaf.graph.Relation> relList, CommunityMap communityMap) {
		return mergedRels -> {
			String oId = mergedRels.getOrganizationId();
			organizationMap
				.get(oId)
				.forEach(community -> {
					if (communityMap.containsKey(community)) {
						addRelations(relList, community, mergedRels.getRepresentativeId());
					}

				});
			organizationMap.remove(oId);
		};
	}

	private static void addRelations(List<eu.dnetlib.dhp.schema.dump.oaf.graph.Relation> relList, String community,
		String organization) {

		String id = Utils.getContextId(community);
		log.info("create relation for organization: {}", organization);
		relList
			.add(
				eu.dnetlib.dhp.schema.dump.oaf.graph.Relation
					.newInstance(
						Node.newInstance(id, Constants.CONTEXT_ENTITY),
						Node.newInstance(organization, ModelSupport.idPrefixEntity.get(organization.substring(0, 2))),
						RelType.newInstance(ModelConstants.IS_RELATED_TO, ModelConstants.RELATIONSHIP),
						Provenance
							.newInstance(
								eu.dnetlib.dhp.oa.graph.dump.Constants.USER_CLAIM,
								eu.dnetlib.dhp.oa.graph.dump.Constants.DEFAULT_TRUST)));

		relList
			.add(
				eu.dnetlib.dhp.schema.dump.oaf.graph.Relation
					.newInstance(
						Node.newInstance(organization, ModelSupport.idPrefixEntity.get(organization.substring(0, 2))),
						Node.newInstance(id, Constants.CONTEXT_ENTITY),
						RelType.newInstance(ModelConstants.IS_RELATED_TO, ModelConstants.RELATIONSHIP),
						Provenance
							.newInstance(
								eu.dnetlib.dhp.oa.graph.dump.Constants.USER_CLAIM,
								eu.dnetlib.dhp.oa.graph.dump.Constants.DEFAULT_TRUST)));
	}

}
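The organizationCommunityMap argument is parsed by Gson into the OrganizationMap shown earlier (organization id -> community ids). A sketch with invented identifiers:

	String json = "{\"20|openorgs____::1234\":[\"dh-ch\",\"covid-19\"]}"; // hypothetical
	OrganizationMap organizationMap = new Gson().fromJson(json, OrganizationMap.class);
	// for each (organization, community) pair whose community also appears in the
	// CommunityMap, addRelations emits both directions of the isRelatedTo relation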

@ -1,136 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.complete;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.schema.oaf.*;

/**
 * It selects the valid relations among those present in the graph. A relation is valid if it is not deletedbyinference
 * and if both the source and the target node are present in the graph and are neither deleted by inference nor
 * invisible. To check this, a view with the ids of all the entities in the graph is created, and the relations are
 * selected for which a join exists with this view for both the source and the target.
 */

public class SparkSelectValidRelationsJob implements Serializable {

	private static final Logger log = LoggerFactory.getLogger(SparkSelectValidRelationsJob.class);

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				SparkSelectValidRelationsJob.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/dump/input_relationdump_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		SparkConf conf = new SparkConf();

		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				Utils.removeOutputDir(spark, outputPath);
				selectValidRelation(spark, inputPath, outputPath);

			});

	}

	private static void selectValidRelation(SparkSession spark, String inputPath, String outputPath) {
		Dataset<Relation> relation = Utils.readPath(spark, inputPath + "/relation", Relation.class);
		Dataset<Publication> publication = Utils.readPath(spark, inputPath + "/publication", Publication.class);
		Dataset<eu.dnetlib.dhp.schema.oaf.Dataset> dataset = Utils
			.readPath(spark, inputPath + "/dataset", eu.dnetlib.dhp.schema.oaf.Dataset.class);
		Dataset<Software> software = Utils.readPath(spark, inputPath + "/software", Software.class);
		Dataset<OtherResearchProduct> other = Utils
			.readPath(spark, inputPath + "/otherresearchproduct", OtherResearchProduct.class);
		Dataset<Organization> organization = Utils.readPath(spark, inputPath + "/organization", Organization.class);
		Dataset<Project> project = Utils.readPath(spark, inputPath + "/project", Project.class);
		Dataset<Datasource> datasource = Utils.readPath(spark, inputPath + "/datasource", Datasource.class);

		relation.createOrReplaceTempView("relation");
		publication.createOrReplaceTempView("publication");
		dataset.createOrReplaceTempView("dataset");
		other.createOrReplaceTempView("other");
		software.createOrReplaceTempView("software");
		organization.createOrReplaceTempView("organization");
		project.createOrReplaceTempView("project");
		datasource.createOrReplaceTempView("datasource");

		spark
			.sql(
				"SELECT id " +
					"FROM publication " +
					"WHERE datainfo.deletedbyinference = false AND datainfo.invisible = false " +
					"UNION ALL " +
					"SELECT id " +
					"FROM dataset " +
					"WHERE datainfo.deletedbyinference = false AND datainfo.invisible = false " +
					"UNION ALL " +
					"SELECT id " +
					"FROM other " +
					"WHERE datainfo.deletedbyinference = false AND datainfo.invisible = false " +
					"UNION ALL " +
					"SELECT id " +
					"FROM software " +
					"WHERE datainfo.deletedbyinference = false AND datainfo.invisible = false " +
					"UNION ALL " +
					"SELECT id " +
					"FROM organization " +
					"WHERE datainfo.deletedbyinference = false AND datainfo.invisible = false " +
					"UNION ALL " +
					"SELECT id " +
					"FROM project " +
					"WHERE datainfo.deletedbyinference = false AND datainfo.invisible = false " +
					"UNION ALL " +
					"SELECT id " +
					"FROM datasource " +
					"WHERE datainfo.deletedbyinference = false AND datainfo.invisible = false ")
			.createOrReplaceTempView("identifiers");

		spark
			.sql(
				"SELECT relation.* " +
					"FROM relation " +
					"JOIN identifiers i1 " +
					"ON source = i1.id " +
					"JOIN identifiers i2 " +
					"ON target = i2.id " +
					"WHERE datainfo.deletedbyinference = false")
			.as(Encoders.bean(Relation.class))
			.write()
			.option("compression", "gzip")
			.mode(SaveMode.Overwrite)
			.json(outputPath);

	}
}
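The two SQL statements above amount to a semi-join in both directions: a relation survives only when its source and target ids both appear in the identifiers view and the relation itself is not deleted by inference. The same check for a single relation, sketched in plain Java (ids invented; the DataInfo getter name is assumed from its usage elsewhere in this diff):

	Set<String> identifiers = new HashSet<>(
		Arrays.asList("50|doi_________::a1", "10|openaire____::b2")); // hypothetical graph ids
	boolean valid = identifiers.contains(rel.getSource())
		&& identifiers.contains(rel.getTarget())
		&& Boolean.FALSE.equals(rel.getDataInfo().getDeletedbyinference());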

@ -1,30 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.exceptions;

public class MyRuntimeException extends RuntimeException {

	public MyRuntimeException() {
		super();
	}

	public MyRuntimeException(
		final String message,
		final Throwable cause,
		final boolean enableSuppression,
		final boolean writableStackTrace) {
		super(message, cause, enableSuppression, writableStackTrace);
	}

	public MyRuntimeException(final String message, final Throwable cause) {
		super(message, cause);
	}

	public MyRuntimeException(final String message) {
		super(message);
	}

	public MyRuntimeException(final Throwable cause) {
		super(cause);
	}

}

@ -1,29 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.exceptions;

public class NoAvailableEntityTypeException extends Exception {
	public NoAvailableEntityTypeException() {
		super();
	}

	public NoAvailableEntityTypeException(
		final String message,
		final Throwable cause,
		final boolean enableSuppression,
		final boolean writableStackTrace) {
		super(message, cause, enableSuppression, writableStackTrace);
	}

	public NoAvailableEntityTypeException(final String message, final Throwable cause) {
		super(message, cause);
	}

	public NoAvailableEntityTypeException(final String message) {
		super(message);
	}

	public NoAvailableEntityTypeException(final Throwable cause) {
		super(cause);
	}

}

@ -1,140 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.funderresults;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.util.List;
import java.util.Objects;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
import eu.dnetlib.dhp.schema.dump.oaf.community.Project;

/**
 * Splits the dumped results by funder and stores them in a folder named after the funder nsp (for all funders except
 * the EC; for the EC it also specifies the fundingStream, FP7 or H2020).
 */
public class SparkDumpFunderResults implements Serializable {
	private static final Logger log = LoggerFactory.getLogger(SparkDumpFunderResults.class);

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				SparkDumpFunderResults.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/dump/funder_result_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		final String graphPath = parser.get("graphPath");
		log.info("graphPath: {}", graphPath);

		SparkConf conf = new SparkConf();

		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				Utils.removeOutputDir(spark, outputPath);
				writeResultProjectList(spark, inputPath, outputPath, graphPath);
			});
	}

	private static void writeResultProjectList(SparkSession spark, String inputPath, String outputPath,
		String graphPath) {

		Dataset<eu.dnetlib.dhp.schema.oaf.Project> project = Utils
			.readPath(spark, graphPath + "/project", eu.dnetlib.dhp.schema.oaf.Project.class);

		Dataset<CommunityResult> result = Utils
			.readPath(spark, inputPath + "/publication", CommunityResult.class)
			.union(Utils.readPath(spark, inputPath + "/dataset", CommunityResult.class))
			.union(Utils.readPath(spark, inputPath + "/orp", CommunityResult.class))
			.union(Utils.readPath(spark, inputPath + "/software", CommunityResult.class));

		List<String> funderList = project
			.select("id")
			.map((MapFunction<Row, String>) value -> value.getString(0).substring(0, 15), Encoders.STRING())
			.distinct()
			.collectAsList();

		funderList.forEach(funder -> {
			String fundernsp = funder.substring(3);
			String funderdump;
			if (fundernsp.startsWith("corda")) {
				funderdump = "EC_";
				if (fundernsp.endsWith("h2020")) {
					funderdump += "H2020";
				} else {
					funderdump += "FP7";
				}
			} else {
				funderdump = fundernsp.substring(0, fundernsp.indexOf("_")).toUpperCase();
			}
			writeFunderResult(funder, result, outputPath, funderdump);
		});

	}

	private static void dumpResults(String nsp, Dataset<CommunityResult> results, String outputPath,
		String funderName) {

		results.map((MapFunction<CommunityResult, CommunityResult>) r -> {
			if (!Optional.ofNullable(r.getProjects()).isPresent()) {
				return null;
			}
			for (Project p : r.getProjects()) {
				if (p.getId().startsWith(nsp)) {
					if (nsp.startsWith("40|irb")) {
						if (p.getFunder().getShortName().equals(funderName))
							return r;
						else
							return null;
					}
					return r;
				}
			}
			return null;
		}, Encoders.bean(CommunityResult.class))
			.filter(Objects::nonNull)
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(outputPath + "/" + funderName);
	}

	private static void writeFunderResult(String funder, Dataset<CommunityResult> results, String outputPath,
		String funderDump) {

		if (funder.startsWith("40|irb")) {
			dumpResults(funder, results, outputPath, "HRZZ");
			dumpResults(funder, results, outputPath, "MZOS");
		} else
			dumpResults(funder, results, outputPath, funderDump);

	}

}
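The branching above derives the output folder name from the 15-character project id prefix. A compact sketch of the same mapping (ids are examples; the "40|irb" case is instead split into HRZZ and MZOS by writeFunderResult):

	static String funderFolder(String fundernsp) {
		// fundernsp is funder.substring(3), e.g. "corda__h2020" or "nsf_________"
		if (fundernsp.startsWith("corda")) {
			return fundernsp.endsWith("h2020") ? "EC_H2020" : "EC_FP7";
		}
		return fundernsp.substring(0, fundernsp.indexOf("_")).toUpperCase();
	}
	// funderFolder("corda__h2020") -> "EC_H2020"
	// funderFolder("nsf_________") -> "NSF"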

@ -1,116 +0,0 @@

package eu.dnetlib.dhp.oa.graph.dump.funderresults;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.Serializable;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.api.java.function.MapGroupsFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.Project;
import eu.dnetlib.dhp.schema.oaf.Relation;
import eu.dnetlib.dhp.schema.oaf.Result;

/**
 * Selects the results linked to projects. The dump is performed only for these results.
 * The code to perform the dump and to extend the dumped results with the information related to projects
 * is the one used for the dump of the community products.
 */
public class SparkResultLinkedToProject implements Serializable {

	private static final Logger log = LoggerFactory.getLogger(SparkResultLinkedToProject.class);

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				SparkResultLinkedToProject.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/oa/graph/dump/input_parameters_link_prj.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		final String resultClassName = parser.get("resultTableName");
		log.info("resultTableName: {}", resultClassName);

		final String graphPath = parser.get("graphPath");
		log.info("graphPath: {}", graphPath);

		@SuppressWarnings("unchecked")
		Class<? extends Result> inputClazz = (Class<? extends Result>) Class.forName(resultClassName);
		SparkConf conf = new SparkConf();

		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				Utils.removeOutputDir(spark, outputPath);
				writeResultsLinkedToProjects(spark, inputClazz, inputPath, outputPath, graphPath);
			});
	}

	private static <R extends Result> void writeResultsLinkedToProjects(SparkSession spark, Class<R> inputClazz,
		String inputPath, String outputPath, String graphPath) {

		Dataset<R> results = Utils
			.readPath(spark, inputPath, inputClazz)
			.filter("dataInfo.deletedbyinference = false and datainfo.invisible = false");
		Dataset<Relation> relations = Utils
			.readPath(spark, graphPath + "/relation", Relation.class)
			.filter(
				"dataInfo.deletedbyinference = false and lower(relClass) = '"
					+ ModelConstants.IS_PRODUCED_BY.toLowerCase() + "'");
		Dataset<Project> project = Utils.readPath(spark, graphPath + "/project", Project.class);

		results.createOrReplaceTempView("result");
		relations.createOrReplaceTempView("relation");
		project.createOrReplaceTempView("project");

		Dataset<R> tmp = spark
			.sql(
				"Select res.* " +
					"from relation rel " +
					"join result res " +
					"on rel.source = res.id " +
					"join project p " +
					"on rel.target = p.id " +
					"")
			.as(Encoders.bean(inputClazz));
		tmp
			.groupByKey(
				(MapFunction<R, String>) value -> value.getId(),
				Encoders.STRING())
			.mapGroups((MapGroupsFunction<String, R, R>) (k, it) -> it.next(), Encoders.bean(inputClazz))
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(outputPath);

	}
}
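The join above can emit a result once per linked project, so the groupByKey/mapGroups step keeps a single record per id. The same dedup pattern in isolation (types are illustrative):

	resultDataset
		.groupByKey((MapFunction<Result, String>) Result::getId, Encoders.STRING())
		.mapGroups((MapGroupsFunction<String, Result, Result>) (id, it) -> it.next(), Encoders.bean(Result.class));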

@ -267,10 +267,13 @@ public abstract class AbstractMdRecordToOafMapper {

				res
					.add(
						getRelation(
							docId, projectId, RESULT_PROJECT, OUTCOME, IS_PRODUCED_BY, entity, validationdDate));
						OafMapperUtils
							.getRelation(
								docId, projectId, RESULT_PROJECT, OUTCOME, IS_PRODUCED_BY, entity, validationdDate));
				res
					.add(getRelation(projectId, docId, RESULT_PROJECT, OUTCOME, PRODUCES, entity, validationdDate));
					.add(
						OafMapperUtils
							.getRelation(projectId, docId, RESULT_PROJECT, OUTCOME, PRODUCES, entity, validationdDate));
			}
		}

@ -303,13 +306,16 @@ public abstract class AbstractMdRecordToOafMapper {
			final String targetId = createOpenaireId(targetType, target, true);
			rels
				.add(
					getRelation(
						entity.getId(), targetId, relType, subRelType, relClass, entity, validationdDate));
					OafMapperUtils
						.getRelation(
							entity.getId(), targetId, relType, subRelType, relClass, entity,
							validationdDate));
			rels
				.add(
					getRelation(
						targetId, entity.getId(), relType, subRelType, relClassInverse, entity,
						validationdDate));
					OafMapperUtils
						.getRelation(
							targetId, entity.getId(), relType, subRelType, relClassInverse, entity,
							validationdDate));
		}
	}
}

@ -317,36 +323,6 @@ public abstract class AbstractMdRecordToOafMapper {
		return rels;
	}

	protected Relation getRelation(final String source,
		final String target,
		final String relType,
		final String subRelType,
		final String relClass,
		final OafEntity entity) {
		return getRelation(source, target, relType, subRelType, relClass, entity, null);
	}

	protected Relation getRelation(final String source,
		final String target,
		final String relType,
		final String subRelType,
		final String relClass,
		final OafEntity entity,
		final String validationDate) {
		final Relation rel = new Relation();
		rel.setRelType(relType);
		rel.setSubRelType(subRelType);
		rel.setRelClass(relClass);
		rel.setSource(source);
		rel.setTarget(target);
		rel.setCollectedfrom(entity.getCollectedfrom());
		rel.setDataInfo(entity.getDataInfo());
		rel.setLastupdatetimestamp(entity.getLastupdatetimestamp());
		rel.setValidated(StringUtils.isNotBlank(validationDate));
		rel.setValidationDate(StringUtils.isNotBlank(validationDate) ? validationDate : null);
		return rel;
	}

	protected abstract List<Oaf> addOtherResultRels(
		final Document doc,
		final OafEntity entity);
|
||||
|
|
|
@ -1,32 +1,7 @@
|
|||
|
||||
package eu.dnetlib.dhp.oa.graph.raw;
|
||||
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.DATASET_DEFAULT_RESULTTYPE;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.DATASOURCE_ORGANIZATION;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.DNET_PROVENANCE_ACTIONS;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.ENTITYREGISTRY_PROVENANCE_ACTION;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.HAS_PARTICIPANT;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.IS_MERGED_IN;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.IS_PARTICIPANT;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.IS_PRODUCED_BY;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.IS_PROVIDED_BY;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.IS_RELATED_TO;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.MERGES;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.ORG_ORG_RELTYPE;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.ORP_DEFAULT_RESULTTYPE;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.OUTCOME;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.PARTICIPATION;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.PRODUCES;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.PROJECT_ORGANIZATION;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.PROVIDES;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.PROVISION;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.PUBLICATION_DATASET;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.PUBLICATION_DEFAULT_RESULTTYPE;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.RELATIONSHIP;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.RESULT_PROJECT;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.RESULT_RESULT;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.SOFTWARE_DEFAULT_RESULTTYPE;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.USER_CLAIM;
|
||||
import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
|
||||
import static eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils.*;
|
||||
|
||||
import java.io.Closeable;
|
||||
|
@ -45,6 +20,8 @@ import org.apache.commons.lang3.StringUtils;
|
|||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
|
||||
import eu.dnetlib.dhp.common.DbClient;
|
||||
import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup;
|
||||
|
@ -68,6 +45,7 @@ import eu.dnetlib.dhp.schema.oaf.Relation;
|
|||
import eu.dnetlib.dhp.schema.oaf.Result;
|
||||
import eu.dnetlib.dhp.schema.oaf.Software;
|
||||
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
|
||||
import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
|
||||
import eu.dnetlib.dhp.utils.ISLookupClientFactory;
|
||||
|
||||
public class MigrateDbEntitiesApplication extends AbstractMigrationApplication implements Closeable {
|
||||
|
@ -437,25 +415,14 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication i
|
|||
final List<KeyValue> collectedFrom = listKeyValues(
|
||||
createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"));
|
||||
|
||||
final Relation r1 = new Relation();
|
||||
r1.setRelType(DATASOURCE_ORGANIZATION);
|
||||
r1.setSubRelType(PROVISION);
|
||||
r1.setRelClass(IS_PROVIDED_BY);
|
||||
r1.setSource(dsId);
|
||||
r1.setTarget(orgId);
|
||||
r1.setCollectedfrom(collectedFrom);
|
||||
r1.setDataInfo(info);
|
||||
r1.setLastupdatetimestamp(lastUpdateTimestamp);
|
||||
final Relation r1 = OafMapperUtils
|
||||
.getRelation(
|
||||
dsId, orgId, DATASOURCE_ORGANIZATION, PRODUCES, IS_PROVIDED_BY, collectedFrom, info,
|
||||
lastUpdateTimestamp);
|
||||
|
||||
final Relation r2 = new Relation();
|
||||
r2.setRelType(DATASOURCE_ORGANIZATION);
|
||||
r2.setSubRelType(PROVISION);
|
||||
r2.setRelClass(PROVIDES);
|
||||
r2.setSource(orgId);
|
||||
r2.setTarget(dsId);
|
||||
r2.setCollectedfrom(collectedFrom);
|
||||
r2.setDataInfo(info);
|
||||
r2.setLastupdatetimestamp(lastUpdateTimestamp);
|
||||
final Relation r2 = OafMapperUtils
|
||||
.getRelation(
|
||||
orgId, dsId, DATASOURCE_ORGANIZATION, PRODUCES, PROVIDES, collectedFrom, info, lastUpdateTimestamp);
|
||||
|
||||
return Arrays.asList(r1, r2);
|
||||
} catch (final Exception e) {
|
||||
|
@ -471,25 +438,20 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication i
|
|||
final List<KeyValue> collectedFrom = listKeyValues(
|
||||
createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"));
|
||||
|
||||
final Relation r1 = new Relation();
|
||||
r1.setRelType(PROJECT_ORGANIZATION);
|
||||
r1.setSubRelType(PARTICIPATION);
|
||||
r1.setRelClass(HAS_PARTICIPANT);
|
||||
r1.setSource(projectId);
|
||||
r1.setTarget(orgId);
|
||||
r1.setCollectedfrom(collectedFrom);
|
||||
r1.setDataInfo(info);
|
||||
r1.setLastupdatetimestamp(lastUpdateTimestamp);
|
||||
final List<KeyValue> properties = Lists
|
||||
.newArrayList(
|
||||
keyValue("contribution", String.valueOf(rs.getDouble("contribution"))),
|
||||
keyValue("currency", rs.getString("currency")));
|
||||
|
||||
final Relation r2 = new Relation();
|
||||
r2.setRelType(PROJECT_ORGANIZATION);
|
||||
r2.setSubRelType(PARTICIPATION);
|
||||
r2.setRelClass(IS_PARTICIPANT);
|
||||
r2.setSource(orgId);
|
||||
r2.setTarget(projectId);
|
||||
r2.setCollectedfrom(collectedFrom);
|
||||
r2.setDataInfo(info);
|
||||
r2.setLastupdatetimestamp(lastUpdateTimestamp);
|
||||
final Relation r1 = OafMapperUtils
|
||||
.getRelation(
|
||||
projectId, orgId, PROJECT_ORGANIZATION, PARTICIPATION, HAS_PARTICIPANT, collectedFrom, info,
|
||||
lastUpdateTimestamp, null, properties);
|
||||
|
||||
final Relation r2 = OafMapperUtils
|
||||
.getRelation(
|
||||
orgId, projectId, PROJECT_ORGANIZATION, PARTICIPATION, IS_PARTICIPANT, collectedFrom, info,
|
||||
lastUpdateTimestamp, null, properties);
|
||||
|
||||
return Arrays.asList(r1, r2);
|
||||
} catch (final Exception e) {
|
||||
|
@@ -703,25 +665,12 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication i
			final List<KeyValue> collectedFrom = listKeyValues(
				createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"));

			final Relation r1 = new Relation();
			r1.setRelType(ORG_ORG_RELTYPE);
			r1.setSubRelType(ModelConstants.DEDUP);
			r1.setRelClass(MERGES);
			r1.setSource(orgId1);
			r1.setTarget(orgId2);
			r1.setCollectedfrom(collectedFrom);
			r1.setDataInfo(info);
			r1.setLastupdatetimestamp(lastUpdateTimestamp);
			final Relation r1 = OafMapperUtils
				.getRelation(orgId1, orgId2, ORG_ORG_RELTYPE, DEDUP, MERGES, collectedFrom, info, lastUpdateTimestamp);

			final Relation r2 = new Relation();
			r2.setRelType(ORG_ORG_RELTYPE);
			r2.setSubRelType(ModelConstants.DEDUP);
			r2.setRelClass(IS_MERGED_IN);
			r2.setSource(orgId2);
			r2.setTarget(orgId1);
			r2.setCollectedfrom(collectedFrom);
			r2.setDataInfo(info);
			r2.setLastupdatetimestamp(lastUpdateTimestamp);
			final Relation r2 = OafMapperUtils
				.getRelation(
					orgId2, orgId1, ORG_ORG_RELTYPE, DEDUP, IS_MERGED_IN, collectedFrom, info, lastUpdateTimestamp);
			return Arrays.asList(r1, r2);
		} catch (final Exception e) {
			throw new RuntimeException(e);
@@ -738,17 +687,12 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication i
			final List<KeyValue> collectedFrom = listKeyValues(
				createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"));

			final Relation r = new Relation();
			r.setRelType(ORG_ORG_RELTYPE);
			r.setSubRelType(ModelConstants.RELATIONSHIP);
			r.setRelClass(rs.getString("type"));
			r.setSource(orgId1);
			r.setTarget(orgId2);
			r.setCollectedfrom(collectedFrom);
			r.setDataInfo(info);
			r.setLastupdatetimestamp(lastUpdateTimestamp);

			return Arrays.asList(r);
			return Arrays
				.asList(
					OafMapperUtils
						.getRelation(
							orgId1, orgId2, ORG_ORG_RELTYPE, RELATIONSHIP, rs.getString("type"), collectedFrom, info,
							lastUpdateTimestamp));
		} catch (final Exception e) {
			throw new RuntimeException(e);
		}
@@ -765,29 +709,12 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication i
			final List<KeyValue> collectedFrom = listKeyValues(
				createOpenaireId(10, rs.getString("collectedfromid"), true), rs.getString("collectedfromname"));

			final Relation r1 = new Relation();
			r1.setRelType(ORG_ORG_RELTYPE);
			r1.setSubRelType(ModelConstants.DEDUP);
			r1.setRelClass(relClass);
			r1.setSource(orgId1);
			r1.setTarget(orgId2);
			r1.setCollectedfrom(collectedFrom);
			r1.setDataInfo(info);
			r1.setLastupdatetimestamp(lastUpdateTimestamp);

			// removed because there's no difference between two sides //TODO
			// final Relation r2 = new Relation();
			// r2.setRelType(ORG_ORG_RELTYPE);
			// r2.setSubRelType(ORG_ORG_SUBRELTYPE);
			// r2.setRelClass(relClass);
			// r2.setSource(orgId2);
			// r2.setTarget(orgId1);
			// r2.setCollectedfrom(collectedFrom);
			// r2.setDataInfo(info);
			// r2.setLastupdatetimestamp(lastUpdateTimestamp);
			// return Arrays.asList(r1, r2);

			return Arrays.asList(r1);
			return Arrays
				.asList(
					OafMapperUtils
						.getRelation(
							orgId1, orgId2, ORG_ORG_RELTYPE, DEDUP, relClass, collectedFrom, info,
							lastUpdateTimestamp));
		} catch (final Exception e) {
			throw new RuntimeException(e);
		}
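Every hunk above applies the same refactoring: the nine setter calls collapse into one OafMapperUtils.getRelation(...) invocation whose argument order is source, target, relType, subRelType, relClass, followed by provenance fields. As a reading aid, here is a minimal sketch of the factory shape these call sites imply; it is reconstructed from the hunks, not copied from the real OafMapperUtils, so names and overloads may differ.

import java.util.List;

import eu.dnetlib.dhp.schema.oaf.DataInfo;
import eu.dnetlib.dhp.schema.oaf.KeyValue;
import eu.dnetlib.dhp.schema.oaf.Relation;

public class RelationFactorySketch {

	// Hypothetical reconstruction: mirrors the setter sequence removed above.
	public static Relation getRelation(final String source, final String target,
		final String relType, final String subRelType, final String relClass,
		final List<KeyValue> collectedFrom, final DataInfo dataInfo,
		final Long lastUpdateTimestamp, final String validationDate,
		final List<KeyValue> properties) {
		final Relation rel = new Relation();
		rel.setRelType(relType);
		rel.setSubRelType(subRelType);
		rel.setRelClass(relClass);
		rel.setSource(source);
		rel.setTarget(target);
		rel.setCollectedfrom(collectedFrom);
		rel.setDataInfo(dataInfo);
		rel.setLastupdatetimestamp(lastUpdateTimestamp);
		rel.setValidationDate(validationDate); // assumed field, inferred from the `null` argument above
		rel.setProperties(properties); // assumed field, inferred from the `properties` argument above
		return rel;
	}

	// Short overload matching the eight-argument call sites above.
	public static Relation getRelation(final String source, final String target,
		final String relType, final String subRelType, final String relClass,
		final List<KeyValue> collectedFrom, final DataInfo dataInfo,
		final Long lastUpdateTimestamp) {
		return getRelation(source, target, relType, subRelType, relClass,
			collectedFrom, dataInfo, lastUpdateTimestamp, null, null);
	}
}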
@@ -19,7 +19,6 @@ import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.oa.graph.raw.common.RelationIdMapping;
import eu.dnetlib.dhp.schema.oaf.Relation;
import scala.Tuple2;
@@ -78,8 +77,8 @@ public class PatchRelationsApplication {

		final String relationPath = graphBasePath + "/relation";

		final Dataset<Relation> rels = Utils.readPath(spark, relationPath, Relation.class);
		final Dataset<RelationIdMapping> idMapping = Utils.readPath(spark, idMappingPath, RelationIdMapping.class);
		final Dataset<Relation> rels = readPath(spark, relationPath, Relation.class);
		final Dataset<RelationIdMapping> idMapping = readPath(spark, idMappingPath, RelationIdMapping.class);

		log.info("relations: {}", rels.count());
		log.info("idMapping: {}", idMapping.count());
@@ -122,4 +121,12 @@ public class PatchRelationsApplication {
			.text(relationPath);
	}

	public static <R> Dataset<R> readPath(
		SparkSession spark, String inputPath, Class<R> clazz) {
		return spark
			.read()
			.textFile(inputPath)
			.map((MapFunction<String, R>) value -> OBJECT_MAPPER.readValue(value, clazz), Encoders.bean(clazz));
	}

}
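The new local readPath reads newline-delimited JSON and maps each line onto a bean with the class-level Jackson OBJECT_MAPPER, replacing the dropped eu.dnetlib.dhp.oa.graph.dump.Utils helper. A hedged usage sketch follows; the local session settings and the /tmp path are illustrative, not taken from the diff.

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.SparkSession;

import eu.dnetlib.dhp.schema.oaf.Relation;

public class ReadPathUsageSketch {
	public static void main(String[] args) {
		// Local session for illustration only; the real job runs on YARN.
		final SparkSession spark = SparkSession
			.builder()
			.appName("PatchRelationsApplication")
			.master("local[*]")
			.getOrCreate();

		// Deserializes one Relation per JSON line, using Encoders.bean under the hood.
		final Dataset<Relation> rels = PatchRelationsApplication
			.readPath(spark, "/tmp/graph/relation", Relation.class); // hypothetical path

		rels.show(5, false);
	}
}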
@@ -1,26 +0,0 @@
[
  {
    "paramName": "s",
    "paramLongName": "sourcePath",
    "paramDescription": "the path of the sequential file to read",
    "paramRequired": true
  },
  {
    "paramName": "out",
    "paramLongName": "outputPath",
    "paramDescription": "the path used to store temporary output files",
    "paramRequired": true
  },
  {
    "paramName": "ssm",
    "paramLongName": "isSparkSessionManaged",
    "paramDescription": "true if the spark session is managed, false otherwise",
    "paramRequired": false
  },
  {
    "paramName": "gp",
    "paramLongName": "graphPath",
    "paramDescription": "the relationPath",
    "paramRequired": true
  }
]
@@ -1,37 +0,0 @@
[
  {
    "paramName": "is",
    "paramLongName": "isLookUpUrl",
    "paramDescription": "URL of the isLookUp Service",
    "paramRequired": true
  },
  {
    "paramName": "nn",
    "paramLongName": "nameNode",
    "paramDescription": "the name node",
    "paramRequired": true
  },
  {
    "paramName": "out",
    "paramLongName": "outputPath",
    "paramDescription": "the path used to store temporary output files",
    "paramRequired": true
  },
  {
    "paramName": "sd",
    "paramLongName": "singleDeposition",
    "paramDescription": "true if the dump should be created for a single community",
    "paramRequired": true
  },
  {
    "paramName": "ci",
    "paramLongName": "communityId",
    "paramDescription": "the id of the community for which to create the dump",
    "paramRequired": true
  }
]
@@ -1,30 +0,0 @@
[
  {
    "paramName": "s",
    "paramLongName": "sourcePath",
    "paramDescription": "the path of the sequential file to read",
    "paramRequired": true
  },
  {
    "paramName": "out",
    "paramLongName": "outputPath",
    "paramDescription": "the path used to store temporary output files",
    "paramRequired": true
  },
  {
    "paramName": "ssm",
    "paramLongName": "isSparkSessionManaged",
    "paramDescription": "true if the spark session is managed, false otherwise",
    "paramRequired": false
  },
  {
    "paramName": "ra",
    "paramLongName": "resultAggregation",
    "paramDescription": "true if all the result types should be saved under the generic result name, false to get a different dump for each result type",
    "paramRequired": true
  }
]
@@ -1,35 +0,0 @@
[
  {
    "paramName": "cmp",
    "paramLongName": "communityMapPath",
    "paramDescription": "the path to the serialization of the community map",
    "paramRequired": true
  },
  {
    "paramName": "s",
    "paramLongName": "sourcePath",
    "paramDescription": "the path of the sequential file to read",
    "paramRequired": true
  },
  {
    "paramName": "out",
    "paramLongName": "outputPath",
    "paramDescription": "the path used to store temporary output files",
    "paramRequired": true
  },
  {
    "paramName": "ssm",
    "paramLongName": "isSparkSessionManaged",
    "paramDescription": "true if the spark session is managed, false otherwise",
    "paramRequired": false
  },
  {
    "paramName": "tn",
    "paramLongName": "resultTableName",
    "paramDescription": "the name of the result table we are currently working on",
    "paramRequired": true
  }
]
@@ -1,24 +0,0 @@
[
  {
    "paramName": "is",
    "paramLongName": "isLookUpUrl",
    "paramDescription": "URL of the isLookUp Service",
    "paramRequired": false
  },
  {
    "paramName": "hdfs",
    "paramLongName": "hdfsPath",
    "paramDescription": "the path used to store temporary output files",
    "paramRequired": true
  },
  {
    "paramName": "nn",
    "paramLongName": "nameNode",
    "paramDescription": "the name node",
    "paramRequired": true
  }
]
@@ -1,36 +0,0 @@
[
  {
    "paramName": "ocm",
    "paramLongName": "organizationCommunityMap",
    "paramDescription": "the organization community map association",
    "paramRequired": false
  },
  {
    "paramName": "s",
    "paramLongName": "sourcePath",
    "paramDescription": "the path of the sequential file to read",
    "paramRequired": true
  },
  {
    "paramName": "out",
    "paramLongName": "outputPath",
    "paramDescription": "the path used to store temporary output files",
    "paramRequired": true
  },
  {
    "paramName": "ssm",
    "paramLongName": "isSparkSessionManaged",
    "paramDescription": "true if the spark session is managed, false otherwise",
    "paramRequired": false
  },
  {
    "paramName": "cmp",
    "paramLongName": "communityMapPath",
    "paramDescription": "the path to the serialization of the community map",
    "paramRequired": true
  }
]
@@ -1,47 +0,0 @@
[
  {
    "paramName": "cmp",
    "paramLongName": "communityMapPath",
    "paramDescription": "the path to the serialization of the community map",
    "paramRequired": true
  },
  {
    "paramName": "s",
    "paramLongName": "sourcePath",
    "paramDescription": "the path of the sequential file to read",
    "paramRequired": true
  },
  {
    "paramName": "out",
    "paramLongName": "outputPath",
    "paramDescription": "the path used to store temporary output files",
    "paramRequired": true
  },
  {
    "paramName": "ssm",
    "paramLongName": "isSparkSessionManaged",
    "paramDescription": "true if the spark session is managed, false otherwise",
    "paramRequired": false
  },
  {
    "paramName": "tn",
    "paramLongName": "resultTableName",
    "paramDescription": "the name of the result table we are currently working on",
    "paramRequired": true
  },
  {
    "paramName": "dt",
    "paramLongName": "dumpType",
    "paramDescription": "the type of the dump (complete for the whole graph, community for the products related to communities, funder for the results with at least a link to a project)",
    "paramRequired": false
  },
  {
    "paramName": "cid",
    "paramLongName": "communityId",
    "paramDescription": "the id of the community to be dumped",
    "paramRequired": false
  }
]
@@ -1,35 +0,0 @@
[
  {
    "paramName": "s",
    "paramLongName": "sourcePath",
    "paramDescription": "the path of the sequential file to read",
    "paramRequired": true
  },
  {
    "paramName": "out",
    "paramLongName": "outputPath",
    "paramDescription": "the path used to store temporary output files",
    "paramRequired": true
  },
  {
    "paramName": "ssm",
    "paramLongName": "isSparkSessionManaged",
    "paramDescription": "true if the spark session is managed, false otherwise",
    "paramRequired": false
  },
  {
    "paramName": "tn",
    "paramLongName": "resultTableName",
    "paramDescription": "the name of the result table we are currently working on",
    "paramRequired": true
  },
  {
    "paramName": "gp",
    "paramLongName": "graphPath",
    "paramDescription": "the path to the relations",
    "paramRequired": true
  }
]
@@ -1,32 +0,0 @@
[
  {
    "paramName": "s",
    "paramLongName": "sourcePath",
    "paramDescription": "the path of the sequential file to read",
    "paramRequired": true
  },
  {
    "paramName": "out",
    "paramLongName": "outputPath",
    "paramDescription": "the path used to store temporary output files",
    "paramRequired": true
  },
  {
    "paramName": "ssm",
    "paramLongName": "isSparkSessionManaged",
    "paramDescription": "true if the spark session is managed, false otherwise",
    "paramRequired": false
  },
  {
    "paramName": "rs",
    "paramLongName": "removeSet",
    "paramDescription": "the list of relation classnames, split by ';', not to be dumped",
    "paramRequired": false
  }
]
@@ -1,29 +0,0 @@
[
  {
    "paramName": "s",
    "paramLongName": "sourcePath",
    "paramDescription": "the path of the sequential file to read",
    "paramRequired": true
  },
  {
    "paramName": "out",
    "paramLongName": "outputPath",
    "paramDescription": "the path used to store temporary output files",
    "paramRequired": true
  },
  {
    "paramName": "ssm",
    "paramLongName": "isSparkSessionManaged",
    "paramDescription": "true if the spark session is managed, false otherwise",
    "paramRequired": false
  },
  {
    "paramName": "pip",
    "paramLongName": "preparedInfoPath",
    "paramDescription": "the path of the result-project association list",
    "paramRequired": true
  }
]
@@ -1,20 +0,0 @@
[
  {
    "paramName": "s",
    "paramLongName": "sourcePath",
    "paramDescription": "the path of the sequential file to read",
    "paramRequired": true
  },
  {
    "paramName": "out",
    "paramLongName": "outputPath",
    "paramDescription": "the path used to store temporary output files",
    "paramRequired": true
  },
  {
    "paramName": "ssm",
    "paramLongName": "isSparkSessionManaged",
    "paramDescription": "true if the spark session is managed, false otherwise",
    "paramRequired": false
  }
]
@@ -1,37 +0,0 @@
[
  {
    "paramName": "cmp",
    "paramLongName": "communityMapPath",
    "paramDescription": "the path to the serialization of the community map",
    "paramRequired": false
  },
  {
    "paramName": "s",
    "paramLongName": "sourcePath",
    "paramDescription": "the path of the sequential file to read",
    "paramRequired": true
  },
  {
    "paramName": "out",
    "paramLongName": "outputPath",
    "paramDescription": "the path used to store temporary output files",
    "paramRequired": true
  },
  {
    "paramName": "ssm",
    "paramLongName": "isSparkSessionManaged",
    "paramDescription": "true if the spark session is managed, false otherwise",
    "paramRequired": false
  },
  {
    "paramName": "cid",
    "paramLongName": "communityId",
    "paramDescription": "the id of the community to be dumped",
    "paramRequired": false
  }
]
@@ -1,58 +0,0 @@
[
  {
    "paramName": "dt",
    "paramLongName": "depositionType",
    "paramDescription": "the type of the deposition (new, version, update)",
    "paramRequired": true
  },
  {
    "paramName": "cri",
    "paramLongName": "conceptRecordId",
    "paramDescription": "the id of the concept record for a new version",
    "paramRequired": false
  },
  {
    "paramName": "di",
    "paramLongName": "depositionId",
    "paramDescription": "the id of an open deposition which has not been published",
    "paramRequired": false
  },
  {
    "paramName": "hdfsp",
    "paramLongName": "hdfsPath",
    "paramDescription": "the path of the folder to find files to send to Zenodo",
    "paramRequired": true
  },
  {
    "paramName": "nn",
    "paramLongName": "nameNode",
    "paramDescription": "the name node",
    "paramRequired": true
  },
  {
    "paramName": "at",
    "paramLongName": "accessToken",
    "paramDescription": "the access token for the deposition",
    "paramRequired": false
  },
  {
    "paramName": "cu",
    "paramLongName": "connectionUrl",
    "paramDescription": "the url to connect to deposit",
    "paramRequired": false
  },
  {
    "paramName": "m",
    "paramLongName": "metadata",
    "paramDescription": "the metadata associated to the deposition",
    "paramRequired": false
  },
  {
    "paramName": "p",
    "paramLongName": "publish",
    "paramDescription": "whether to publish the upload",
    "paramRequired": false
  }
]
@@ -1,30 +0,0 @@
<configuration>
    <property>
        <name>jobTracker</name>
        <value>yarnRM</value>
    </property>
    <property>
        <name>nameNode</name>
        <value>hdfs://nameservice1</value>
    </property>
    <property>
        <name>oozie.use.system.libpath</name>
        <value>true</value>
    </property>
    <property>
        <name>hiveMetastoreUris</name>
        <value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
    </property>
    <property>
        <name>hiveJdbcUrl</name>
        <value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000</value>
    </property>
    <property>
        <name>hiveDbName</name>
        <value>openaire</value>
    </property>
    <property>
        <name>oozie.launcher.mapreduce.user.classpath.first</name>
        <value>true</value>
    </property>
</configuration>
@@ -1,4 +0,0 @@
## This is a classpath-based import file (this header is required)
dump_complete classpath eu/dnetlib/dhp/oa/graph/dump/wf/subworkflows/complete/oozie_app
dump_funder classpath eu/dnetlib/dhp/oa/graph/dump/wf/subworkflows/funder/oozie_app
dump_community classpath eu/dnetlib/dhp/oa/graph/dump/wf/subworkflows/community/oozie_app
@@ -1,306 +0,0 @@
<workflow-app name="dump_graph" xmlns="uri:oozie:workflow:0.5">

    <parameters>
        <property>
            <name>singleDeposition</name>
            <description>Indicates if it is a single community deposition</description>
        </property>
        <property>
            <name>communityId</name>
            <description>the id of the community to be dumped if a dump for a single community should be done</description>
        </property>
        <property>
            <name>dumpType</name>
            <description>the type of the dump, one of {complete, community, funder}</description>
        </property>
        <property>
            <name>onlyUpload</name>
            <description>true if the dump is already done and should only be uploaded to Zenodo</description>
        </property>
        <property>
            <name>upload</name>
            <description>true if the dump should be uploaded to Zenodo</description>
        </property>
        <property>
            <name>sourcePath</name>
            <description>the source path</description>
        </property>
        <property>
            <name>isLookUpUrl</name>
            <description>the isLookup service endpoint</description>
        </property>
        <property>
            <name>outputPath</name>
            <description>the output path</description>
        </property>
        <property>
            <name>resultAggregation</name>
            <description>true if all the result types have to be dumped under result, false otherwise</description>
        </property>
        <property>
            <name>accessToken</name>
            <description>the access token used for the deposition in Zenodo</description>
        </property>
        <property>
            <name>connectionUrl</name>
            <description>the connection url for Zenodo</description>
        </property>
        <property>
            <name>metadata</name>
            <description>the metadata associated to the deposition</description>
        </property>
        <property>
            <name>depositionType</name>
            <description>the type of deposition we want to perform. "new" for a brand new deposition, "version" for a new version of a published deposition (in this case the concept record id must be provided), "upload" to upload content to an open deposition for which we already have the deposition id (in this case the deposition id should be provided)</description>
        </property>
        <property>
            <name>conceptRecordId</name>
            <description>for a new version, the id of the record for the old deposition</description>
        </property>
        <property>
            <name>depositionId</name>
            <description>the id of an open deposition to which content has to be added</description>
        </property>
        <property>
            <name>organizationCommunityMap</name>
            <description>the organization community map</description>
        </property>

        <property>
            <name>hiveDbName</name>
            <description>the target hive database name</description>
        </property>
        <property>
            <name>hiveJdbcUrl</name>
            <description>hive server jdbc url</description>
        </property>
        <property>
            <name>hiveMetastoreUris</name>
            <description>hive server metastore URIs</description>
        </property>
        <property>
            <name>sparkDriverMemory</name>
            <description>memory for driver process</description>
        </property>
        <property>
            <name>sparkExecutorMemory</name>
            <description>memory for individual executor</description>
        </property>
        <property>
            <name>sparkExecutorCores</name>
            <description>number of cores used by single executor</description>
        </property>
        <property>
            <name>oozieActionShareLibForSpark2</name>
            <description>oozie action sharelib for spark 2.*</description>
        </property>
        <property>
            <name>spark2ExtraListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
            <description>spark 2.* extra listeners classname</description>
        </property>
        <property>
            <name>spark2SqlQueryExecutionListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
            <description>spark 2.* sql query execution listeners classname</description>
        </property>
        <property>
            <name>spark2YarnHistoryServerAddress</name>
            <description>spark 2.* yarn history server address</description>
        </property>
        <property>
            <name>spark2EventLogDir</name>
            <description>spark 2.* event log dir location</description>
        </property>
    </parameters>

    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>mapreduce.job.queuename</name>
                <value>${queueName}</value>
            </property>
            <property>
                <name>oozie.launcher.mapred.job.queue.name</name>
                <value>${oozieLauncherQueueName}</value>
            </property>
            <property>
                <name>oozie.action.sharelib.for.spark</name>
                <value>${oozieActionShareLibForSpark2}</value>
            </property>
        </configuration>
    </global>

    <start to="only_upload"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <decision name="only_upload">
        <switch>
            <case to="send_zenodo">${wf:conf('onlyUpload') eq true}</case>
            <default to="reset_outputpath"/>
        </switch>
    </decision>

    <action name="reset_outputpath">
        <fs>
            <delete path="${outputPath}"/>
            <mkdir path="${outputPath}"/>
        </fs>
        <ok to="save_community_map"/>
        <error to="Kill"/>
    </action>

    <action name="save_community_map">
        <java>
            <main-class>eu.dnetlib.dhp.oa.graph.dump.SaveCommunityMap</main-class>
            <arg>--outputPath</arg><arg>${workingDir}/communityMap</arg>
            <arg>--nameNode</arg><arg>${nameNode}</arg>
            <arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
            <arg>--singleDeposition</arg><arg>${singleDeposition}</arg>
            <arg>--communityId</arg><arg>${communityId}</arg>
        </java>
        <ok to="choose_dump"/>
        <error to="Kill"/>
    </action>

    <decision name="choose_dump">
        <switch>
            <case to="dump_funder">${wf:conf('dumpType') eq "funder"}</case>
            <case to="dump_community">${wf:conf('dumpType') eq "community"}</case>
            <default to="dump_complete"/>
        </switch>
    </decision>

    <!-- Sub-workflow which runs the dump for the complete graph -->
    <action name="dump_complete">
        <sub-workflow>
            <app-path>${wf:appPath()}/dump_complete
            </app-path>
            <propagate-configuration/>
            <configuration>
                <property>
                    <name>communityMapPath</name>
                    <value>${workingDir}/communityMap</value>
                </property>
                <property>
                    <name>outputPath</name>
                    <value>${workingDir}/tar</value>
                </property>
                <property>
                    <name>sourcePath</name>
                    <value>${sourcePath}</value>
                </property>
                <property>
                    <name>organizationCommunityMap</name>
                    <value>${organizationCommunityMap}</value>
                </property>
                <property>
                    <name>isLookUpUrl</name>
                    <value>${isLookUpUrl}</value>
                </property>
                <property>
                    <name>resultAggregation</name>
                    <value>${resultAggregation}</value>
                </property>
            </configuration>
        </sub-workflow>
        <ok to="make_archive" />
        <error to="Kill" />
    </action>

    <!-- Sub-workflow which runs the dump for the community products -->
    <action name="dump_community">
        <sub-workflow>
            <app-path>${wf:appPath()}/dump_community
            </app-path>
            <propagate-configuration/>
            <configuration>
                <property>
                    <name>sourcePath</name>
                    <value>${sourcePath}</value>
                </property>
                <property>
                    <name>communityMapPath</name>
                    <value>${workingDir}/communityMap</value>
                </property>
                <property>
                    <name>outputPath</name>
                    <value>${workingDir}/tar</value>
                </property>
            </configuration>
        </sub-workflow>
        <ok to="make_archive" />
        <error to="Kill" />
    </action>

    <action name="dump_funder">
        <sub-workflow>
            <app-path>${wf:appPath()}/dump_funder
            </app-path>
            <propagate-configuration/>
            <configuration>
                <property>
                    <name>communityMapPath</name>
                    <value>${workingDir}/communityMap</value>
                </property>
                <property>
                    <name>outputPath</name>
                    <value>${workingDir}/tar</value>
                </property>
                <property>
                    <name>sourcePath</name>
                    <value>${sourcePath}</value>
                </property>
                <property>
                    <name>dumpType</name>
                    <value>${dumpType}</value>
                </property>
            </configuration>
        </sub-workflow>
        <ok to="make_archive" />
        <error to="Kill" />
    </action>

    <action name="make_archive">
        <java>
            <main-class>eu.dnetlib.dhp.oa.graph.dump.MakeTar</main-class>
            <arg>--hdfsPath</arg><arg>${outputPath}</arg>
            <arg>--nameNode</arg><arg>${nameNode}</arg>
            <arg>--sourcePath</arg><arg>${workingDir}/tar</arg>
        </java>
        <ok to="should_upload"/>
        <error to="Kill"/>
    </action>

    <decision name="should_upload">
        <switch>
            <case to="send_zenodo">${wf:conf('upload') eq true}</case>
            <default to="End"/>
        </switch>
    </decision>

    <action name="send_zenodo">
        <java>
            <main-class>eu.dnetlib.dhp.oa.graph.dump.SendToZenodoHDFS</main-class>
            <arg>--hdfsPath</arg><arg>${outputPath}</arg>
            <arg>--nameNode</arg><arg>${nameNode}</arg>
            <arg>--accessToken</arg><arg>${accessToken}</arg>
            <arg>--connectionUrl</arg><arg>${connectionUrl}</arg>
            <arg>--metadata</arg><arg>${metadata}</arg>
            <arg>--conceptRecordId</arg><arg>${conceptRecordId}</arg>
            <arg>--depositionType</arg><arg>${depositionType}</arg>
            <arg>--depositionId</arg><arg>${depositionId}</arg>
        </java>
        <ok to="End"/>
        <error to="Kill"/>
    </action>

    <end name="End"/>

</workflow-app>
@@ -1,30 +0,0 @@
<configuration>
    <property>
        <name>jobTracker</name>
        <value>yarnRM</value>
    </property>
    <property>
        <name>nameNode</name>
        <value>hdfs://nameservice1</value>
    </property>
    <property>
        <name>oozie.use.system.libpath</name>
        <value>true</value>
    </property>
    <property>
        <name>hiveMetastoreUris</name>
        <value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
    </property>
    <property>
        <name>hiveJdbcUrl</name>
        <value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000</value>
    </property>
    <property>
        <name>hiveDbName</name>
        <value>openaire</value>
    </property>
    <property>
        <name>oozie.launcher.mapreduce.user.classpath.first</name>
        <value>true</value>
    </property>
</configuration>
@@ -1,347 +0,0 @@
<workflow-app name="sub_dump_community_funder_results" xmlns="uri:oozie:workflow:0.5">
    <parameters>

        <property>
            <name>sourcePath</name>
            <description>the source path</description>
        </property>
        <property>
            <name>outputPath</name>
            <description>the output path</description>
        </property>
        <property>
            <name>communityMapPath</name>
            <description>the path to the community map</description>
        </property>
        <property>
            <name>selectedResults</name>
            <description>the path to the possible subset of results to be dumped</description>
        </property>
        <property>
            <name>hiveDbName</name>
            <description>the target hive database name</description>
        </property>
        <property>
            <name>hiveJdbcUrl</name>
            <description>hive server jdbc url</description>
        </property>
        <property>
            <name>hiveMetastoreUris</name>
            <description>hive server metastore URIs</description>
        </property>
        <property>
            <name>sparkDriverMemory</name>
            <description>memory for driver process</description>
        </property>
        <property>
            <name>sparkExecutorMemory</name>
            <description>memory for individual executor</description>
        </property>
        <property>
            <name>sparkExecutorCores</name>
            <description>number of cores used by single executor</description>
        </property>
        <property>
            <name>oozieActionShareLibForSpark2</name>
            <description>oozie action sharelib for spark 2.*</description>
        </property>
        <property>
            <name>spark2ExtraListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
            <description>spark 2.* extra listeners classname</description>
        </property>
        <property>
            <name>spark2SqlQueryExecutionListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
            <description>spark 2.* sql query execution listeners classname</description>
        </property>
        <property>
            <name>spark2YarnHistoryServerAddress</name>
            <description>spark 2.* yarn history server address</description>
        </property>
        <property>
            <name>spark2EventLogDir</name>
            <description>spark 2.* event log dir location</description>
        </property>
    </parameters>

    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>mapreduce.job.queuename</name>
                <value>${queueName}</value>
            </property>
            <property>
                <name>oozie.launcher.mapred.job.queue.name</name>
                <value>${oozieLauncherQueueName}</value>
            </property>
            <property>
                <name>oozie.action.sharelib.for.spark</name>
                <value>${oozieActionShareLibForSpark2}</value>
            </property>
        </configuration>
    </global>

    <start to="fork_dump"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <fork name="fork_dump">
        <path start="dump_publication"/>
        <path start="dump_dataset"/>
        <path start="dump_orp"/>
        <path start="dump_software"/>
    </fork>

    <action name="dump_publication">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Dump table publication for community/funder related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${selectedResults}/publication</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
            <arg>--outputPath</arg><arg>${workingDir}/dump/publication</arg>
            <arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
            <arg>--dumpType</arg><arg>${dumpType}</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <action name="dump_dataset">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Dump table dataset for community/funder related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${selectedResults}/dataset</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
            <arg>--outputPath</arg><arg>${workingDir}/dump/dataset</arg>
            <arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <action name="dump_orp">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Dump table ORP for community related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${selectedResults}/otherresearchproduct</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
            <arg>--outputPath</arg><arg>${workingDir}/dump/otherresearchproduct</arg>
            <arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <action name="dump_software">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Dump table software for community related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${selectedResults}/software</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
            <arg>--outputPath</arg><arg>${workingDir}/dump/software</arg>
            <arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <join name="join_dump" to="prepareResultProject"/>

    <action name="prepareResultProject">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Prepare association result subset of project info</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkPrepareResultProject</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}</arg>
            <arg>--outputPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="fork_extendWithProject"/>
        <error to="Kill"/>
    </action>

    <fork name="fork_extendWithProject">
        <path start="extend_publication"/>
        <path start="extend_dataset"/>
        <path start="extend_orp"/>
        <path start="extend_software"/>
    </fork>

    <action name="extend_publication">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Extend dumped publications with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/dump/publication</arg>
            <arg>--outputPath</arg><arg>${outputPath}/ext/publication</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <action name="extend_dataset">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Extend dumped dataset with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/dump/dataset</arg>
            <arg>--outputPath</arg><arg>${outputPath}/ext/dataset</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <action name="extend_orp">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Extend dumped ORP with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/dump/otherresearchproduct</arg>
            <arg>--outputPath</arg><arg>${outputPath}/ext/orp</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <action name="extend_software">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Extend dumped software with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/dump/software</arg>
            <arg>--outputPath</arg><arg>${outputPath}/ext/software</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>
    <join name="join_extend" to="End"/>

    <end name="End"/>

</workflow-app>
@@ -1,30 +0,0 @@
<configuration>
    <property>
        <name>jobTracker</name>
        <value>yarnRM</value>
    </property>
    <property>
        <name>nameNode</name>
        <value>hdfs://nameservice1</value>
    </property>
    <property>
        <name>oozie.use.system.libpath</name>
        <value>true</value>
    </property>
    <property>
        <name>hiveMetastoreUris</name>
        <value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
    </property>
    <property>
        <name>hiveJdbcUrl</name>
        <value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000</value>
    </property>
    <property>
        <name>hiveDbName</name>
        <value>openaire</value>
    </property>
    <property>
        <name>oozie.launcher.mapreduce.user.classpath.first</name>
        <value>true</value>
    </property>
</configuration>
@@ -1,2 +0,0 @@
## This is a classpath-based import file (this header is required)
dump_common classpath eu/dnetlib/dhp/oa/graph/dump/wf/subworkflows/commoncommunityfunder/oozie_app
@@ -1,145 +0,0 @@
<workflow-app name="sub_dump_community_products" xmlns="uri:oozie:workflow:0.5">

    <parameters>
        <property>
            <name>sourcePath</name>
            <description>the source path</description>
        </property>
        <property>
            <name>outputPath</name>
            <description>the output path</description>
        </property>
        <property>
            <name>hiveDbName</name>
            <description>the target hive database name</description>
        </property>
        <property>
            <name>hiveJdbcUrl</name>
            <description>hive server jdbc url</description>
        </property>
        <property>
            <name>hiveMetastoreUris</name>
            <description>hive server metastore URIs</description>
        </property>
        <property>
            <name>sparkDriverMemory</name>
            <description>memory for driver process</description>
        </property>
        <property>
            <name>sparkExecutorMemory</name>
            <description>memory for individual executor</description>
        </property>
        <property>
            <name>sparkExecutorCores</name>
            <description>number of cores used by single executor</description>
        </property>
        <property>
            <name>oozieActionShareLibForSpark2</name>
            <description>oozie action sharelib for spark 2.*</description>
        </property>
        <property>
            <name>spark2ExtraListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
            <description>spark 2.* extra listeners classname</description>
        </property>
        <property>
            <name>spark2SqlQueryExecutionListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
            <description>spark 2.* sql query execution listeners classname</description>
        </property>
        <property>
            <name>spark2YarnHistoryServerAddress</name>
            <description>spark 2.* yarn history server address</description>
        </property>
        <property>
            <name>spark2EventLogDir</name>
            <description>spark 2.* event log dir location</description>
        </property>
    </parameters>

    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>mapreduce.job.queuename</name>
                <value>${queueName}</value>
            </property>
            <property>
                <name>oozie.launcher.mapred.job.queue.name</name>
                <value>${oozieLauncherQueueName}</value>
            </property>
            <property>
                <name>oozie.action.sharelib.for.spark</name>
                <value>${oozieActionShareLibForSpark2}</value>
            </property>
        </configuration>
    </global>

    <start to="common_action_community_funder"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <action name="common_action_community_funder">
        <sub-workflow>
            <app-path>${wf:appPath()}/dump_common
            </app-path>
            <propagate-configuration/>
            <configuration>
                <property>
                    <name>sourcePath</name>
                    <value>${sourcePath}</value>
                </property>
                <property>
                    <name>selectedResults</name>
                    <value>${sourcePath}</value>
                </property>
                <property>
                    <name>communityMapPath</name>
                    <value>${workingDir}/communityMap</value>
                </property>
                <property>
                    <name>outputPath</name>
                    <value>${workingDir}</value>
                </property>
            </configuration>
        </sub-workflow>
        <ok to="splitForCommunities" />
        <error to="Kill" />
    </action>

    <action name="splitForCommunities">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Split dumped result for community</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkSplitForCommunity</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/ext</arg>
            <arg>--outputPath</arg><arg>${outputPath}</arg>
            <arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
        </spark>
        <ok to="End"/>
        <error to="Kill"/>
    </action>

    <end name="End"/>

</workflow-app>
@@ -1,30 +0,0 @@
<configuration>
    <property>
        <name>jobTracker</name>
        <value>yarnRM</value>
    </property>
    <property>
        <name>nameNode</name>
        <value>hdfs://nameservice1</value>
    </property>
    <property>
        <name>oozie.use.system.libpath</name>
        <value>true</value>
    </property>
    <property>
        <name>hiveMetastoreUris</name>
        <value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
    </property>
    <property>
        <name>hiveJdbcUrl</name>
        <value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000</value>
    </property>
    <property>
        <name>hiveDbName</name>
        <value>openaire</value>
    </property>
    <property>
        <name>oozie.launcher.mapreduce.user.classpath.first</name>
        <value>true</value>
    </property>
</configuration>
@ -1,538 +0,0 @@
|
|||
<workflow-app name="sub-dump_complete" xmlns="uri:oozie:workflow:0.5">
|
||||
<parameters>
|
||||
<property>
|
||||
<name>sourcePath</name>
|
||||
<description>the source path</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>outputPath</name>
|
||||
<description>the output path</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>resultAggregation</name>
|
||||
<description>true if all the result type have to be dumped under result. false otherwise</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>organizationCommunityMap</name>
|
||||
<description>the organization community map</description>
|
||||
</property>
|
||||
|
||||
<property>
|
||||
<name>hiveDbName</name>
|
||||
<description>the target hive database name</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>hiveJdbcUrl</name>
|
||||
<description>hive server jdbc url</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>hiveMetastoreUris</name>
|
||||
<description>hive server metastore URIs</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>sparkDriverMemory</name>
|
||||
<description>memory for driver process</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>sparkExecutorMemory</name>
|
||||
<description>memory for individual executor</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>sparkExecutorCores</name>
|
||||
<description>number of cores used by single executor</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>oozieActionShareLibForSpark2</name>
|
||||
<description>oozie action sharelib for spark 2.*</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>spark2ExtraListeners</name>
|
||||
<value>com.cloudera.spark.lineage.NavigatorAppListener</value>
|
||||
<description>spark 2.* extra listeners classname</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>spark2SqlQueryExecutionListeners</name>
|
||||
<value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
|
||||
<description>spark 2.* sql query execution listeners classname</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>spark2YarnHistoryServerAddress</name>
|
||||
<description>spark 2.* yarn history server address</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>spark2EventLogDir</name>
|
||||
<description>spark 2.* event log dir location</description>
|
||||
</property>
|
||||
</parameters>
|
||||
|
||||
<global>
|
||||
<job-tracker>${jobTracker}</job-tracker>
|
||||
<name-node>${nameNode}</name-node>
|
||||
<configuration>
|
||||
<property>
|
||||
<name>mapreduce.job.queuename</name>
|
||||
<value>${queueName}</value>
|
||||
</property>
|
||||
<property>
|
||||
<name>oozie.launcher.mapred.job.queue.name</name>
|
||||
<value>${oozieLauncherQueueName}</value>
|
||||
</property>
|
||||
<property>
|
||||
<name>oozie.action.sharelib.for.spark</name>
|
||||
<value>${oozieActionShareLibForSpark2}</value>
|
||||
</property>
|
||||
|
||||
</configuration>
|
||||
</global>
|
||||
|
||||
<start to="fork_dump" />
|
||||
|
||||
|
||||
|
||||
<fork name="fork_dump">
|
||||
<path start="dump_publication"/>
|
||||
<path start="dump_dataset"/>
|
||||
<path start="dump_orp"/>
|
||||
<path start="dump_software"/>
|
||||
<path start="dump_organization"/>
|
||||
<path start="dump_project"/>
|
||||
<path start="dump_datasource"/>
|
||||
<path start="select_relation"/>
|
||||
</fork>
|
||||
|
||||
<action name="dump_publication">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Dump table publication </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpEntitiesJob</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${sourcePath}/publication</arg>
|
||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
|
||||
<arg>--outputPath</arg><arg>${workingDir}/result/publication</arg>
|
||||
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
||||
</spark>
|
||||
<ok to="join_dump"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
<action name="dump_dataset">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Dump table dataset </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpEntitiesJob</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${sourcePath}/dataset</arg>
|
||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
|
||||
<arg>--outputPath</arg><arg>${workingDir}/result/dataset</arg>
|
||||
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
||||
</spark>
|
||||
<ok to="join_dump"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
<action name="dump_orp">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Dump table ORP </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpEntitiesJob</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${sourcePath}/otherresearchproduct</arg>
|
||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
|
||||
<arg>--outputPath</arg><arg>${workingDir}/result/otherresearchproduct</arg>
|
||||
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
||||
</spark>
|
||||
<ok to="join_dump"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
<action name="dump_software">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Dump table software </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpEntitiesJob</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${sourcePath}/software</arg>
|
||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
|
||||
<arg>--outputPath</arg><arg>${workingDir}/result/software</arg>
|
||||
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
||||
</spark>
|
||||
<ok to="join_dump"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
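    <!-- Note on output locations: the four result-type dumps above write under
         ${workingDir}/result and are merged later by collect_and_save, while the
         organization, project and datasource dumps below write straight to ${outputPath}. -->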
<action name="dump_organization">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Dump table organization </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpEntitiesJob</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${sourcePath}/organization</arg>
|
||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Organization</arg>
|
||||
<arg>--outputPath</arg><arg>${outputPath}/organization</arg>
|
||||
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
||||
</spark>
|
||||
<ok to="join_dump"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
<action name="dump_project">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Dump table project </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpEntitiesJob</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${sourcePath}/project</arg>
|
||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Project</arg>
|
||||
<arg>--outputPath</arg><arg>${outputPath}/project</arg>
|
||||
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
||||
</spark>
|
||||
<ok to="join_dump"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
<action name="dump_datasource">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Dump table datasource </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpEntitiesJob</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${sourcePath}/datasource</arg>
|
||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Datasource</arg>
|
||||
<arg>--outputPath</arg><arg>${outputPath}/datasource</arg>
|
||||
<arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
|
||||
</spark>
|
||||
<ok to="join_dump"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
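    <!-- Relation branch: SparkSelectValidRelationsJob first materialises the valid
         relations (presumably those whose source and target both belong to the dumped
         graph) under ${workingDir}/validrelation; dump_relation then serialises them. -->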
<action name="select_relation">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Select valid table relation </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkSelectValidRelationsJob</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${sourcePath}</arg>
|
||||
<arg>--outputPath</arg><arg>${workingDir}/validrelation</arg>
|
||||
</spark>
|
||||
<ok to="dump_relation"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
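    <!-- dump_relation reads the selected relations and writes them to
         ${workingDir}/relation/relation; the removeSet parameter apparently lists
         relations to be excluded from the dump. -->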
<action name="dump_relation">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Dump table relation </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpRelationJob</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${workingDir}/validrelation</arg>
|
||||
<arg>--outputPath</arg><arg>${workingDir}/relation/relation</arg>
|
||||
<arg>--removeSet</arg><arg>${removeSet}</arg>
|
||||
</spark>
|
||||
<ok to="join_dump"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
<join name="join_dump" to="fork_context"/>
|
||||
|
||||
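    <!-- Context phase: entities and relations derived from the community contexts
         (and from the organization/community mapping) are created on three parallel branches. -->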
<fork name="fork_context">
|
||||
<path start="create_entities_fromcontext"/>
|
||||
<path start="create_relation_fromcontext"/>
|
||||
<path start="create_relation_fromorgs"/>
|
||||
</fork>
|
||||
|
||||
<action name="create_entities_fromcontext">
|
||||
<java>
|
||||
<main-class>eu.dnetlib.dhp.oa.graph.dump.complete.CreateContextEntities</main-class>
|
||||
<arg>--hdfsPath</arg><arg>${outputPath}/communities_infrastructures/communities_infrastructure.json.gz</arg>
|
||||
<arg>--nameNode</arg><arg>${nameNode}</arg>
|
||||
<arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
|
||||
</java>
|
||||
<ok to="join_context"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
<action name="create_relation_fromcontext">
|
||||
<java>
|
||||
<main-class>eu.dnetlib.dhp.oa.graph.dump.complete.CreateContextRelation</main-class>
|
||||
<arg>--hdfsPath</arg><arg>${workingDir}/relation/context</arg>
|
||||
<arg>--nameNode</arg><arg>${nameNode}</arg>
|
||||
<arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
|
||||
</java>
|
||||
<ok to="join_context"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
<action name="create_relation_fromorgs">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Dump table relation </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkOrganizationRelation</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${sourcePath}/relation</arg>
|
||||
<arg>--outputPath</arg><arg>${workingDir}/relation/contextOrg</arg>
|
||||
<arg>--organizationCommunityMap</arg><arg>${organizationCommunityMap}</arg>
|
||||
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
||||
</spark>
|
||||
<ok to="join_context"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
<join name="join_context" to="fork_extract_relations"/>
|
||||
|
||||
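    <!-- Relations embedded in the result records themselves are extracted next,
         one branch per result type, all running the same SparkExtractRelationFromEntities job. -->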
<fork name="fork_extract_relations">
|
||||
<path start="rels_from_pubs"/>
|
||||
<path start="rels_from_dats"/>
|
||||
<path start="rels_from_orp"/>
|
||||
<path start="rels_from_sw"/>
|
||||
</fork>
|
||||
|
||||
<action name="rels_from_pubs">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Extract Relations from publication </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkExtractRelationFromEntities</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${sourcePath}/publication</arg>
|
||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
|
||||
<arg>--outputPath</arg><arg>${workingDir}/relation/publication</arg>
|
||||
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
||||
</spark>
|
||||
<ok to="join_extract_relations"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
<action name="rels_from_dats">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Dump table dataset </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkExtractRelationFromEntities</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${sourcePath}/dataset</arg>
|
||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
|
||||
<arg>--outputPath</arg><arg>${workingDir}/relation/dataset</arg>
|
||||
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
||||
</spark>
|
||||
<ok to="join_extract_relations"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
<action name="rels_from_orp">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Dump table ORP </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkExtractRelationFromEntities</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${sourcePath}/otherresearchproduct</arg>
|
||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
|
||||
<arg>--outputPath</arg><arg>${workingDir}/relation/orp</arg>
|
||||
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
||||
</spark>
|
||||
<ok to="join_extract_relations"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
<action name="rels_from_sw">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Dump table software </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkExtractRelationFromEntities</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${sourcePath}/software</arg>
|
||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
|
||||
<arg>--outputPath</arg><arg>${workingDir}/relation/software</arg>
|
||||
<arg>--communityMapPath</arg><arg>${communityMapPath}</arg>
|
||||
</spark>
|
||||
<ok to="join_extract_relations"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
<join name="join_extract_relations" to="collect_and_save"/>
|
||||
|
||||
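    <!-- Final step of this sub-workflow: SparkCollectAndSave gathers everything produced
         under ${workingDir} and lays it out under ${outputPath}; resultAggregation
         presumably controls whether the result types are merged into a single dataset. -->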
<action name="collect_and_save">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Collect Results and Relations and put them in the right path </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkCollectAndSave</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${workingDir}</arg>
|
||||
<arg>--outputPath</arg><arg>${outputPath}</arg>
|
||||
<arg>--resultAggregation</arg><arg>${resultAggregation}</arg>
|
||||
</spark>
|
||||
<ok to="End"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
|
||||
|
||||
<kill name="Kill">
|
||||
<message>Sub-workflow dump complete failed with error message ${wf:errorMessage()}
|
||||
</message>
|
||||
</kill>
|
||||
|
||||
<end name="End" />
|
||||
</workflow-app>
|
|
@ -1,30 +0,0 @@
<configuration>
    <property>
        <name>jobTracker</name>
        <value>yarnRM</value>
    </property>
    <property>
        <name>nameNode</name>
        <value>hdfs://nameservice1</value>
    </property>
    <property>
        <name>oozie.use.system.libpath</name>
        <value>true</value>
    </property>
    <property>
        <name>hiveMetastoreUris</name>
        <value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
    </property>
    <property>
        <name>hiveJdbcUrl</name>
        <value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000</value>
    </property>
    <property>
        <name>hiveDbName</name>
        <value>openaire</value>
    </property>
    <property>
        <name>oozie.launcher.mapreduce.user.classpath.first</name>
        <value>true</value>
    </property>
</configuration>
@ -1,2 +0,0 @@
## This is a classpath-based import file (this header is required)
dump_common classpath eu/dnetlib/dhp/oa/graph/dump/wf/subworkflows/commoncommunityfunder/oozie_app
@ -1,256 +0,0 @@
<workflow-app name="sub_dump_funder_results" xmlns="uri:oozie:workflow:0.5">
|
||||
<parameters>
|
||||
|
||||
<property>
|
||||
<name>sourcePath</name>
|
||||
<description>the source path</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>outputPath</name>
|
||||
<description>the output path</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>hiveDbName</name>
|
||||
<description>the target hive database name</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>hiveJdbcUrl</name>
|
||||
<description>hive server jdbc url</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>hiveMetastoreUris</name>
|
||||
<description>hive server metastore URIs</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>sparkDriverMemory</name>
|
||||
<description>memory for driver process</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>sparkExecutorMemory</name>
|
||||
<description>memory for individual executor</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>sparkExecutorCores</name>
|
||||
<description>number of cores used by single executor</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>oozieActionShareLibForSpark2</name>
|
||||
<description>oozie action sharelib for spark 2.*</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>spark2ExtraListeners</name>
|
||||
<value>com.cloudera.spark.lineage.NavigatorAppListener</value>
|
||||
<description>spark 2.* extra listeners classname</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>spark2SqlQueryExecutionListeners</name>
|
||||
<value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
|
||||
<description>spark 2.* sql query execution listeners classname</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>spark2YarnHistoryServerAddress</name>
|
||||
<description>spark 2.* yarn history server address</description>
|
||||
</property>
|
||||
<property>
|
||||
<name>spark2EventLogDir</name>
|
||||
<description>spark 2.* event log dir location</description>
|
||||
</property>
|
||||
</parameters>
|
||||
|
||||
    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>mapreduce.job.queuename</name>
                <value>${queueName}</value>
            </property>
            <property>
                <name>oozie.launcher.mapred.job.queue.name</name>
                <value>${oozieLauncherQueueName}</value>
            </property>
            <property>
                <name>oozie.action.sharelib.for.spark</name>
                <value>${oozieActionShareLibForSpark2}</value>
            </property>
        </configuration>
    </global>
<start to="fork_result_linked_to_projects"/>
|
||||
|
||||
<kill name="Kill">
|
||||
<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
|
||||
</kill>
|
||||
|
||||
|
||||
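    <!-- Funder dump: the four branches below run SparkResultLinkedToProject to keep
         only the results linked to at least one project, writing the selection
         under ${workingDir}/result. -->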
<fork name="fork_result_linked_to_projects">
|
||||
<path start="select_publication_linked_to_projects"/>
|
||||
<path start="select_dataset_linked_to_projects"/>
|
||||
<path start="select_orp_linked_to_project"/>
|
||||
<path start="select_software_linked_to_projects"/>
|
||||
</fork>
|
||||
|
||||
<action name="select_publication_linked_to_projects">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Dump funder results </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkResultLinkedToProject</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${sourcePath}/publication</arg>
|
||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
|
||||
<arg>--outputPath</arg><arg>${workingDir}/result/publication</arg>
|
||||
<arg>--graphPath</arg><arg>${sourcePath}</arg>
|
||||
</spark>
|
||||
<ok to="join_link"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
<action name="select_dataset_linked_to_projects">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Dump funder results </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkResultLinkedToProject</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${sourcePath}/dataset</arg>
|
||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
|
||||
<arg>--outputPath</arg><arg>${workingDir}/result/dataset</arg>
|
||||
<arg>--graphPath</arg><arg>${sourcePath}</arg>
|
||||
</spark>
|
||||
<ok to="join_link"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
<action name="select_orp_linked_to_project">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Dump funder results </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkResultLinkedToProject</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${sourcePath}/otherresearchproduct</arg>
|
||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
|
||||
<arg>--outputPath</arg><arg>${workingDir}/result/otherresearchproduct</arg>
|
||||
<arg>--graphPath</arg><arg>${sourcePath}</arg>
|
||||
</spark>
|
||||
<ok to="join_link"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
<action name="select_software_linked_to_projects">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Dump funder results </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkResultLinkedToProject</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${sourcePath}/software</arg>
|
||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
|
||||
<arg>--outputPath</arg><arg>${workingDir}/result/software</arg>
|
||||
<arg>--graphPath</arg><arg>${sourcePath}</arg>
|
||||
</spark>
|
||||
<ok to="join_link"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
<join name="join_link" to="common_action_community_funder"/>
|
||||
|
||||
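    <!-- The selected results are handed to the shared community/funder sub-workflow
         (imported as dump_common, see the classpath import file above), which applies
         the common dump steps before control returns here. -->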
<action name="common_action_community_funder">
|
||||
<sub-workflow>
|
||||
<app-path>${wf:appPath()}/dump_common
|
||||
</app-path>
|
||||
<propagate-configuration/>
|
||||
<configuration>
|
||||
<property>
|
||||
<name>sourcePath</name>
|
||||
<value>${sourcePath}</value>
|
||||
</property>
|
||||
<property>
|
||||
<name>selectedResults</name>
|
||||
<value>${workingDir}/result</value>
|
||||
</property>
|
||||
<property>
|
||||
<name>communityMapPath</name>
|
||||
<value>${workingDir}/communityMap</value>
|
||||
</property>
|
||||
<property>
|
||||
<name>outputPath</name>
|
||||
<value>${workingDir}</value>
|
||||
</property>
|
||||
</configuration>
|
||||
</sub-workflow>
|
||||
<ok to="dump_funder_results" />
|
||||
<error to="Kill" />
|
||||
</action>
|
||||
|
||||
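    <!-- SparkDumpFunderResults picks up the sub-workflow output from ${workingDir}/ext
         and writes the final dump, apparently grouped per funder, to ${outputPath}. -->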
<action name="dump_funder_results">
|
||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
||||
<master>yarn</master>
|
||||
<mode>cluster</mode>
|
||||
<name>Dump funder results </name>
|
||||
<class>eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkDumpFunderResults</class>
|
||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
||||
<spark-opts>
|
||||
--executor-memory=${sparkExecutorMemory}
|
||||
--executor-cores=${sparkExecutorCores}
|
||||
--driver-memory=${sparkDriverMemory}
|
||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
||||
</spark-opts>
|
||||
<arg>--sourcePath</arg><arg>${workingDir}/ext</arg>
|
||||
<arg>--outputPath</arg><arg>${outputPath}</arg>
|
||||
<arg>--graphPath</arg><arg>${sourcePath}</arg>
|
||||
</spark>
|
||||
<ok to="End"/>
|
||||
<error to="Kill"/>
|
||||
</action>
|
||||
|
||||
|
||||
<end name="End"/>
|
||||
|
||||
</workflow-app>
|
|
@ -3,6 +3,7 @@ SELECT
po.resporganization AS resporganization,
po.participantnumber AS participantnumber,
po.contribution AS contribution,
po.currency AS currency,
NULL AS startdate,
NULL AS enddate,
false AS inferred,
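-- note: startdate/enddate are apparently not available at this point in the query
-- (hence the NULL placeholders) and inferred is hard-coded to false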