[GRAPH DUMP] Moving Measures #159

@@ -1,413 +0,0 @@
package eu.dnetlib.dhp.common;

import java.io.Serializable;
import java.util.*;
import java.util.stream.Collectors;

import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.dump.oaf.*;
import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityInstance;
import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
import eu.dnetlib.dhp.schema.oaf.DataInfo;
import eu.dnetlib.dhp.schema.oaf.Field;
import eu.dnetlib.dhp.schema.oaf.Journal;
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;

public class GraphResultMapper implements Serializable {

    public static <E extends eu.dnetlib.dhp.schema.oaf.OafEntity> Result map(
        E in) {

        CommunityResult out = new CommunityResult();

        eu.dnetlib.dhp.schema.oaf.Result input = (eu.dnetlib.dhp.schema.oaf.Result) in;
        Optional<eu.dnetlib.dhp.schema.oaf.Qualifier> ort = Optional.ofNullable(input.getResulttype());
        if (ort.isPresent()) {
            switch (ort.get().getClassid()) {
                case "publication":
                    Optional<Journal> journal = Optional
                        .ofNullable(((eu.dnetlib.dhp.schema.oaf.Publication) input).getJournal());
                    if (journal.isPresent()) {
                        Journal j = journal.get();
                        Container c = new Container();
                        c.setConferencedate(j.getConferencedate());
                        c.setConferenceplace(j.getConferenceplace());
                        c.setEdition(j.getEdition());
                        c.setEp(j.getEp());
                        c.setIss(j.getIss());
                        c.setIssnLinking(j.getIssnLinking());
                        c.setIssnOnline(j.getIssnOnline());
                        c.setIssnPrinted(j.getIssnPrinted());
                        c.setName(j.getName());
                        c.setSp(j.getSp());
                        c.setVol(j.getVol());
                        out.setContainer(c);
                        out.setType(ModelConstants.PUBLICATION_DEFAULT_RESULTTYPE.getClassname());
                    }
                    break;
                case "dataset":
                    eu.dnetlib.dhp.schema.oaf.Dataset id = (eu.dnetlib.dhp.schema.oaf.Dataset) input;
                    Optional.ofNullable(id.getSize()).ifPresent(v -> out.setSize(v.getValue()));
                    Optional.ofNullable(id.getVersion()).ifPresent(v -> out.setVersion(v.getValue()));

                    out
                        .setGeolocation(
                            Optional
                                .ofNullable(id.getGeolocation())
                                .map(
                                    igl -> igl
                                        .stream()
                                        .filter(Objects::nonNull)
                                        .map(gli -> {
                                            GeoLocation gl = new GeoLocation();
                                            gl.setBox(gli.getBox());
                                            gl.setPlace(gli.getPlace());
                                            gl.setPoint(gli.getPoint());
                                            return gl;
                                        })
                                        .collect(Collectors.toList()))
                                .orElse(null));

                    out.setType(ModelConstants.DATASET_DEFAULT_RESULTTYPE.getClassname());
                    break;
                case "software":

                    eu.dnetlib.dhp.schema.oaf.Software is = (eu.dnetlib.dhp.schema.oaf.Software) input;
                    Optional
                        .ofNullable(is.getCodeRepositoryUrl())
                        .ifPresent(value -> out.setCodeRepositoryUrl(value.getValue()));
                    Optional
                        .ofNullable(is.getDocumentationUrl())
                        .ifPresent(
                            value -> out
                                .setDocumentationUrl(
                                    value
                                        .stream()
                                        .map(Field::getValue)
                                        .collect(Collectors.toList())));

                    Optional
                        .ofNullable(is.getProgrammingLanguage())
                        .ifPresent(value -> out.setProgrammingLanguage(value.getClassid()));

                    out.setType(ModelConstants.SOFTWARE_DEFAULT_RESULTTYPE.getClassname());
                    break;
                case "other":

                    eu.dnetlib.dhp.schema.oaf.OtherResearchProduct ir = (eu.dnetlib.dhp.schema.oaf.OtherResearchProduct) input;
                    out
                        .setContactgroup(
                            Optional
                                .ofNullable(ir.getContactgroup())
                                .map(value -> value.stream().map(Field::getValue).collect(Collectors.toList()))
                                .orElse(null));

                    out
                        .setContactperson(
                            Optional
                                .ofNullable(ir.getContactperson())
                                .map(value -> value.stream().map(Field::getValue).collect(Collectors.toList()))
                                .orElse(null));
                    out
                        .setTool(
                            Optional
                                .ofNullable(ir.getTool())
                                .map(value -> value.stream().map(Field::getValue).collect(Collectors.toList()))
                                .orElse(null));

                    out.setType(ModelConstants.ORP_DEFAULT_RESULTTYPE.getClassname());

                    break;
            }

            Optional
                .ofNullable(input.getAuthor())
                .ifPresent(
                    ats -> out.setAuthor(ats.stream().map(GraphResultMapper::getAuthor).collect(Collectors.toList())));

            // I do not map Access Right UNKNOWN or OTHER

            Optional<eu.dnetlib.dhp.schema.oaf.Qualifier> oar = Optional.ofNullable(input.getBestaccessright());
            if (oar.isPresent()) {
                if (Constants.accessRightsCoarMap.containsKey(oar.get().getClassid())) {
                    String code = Constants.accessRightsCoarMap.get(oar.get().getClassid());
                    out
                        .setBestaccessright(
                            AccessRight
                                .newInstance(
                                    code,
                                    Constants.coarCodeLabelMap.get(code),
                                    Constants.COAR_ACCESS_RIGHT_SCHEMA));
                }
            }

            final List<String> contributorList = new ArrayList<>();
            Optional
                .ofNullable(input.getContributor())
                .ifPresent(value -> value.stream().forEach(c -> contributorList.add(c.getValue())));
            out.setContributor(contributorList);

            Optional
                .ofNullable(input.getCountry())
                .ifPresent(
                    value -> out
                        .setCountry(
                            value
                                .stream()
                                .map(
                                    c -> {
                                        if (c.getClassid().equals((ModelConstants.UNKNOWN))) {
                                            return null;
                                        }
                                        Country country = new Country();
                                        country.setCode(c.getClassid());
                                        country.setLabel(c.getClassname());
                                        Optional
                                            .ofNullable(c.getDataInfo())
                                            .ifPresent(
                                                provenance -> country
                                                    .setProvenance(
                                                        Provenance
                                                            .newInstance(
                                                                provenance
                                                                    .getProvenanceaction()
                                                                    .getClassname(),
                                                                c.getDataInfo().getTrust())));
                                        return country;
                                    })
                                .filter(Objects::nonNull)
                                .collect(Collectors.toList())));

            final List<String> coverageList = new ArrayList<>();
            Optional
                .ofNullable(input.getCoverage())
                .ifPresent(value -> value.stream().forEach(c -> coverageList.add(c.getValue())));
            out.setCoverage(coverageList);

            out.setDateofcollection(input.getDateofcollection());

            final List<String> descriptionList = new ArrayList<>();
            Optional
                .ofNullable(input.getDescription())
                .ifPresent(value -> value.forEach(d -> descriptionList.add(d.getValue())));
            out.setDescription(descriptionList);
            Optional<Field<String>> oStr = Optional.ofNullable(input.getEmbargoenddate());
            if (oStr.isPresent()) {
                out.setEmbargoenddate(oStr.get().getValue());
            }

            final List<String> formatList = new ArrayList<>();
            Optional
                .ofNullable(input.getFormat())
                .ifPresent(value -> value.stream().forEach(f -> formatList.add(f.getValue())));
            out.setFormat(formatList);
            out.setId(input.getId());
            out.setOriginalId(input.getOriginalId());

            Optional<List<eu.dnetlib.dhp.schema.oaf.Instance>> oInst = Optional
                .ofNullable(input.getInstance());

            if (oInst.isPresent()) {
                out
                    .setInstance(
                        oInst.get().stream().map(GraphResultMapper::getInstance).collect(Collectors.toList()));

            }

            Optional<eu.dnetlib.dhp.schema.oaf.Qualifier> oL = Optional.ofNullable(input.getLanguage());
            if (oL.isPresent()) {
                eu.dnetlib.dhp.schema.oaf.Qualifier language = oL.get();
                out.setLanguage(Qualifier.newInstance(language.getClassid(), language.getClassname()));
            }
            Optional<Long> oLong = Optional.ofNullable(input.getLastupdatetimestamp());
            if (oLong.isPresent()) {
                out.setLastupdatetimestamp(oLong.get());
            }
            Optional<List<StructuredProperty>> otitle = Optional.ofNullable(input.getTitle());
            if (otitle.isPresent()) {
                List<StructuredProperty> iTitle = otitle
                    .get()
                    .stream()
                    .filter(t -> t.getQualifier().getClassid().equalsIgnoreCase("main title"))
                    .collect(Collectors.toList());
                if (!iTitle.isEmpty()) {
                    out.setMaintitle(iTitle.get(0).getValue());
                }

                iTitle = otitle
                    .get()
                    .stream()
                    .filter(t -> t.getQualifier().getClassid().equalsIgnoreCase("subtitle"))
                    .collect(Collectors.toList());
                if (!iTitle.isEmpty()) {
                    out.setSubtitle(iTitle.get(0).getValue());
                }

            }

            List<ControlledField> pids = new ArrayList<>();
            Optional
                .ofNullable(input.getPid())
                .ifPresent(
                    value -> value
                        .stream()
                        .forEach(
                            p -> pids
                                .add(
                                    ControlledField
                                        .newInstance(p.getQualifier().getClassid(), p.getValue()))));
            out.setPid(pids);
            oStr = Optional.ofNullable(input.getDateofacceptance());
            if (oStr.isPresent()) {
                out.setPublicationdate(oStr.get().getValue());
            }
            oStr = Optional.ofNullable(input.getPublisher());
            if (oStr.isPresent()) {
                out.setPublisher(oStr.get().getValue());
            }

            List<String> sourceList = new ArrayList<>();
            Optional
                .ofNullable(input.getSource())
                .ifPresent(value -> value.stream().forEach(s -> sourceList.add(s.getValue())));
            // out.setSource(input.getSource().stream().map(s -> s.getValue()).collect(Collectors.toList()));
            List<Subject> subjectList = new ArrayList<>();
            Optional
                .ofNullable(input.getSubject())
                .ifPresent(
                    value -> value
                        .forEach(s -> subjectList.add(getSubject(s))));

            out.setSubjects(subjectList);

            out.setType(input.getResulttype().getClassid());
        }

        out
            .setCollectedfrom(
                input
                    .getCollectedfrom()
                    .stream()
                    .map(cf -> KeyValue.newInstance(cf.getKey(), cf.getValue()))
                    .collect(Collectors.toList()));

        return out;

    }

    private static CommunityInstance getInstance(eu.dnetlib.dhp.schema.oaf.Instance i) {
        CommunityInstance instance = new CommunityInstance();

        setCommonValue(i, instance);

        instance
            .setCollectedfrom(
                KeyValue
                    .newInstance(i.getCollectedfrom().getKey(), i.getCollectedfrom().getValue()));

        instance
            .setHostedby(
                KeyValue.newInstance(i.getHostedby().getKey(), i.getHostedby().getValue()));

        return instance;

    }

    private static <I extends Instance> void setCommonValue(eu.dnetlib.dhp.schema.oaf.Instance i, I instance) {
        Optional<eu.dnetlib.dhp.schema.oaf.Qualifier> opAr = Optional
            .ofNullable(i.getAccessright());
        if (opAr.isPresent()) {
            if (Constants.accessRightsCoarMap.containsKey(opAr.get().getClassid())) {
                String code = Constants.accessRightsCoarMap.get(opAr.get().getClassid());
                instance
                    .setAccessright(
                        AccessRight
                            .newInstance(
                                code,
                                Constants.coarCodeLabelMap.get(code),
                                Constants.COAR_ACCESS_RIGHT_SCHEMA));
            }
        }

        Optional
            .ofNullable(i.getLicense())
            .ifPresent(value -> instance.setLicense(value.getValue()));
        Optional
            .ofNullable(i.getDateofacceptance())
            .ifPresent(value -> instance.setPublicationdate(value.getValue()));
        Optional
            .ofNullable(i.getRefereed())
            .ifPresent(value -> instance.setRefereed(value.getClassname()));
        Optional
            .ofNullable(i.getInstancetype())
            .ifPresent(value -> instance.setType(value.getClassname()));
        Optional.ofNullable(i.getUrl()).ifPresent(value -> instance.setUrl(value));

    }

    private static Subject getSubject(StructuredProperty s) {
        Subject subject = new Subject();
        subject.setSubject(ControlledField.newInstance(s.getQualifier().getClassid(), s.getValue()));
        Optional<DataInfo> di = Optional.ofNullable(s.getDataInfo());
        if (di.isPresent()) {
            Provenance p = new Provenance();
            p.setProvenance(di.get().getProvenanceaction().getClassname());
            p.setTrust(di.get().getTrust());
            subject.setProvenance(p);
        }

        return subject;
    }

    private static Author getAuthor(eu.dnetlib.dhp.schema.oaf.Author oa) {
        Author a = new Author();
        a.setFullname(oa.getFullname());
        a.setName(oa.getName());
        a.setSurname(oa.getSurname());
        a.setRank(oa.getRank());

        Optional<List<StructuredProperty>> oPids = Optional
            .ofNullable(oa.getPid());
        if (oPids.isPresent()) {
            Pid pid = getOrcid(oPids.get());
            if (pid != null) {
                a.setPid(pid);
            }
        }

        return a;
    }

    private static Pid getOrcid(List<StructuredProperty> p) {
        for (StructuredProperty pid : p) {
            if (pid.getQualifier().getClassid().equals(ModelConstants.ORCID)) {
                Optional<DataInfo> di = Optional.ofNullable(pid.getDataInfo());
                if (di.isPresent()) {
                    return Pid
                        .newInstance(
                            ControlledField
                                .newInstance(
                                    pid.getQualifier().getClassid(),
                                    pid.getValue()),
                            Provenance
                                .newInstance(
                                    di.get().getProvenanceaction().getClassname(),
                                    di.get().getTrust()));
                } else {
                    return Pid
                        .newInstance(
                            ControlledField
                                .newInstance(
                                    pid.getQualifier().getClassid(),
                                    pid.getValue())

                        );
                }

            }
        }
        return null;
    }

}
@@ -16,5 +16,5 @@ It defines [mappings](mappings.md) for transformation of different datasource (S

 ### Integration of external information in the result

-The workflows create new entity in the OpenAIRE format (OAF) which aim is to enrich the result already contained in the graph.
+The workflows create new entity in the OpenAIRE format (OAF) whose aim is to enrich the result already contained in the graph.
 See integration section for more insight
@@ -9,7 +9,7 @@ The information integrated so far is:
 1. Article impact measures
    1. [Bip!Finder](https://dl.acm.org/doi/10.1145/3357384.3357850) scores
 2. Result Subjects
-   1. Integration of Fields od Science and Techonology ([FOS](https://www.qnrf.org/en-us/FOS)) classification in
+   1. Integration of Fields of Science and Techonology ([FOS](https://www.qnrf.org/en-us/FOS)) classification in
      results subjects.

@@ -18,7 +18,7 @@ and the element in the OAF model that should be used to map the information we w

 The id is set by using a particular encoding of the given PID

-*unresolved:[pid]:[pidtype]*
+*unresolved::[pid]::[pidtype]*

 where
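For illustration only (the DOI below is a made-up placeholder), a record known through a DOI would therefore be keyed as `unresolved::10.1234/example::doi`. A minimal, hypothetical helper showing the encoding described above:

```java
// Hypothetical sketch, not part of this PR: it only illustrates the
// unresolved::[pid]::[pidtype] encoding described in the documentation.
public class UnresolvedIdSketch {
    static String unresolvedId(String pid, String pidType) {
        return String.join("::", "unresolved", pid, pidType);
    }

    public static void main(String[] args) {
        // prints: unresolved::10.1234/example::doi
        System.out.println(unresolvedId("10.1234/example", "doi"));
    }
}
```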
@@ -31,6 +31,6 @@ Such entities are matched against those available in the graph using the result.
 This mechanism can be used to integrate enrichments produced as associated by a given PID.
 If a match will be found with one of the results already in the graph that said result will be enriched with the information
 present in the new OAF.
-All the objects for which a match is not found are discarded.
+All the entities for which a match is not found are discarded.

@@ -12,13 +12,12 @@ import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.dump.oaf.*;
 import eu.dnetlib.dhp.schema.dump.oaf.AccessRight;
 import eu.dnetlib.dhp.schema.dump.oaf.Author;
-import eu.dnetlib.dhp.schema.dump.oaf.Country;
 import eu.dnetlib.dhp.schema.dump.oaf.GeoLocation;
 import eu.dnetlib.dhp.schema.dump.oaf.Instance;
-import eu.dnetlib.dhp.schema.dump.oaf.KeyValue;
+import eu.dnetlib.dhp.schema.dump.oaf.Measure;
 import eu.dnetlib.dhp.schema.dump.oaf.OpenAccessRoute;
-import eu.dnetlib.dhp.schema.dump.oaf.Qualifier;
 import eu.dnetlib.dhp.schema.dump.oaf.Result;
+import eu.dnetlib.dhp.schema.dump.oaf.community.CfHbKeyValue;
 import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityInstance;
 import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
 import eu.dnetlib.dhp.schema.dump.oaf.community.Context;
@@ -56,7 +55,8 @@ public class ResultMapper implements Serializable {
                 String code = Constants.accessRightsCoarMap.get(oar.get().getClassid());
                 out
                     .setBestaccessright(
-                        AccessRight
+                        BestAccessRight
                             .newInstance(
                                 code,
                                 Constants.coarCodeLabelMap.get(code),

@@ -81,7 +81,7 @@ public class ResultMapper implements Serializable {
                             if (c.getClassid().equals((ModelConstants.UNKNOWN))) {
                                 return null;
                             }
-                            Country country = new Country();
+                            ResultCountry country = new ResultCountry();
                             country.setCode(c.getClassid());
                             country.setLabel(c.getClassname());
                             Optional

@@ -124,7 +124,17 @@ public class ResultMapper implements Serializable {
             .ifPresent(value -> value.stream().forEach(f -> formatList.add(f.getValue())));
         out.setFormat(formatList);
         out.setId(input.getId());
-        out.setOriginalId(input.getOriginalId());
+        out.setOriginalId(new ArrayList<>());
+        Optional
+            .ofNullable(input.getOriginalId())
+            .ifPresent(
+                v -> out
+                    .setOriginalId(
+                        input
+                            .getOriginalId()
+                            .stream()
+                            .filter(s -> !s.startsWith("50|"))
+                            .collect(Collectors.toList())));

         Optional<List<eu.dnetlib.dhp.schema.oaf.Instance>> oInst = Optional
             .ofNullable(input.getInstance());
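Note on the originalId change above: identifiers starting with "50|" appear to be the internal OpenAIRE identifiers of result records, so the added filter keeps only the identifiers assigned by the original data sources.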
@@ -148,7 +158,7 @@ public class ResultMapper implements Serializable {
         Optional<eu.dnetlib.dhp.schema.oaf.Qualifier> oL = Optional.ofNullable(input.getLanguage());
         if (oL.isPresent()) {
             eu.dnetlib.dhp.schema.oaf.Qualifier language = oL.get();
-            out.setLanguage(Qualifier.newInstance(language.getClassid(), language.getClassname()));
+            out.setLanguage(Language.newInstance(language.getClassid(), language.getClassname()));
         }
         Optional<Long> oLong = Optional.ofNullable(input.getLastupdatetimestamp());
         if (oLong.isPresent()) {

@@ -184,7 +194,7 @@ public class ResultMapper implements Serializable {
                     value
                         .stream()
                         .map(
-                            p -> ControlledField
+                            p -> ResultPid
                                 .newInstance(p.getQualifier().getClassid(), p.getValue()))
                         .collect(Collectors.toList())));

@@ -219,7 +229,7 @@ public class ResultMapper implements Serializable {
                 input
                     .getCollectedfrom()
                     .stream()
-                    .map(cf -> KeyValue.newInstance(cf.getKey(), cf.getValue()))
+                    .map(cf -> CfHbKeyValue.newInstance(cf.getKey(), cf.getValue()))
                     .collect(Collectors.toList()));

         Set<String> communities = communityMap.keySet();

@@ -412,12 +422,12 @@ public class ResultMapper implements Serializable {

         instance
             .setCollectedfrom(
-                KeyValue
+                CfHbKeyValue
                     .newInstance(i.getCollectedfrom().getKey(), i.getCollectedfrom().getValue()));

         instance
             .setHostedby(
-                KeyValue.newInstance(i.getHostedby().getKey(), i.getHostedby().getValue()));
+                CfHbKeyValue.newInstance(i.getHostedby().getKey(), i.getHostedby().getValue()));

         return instance;

@@ -436,6 +446,17 @@ public class ResultMapper implements Serializable {
                             code,
                             Constants.coarCodeLabelMap.get(code),
                             Constants.COAR_ACCESS_RIGHT_SCHEMA));

+            Optional<List<eu.dnetlib.dhp.schema.oaf.Measure>> mes = Optional.ofNullable(i.getMeasures());
+            if (mes.isPresent()) {
+                List<Measure> measure = new ArrayList<>();
+                mes
+                    .get()
+                    .forEach(
+                        m -> m.getUnit().forEach(u -> measure.add(Measure.newInstance(m.getId(), u.getValue()))));
+                instance.setMeasures(measure);
+            }
+
         if (opAr.get().getOpenAccessRoute() != null) {
             switch (opAr.get().getOpenAccessRoute()) {
                 case hybrid:
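The block added above flattens each OAF measure (an id plus a list of key/value units) into one dump-side Measure per unit. A self-contained sketch of that flattening, using made-up measure ids and scores purely as placeholders:

```java
import java.util.*;

// Standalone illustration of the flattening performed by the added block above;
// the measure ids ("influence", "popularity") and the scores are invented placeholders.
public class MeasureFlatteningSketch {
    public static void main(String[] args) {
        Map<String, List<String>> oafMeasures = new LinkedHashMap<>();
        oafMeasures.put("influence", Collections.singletonList("6.3e-09"));
        oafMeasures.put("popularity", Arrays.asList("2.1e-09", "C"));

        // one (id, value) pair per unit, mirroring Measure.newInstance(m.getId(), u.getValue())
        List<String[]> dumpMeasures = new ArrayList<>();
        oafMeasures.forEach((id, units) -> units.forEach(u -> dumpMeasures.add(new String[] { id, u })));

        dumpMeasures.forEach(m -> System.out.println(m[0] + " -> " + m[1]));
    }
}
```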
@@ -463,7 +484,7 @@ public class ResultMapper implements Serializable {
             .setPid(
                 pid
                     .stream()
-                    .map(p -> ControlledField.newInstance(p.getQualifier().getClassid(), p.getValue()))
+                    .map(p -> ResultPid.newInstance(p.getQualifier().getClassid(), p.getValue()))
                     .collect(Collectors.toList())));

         Optional

@@ -473,7 +494,7 @@ public class ResultMapper implements Serializable {
             .setAlternateIdentifier(
                 ai
                     .stream()
-                    .map(p -> ControlledField.newInstance(p.getQualifier().getClassid(), p.getValue()))
+                    .map(p -> AlternateIdentifier.newInstance(p.getQualifier().getClassid(), p.getValue()))
                     .collect(Collectors.toList())));

         Optional

@@ -553,7 +574,7 @@ public class ResultMapper implements Serializable {

     private static Subject getSubject(StructuredProperty s) {
         Subject subject = new Subject();
-        subject.setSubject(ControlledField.newInstance(s.getQualifier().getClassid(), s.getValue()));
+        subject.setSubject(SubjectSchemeValue.newInstance(s.getQualifier().getClassid(), s.getValue()));
         Optional<DataInfo> di = Optional.ofNullable(s.getDataInfo());
         if (di.isPresent()) {
             Provenance p = new Provenance();

@@ -575,7 +596,7 @@ public class ResultMapper implements Serializable {
         Optional<List<StructuredProperty>> oPids = Optional
             .ofNullable(oa.getPid());
         if (oPids.isPresent()) {
-            Pid pid = getOrcid(oPids.get());
+            AuthorPid pid = getOrcid(oPids.get());
             if (pid != null) {
                 a.setPid(pid);
             }

@@ -584,12 +605,12 @@ public class ResultMapper implements Serializable {
         return a;
     }

-    private static Pid getAuthorPid(StructuredProperty pid) {
+    private static AuthorPid getAuthorPid(StructuredProperty pid) {
         Optional<DataInfo> di = Optional.ofNullable(pid.getDataInfo());
         if (di.isPresent()) {
-            return Pid
+            return AuthorPid
                 .newInstance(
-                    ControlledField
+                    AuthorPidSchemeValue
                         .newInstance(
                             pid.getQualifier().getClassid(),
                             pid.getValue()),

@@ -598,9 +619,9 @@ public class ResultMapper implements Serializable {
                     di.get().getProvenanceaction().getClassname(),
                     di.get().getTrust()));
         } else {
-            return Pid
+            return AuthorPid
                 .newInstance(
-                    ControlledField
+                    AuthorPidSchemeValue
                         .newInstance(
                             pid.getQualifier().getClassid(),
                             pid.getValue())

@@ -609,7 +630,7 @@ public class ResultMapper implements Serializable {
         }
     }

-    private static Pid getOrcid(List<StructuredProperty> p) {
+    private static AuthorPid getOrcid(List<StructuredProperty> p) {
         List<StructuredProperty> pidList = p.stream().map(pid -> {
             if (pid.getQualifier().getClassid().equals(ModelConstants.ORCID) ||
                 (pid.getQualifier().getClassid().equals(ModelConstants.ORCID_PENDING))) {
@@ -41,7 +41,7 @@ public class CreateContextEntities implements Serializable {
             .toString(
                 CreateContextEntities.class
                     .getResourceAsStream(
-                        "/eu/dnetlib/dhp/oa/graph/dump/complete/input_entity_parameter.json"));
+                        "/eu/dnetlib/dhp/oa/graph/dump/input_entity_parameter.json"));

         final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
         parser.parseArgument(args);

@@ -48,7 +48,7 @@ public class CreateContextRelation implements Serializable {
             .requireNonNull(
                 CreateContextRelation.class
                     .getResourceAsStream(
-                        "/eu/dnetlib/dhp/oa/graph/dump/complete/input_entity_parameter.json")));
+                        "/eu/dnetlib/dhp/oa/graph/dump/input_entity_parameter.json")));

         final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
         parser.parseArgument(args);
@@ -1,7 +1,6 @@

 package eu.dnetlib.dhp.oa.graph.dump.complete;

-import static com.jayway.jsonpath.Filter.filter;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

 import java.io.Serializable;

@@ -11,9 +10,7 @@ import java.util.stream.Collectors;

 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.FilterFunction;
-import org.apache.spark.api.java.function.ForeachFunction;
 import org.apache.spark.api.java.function.MapFunction;
-import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.SaveMode;
 import org.apache.spark.sql.SparkSession;

@@ -22,8 +19,6 @@ import org.dom4j.DocumentException;
 import org.dom4j.Node;
 import org.dom4j.io.SAXReader;

-import com.fasterxml.jackson.databind.ObjectMapper;
-
 import eu.dnetlib.dhp.oa.graph.dump.DumpProducts;
 import eu.dnetlib.dhp.oa.graph.dump.Utils;
 import eu.dnetlib.dhp.schema.common.ModelSupport;
@@ -133,14 +128,14 @@ public class DumpGraphEntities implements Serializable {
             .ifPresent(
                 pids -> pids
                     .stream()
-                    .map(p -> ControlledField.newInstance(p.getQualifier().getClassid(), p.getValue()))
+                    .map(p -> DatasourcePid.newInstance(p.getQualifier().getClassid(), p.getValue()))
                     .collect(Collectors.toList()));

         Optional
             .ofNullable(d.getDatasourcetype())
             .ifPresent(
                 dsType -> datasource
-                    .setDatasourcetype(ControlledField.newInstance(dsType.getClassid(), dsType.getClassname())));
+                    .setDatasourcetype(DatasourceSchemeValue.newInstance(dsType.getClassid(), dsType.getClassname())));

         Optional
             .ofNullable(d.getOpenairecompatibility())

@@ -499,7 +494,7 @@ public class DumpGraphEntities implements Serializable {
             .ifPresent(
                 value -> {
                     if (!value.getClassid().equals(Constants.UNKNOWN)) {
-                        organization.setCountry(Qualifier.newInstance(value.getClassid(), value.getClassname()));
+                        organization.setCountry(Country.newInstance(value.getClassid(), value.getClassname()));
                     }

                 });

@@ -515,7 +510,7 @@ public class DumpGraphEntities implements Serializable {
             .setPid(
                 value
                     .stream()
-                    .map(p -> ControlledField.newInstance(p.getQualifier().getClassid(), p.getValue()))
+                    .map(p -> OrganizationPid.newInstance(p.getQualifier().getClassid(), p.getValue()))
                     .collect(Collectors.toList())));

         return organization;
@@ -31,7 +31,7 @@ public class SparkCollectAndSave implements Serializable {
             .toString(
                 SparkCollectAndSave.class
                     .getResourceAsStream(
-                        "/eu/dnetlib/dhp/oa/graph/dump/complete/input_collect_and_save.json"));
+                        "/eu/dnetlib/dhp/oa/graph/dump/input_collect_and_save.json"));

         final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
         parser.parseArgument(args);

@@ -22,7 +22,7 @@ public class SparkDumpEntitiesJob implements Serializable {
             .toString(
                 SparkDumpEntitiesJob.class
                     .getResourceAsStream(
-                        "/eu/dnetlib/dhp/oa/graph/dump/complete/input_parameters.json"));
+                        "/eu/dnetlib/dhp/oa/graph/dump/input_parameters.json"));

         final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
         parser.parseArgument(args);
@@ -4,10 +4,11 @@ package eu.dnetlib.dhp.oa.graph.dump.complete;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

 import java.io.Serializable;
-import java.util.Optional;
+import java.util.*;

 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.FilterFunction;
 import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;

@@ -37,7 +38,7 @@ public class SparkDumpRelationJob implements Serializable {
             .toString(
                 SparkDumpRelationJob.class
                     .getResourceAsStream(
-                        "/eu/dnetlib/dhp/oa/graph/dump/complete/input_relationdump_parameters.json"));
+                        "/eu/dnetlib/dhp/oa/graph/dump/input_relationdump_parameters.json"));

         final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
         parser.parseArgument(args);

@@ -54,6 +55,12 @@ public class SparkDumpRelationJob implements Serializable {
         final String outputPath = parser.get("outputPath");
         log.info("outputPath: {}", outputPath);

+        Optional<String> rs = Optional.ofNullable(parser.get("removeSet"));
+        final Set<String> removeSet = new HashSet<>();
+        if (rs.isPresent()) {
+            Collections.addAll(removeSet, rs.get().split(";"));
+        }
+
         SparkConf conf = new SparkConf();

         runWithSparkSession(

@@ -61,15 +68,16 @@ public class SparkDumpRelationJob implements Serializable {
             isSparkSessionManaged,
             spark -> {
                 Utils.removeOutputDir(spark, outputPath);
-                dumpRelation(spark, inputPath, outputPath);
+                dumpRelation(spark, inputPath, outputPath, removeSet);

             });

     }

-    private static void dumpRelation(SparkSession spark, String inputPath, String outputPath) {
+    private static void dumpRelation(SparkSession spark, String inputPath, String outputPath, Set<String> removeSet) {
         Dataset<Relation> relations = Utils.readPath(spark, inputPath, Relation.class);
         relations
+            .filter((FilterFunction<Relation>) r -> !removeSet.contains(r.getRelClass()))
             .map((MapFunction<Relation, eu.dnetlib.dhp.schema.dump.oaf.graph.Relation>) relation -> {
                 eu.dnetlib.dhp.schema.dump.oaf.graph.Relation relNew = new eu.dnetlib.dhp.schema.dump.oaf.graph.Relation();
                 relNew
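The new removeSet argument introduced above is parsed as a semicolon-separated list of relation classes, and relations whose relClass appears in the set are skipped before dumping. A hypothetical sketch of that interpretation (the relation class names are placeholders, not values prescribed by the PR):

```java
import java.util.*;

// Hypothetical sketch of how the removeSet parameter is interpreted:
// a semicolon-separated list of relation classes to exclude from the dump.
public class RemoveSetSketch {
    public static void main(String[] args) {
        String removeSetArg = "relClassA;relClassB"; // placeholder values
        Set<String> removeSet = new HashSet<>();
        Collections.addAll(removeSet, removeSetArg.split(";"));
        // the job keeps only relations whose relClass is NOT in removeSet
        System.out.println(removeSet.contains("relClassA")); // true -> such relations are skipped
    }
}
```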
@@ -39,7 +39,7 @@ public class SparkOrganizationRelation implements Serializable {
             .toString(
                 SparkOrganizationRelation.class
                     .getResourceAsStream(
-                        "/eu/dnetlib/dhp/oa/graph/dump/complete/input_organization_parameters.json"));
+                        "/eu/dnetlib/dhp/oa/graph/dump/input_organization_parameters.json"));

         final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
         parser.parseArgument(args);

@@ -35,7 +35,7 @@ public class SparkSelectValidRelationsJob implements Serializable {
             .toString(
                 SparkSelectValidRelationsJob.class
                     .getResourceAsStream(
-                        "/eu/dnetlib/dhp/oa/graph/dump/complete/input_relationdump_parameters.json"));
+                        "/eu/dnetlib/dhp/oa/graph/dump/input_relationdump_parameters.json"));

         final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
         parser.parseArgument(args);
@@ -1,30 +0,0 @@
<configuration>
    <property>
        <name>jobTracker</name>
        <value>yarnRM</value>
    </property>
    <property>
        <name>nameNode</name>
        <value>hdfs://nameservice1</value>
    </property>
    <property>
        <name>oozie.use.system.libpath</name>
        <value>true</value>
    </property>
    <property>
        <name>hiveMetastoreUris</name>
        <value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
    </property>
    <property>
        <name>hiveJdbcUrl</name>
        <value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000</value>
    </property>
    <property>
        <name>hiveDbName</name>
        <value>openaire</value>
    </property>
    <property>
        <name>oozie.launcher.mapreduce.user.classpath.first</name>
        <value>true</value>
    </property>
</configuration>
@@ -1,431 +0,0 @@
<workflow-app name="dump_community_products" xmlns="uri:oozie:workflow:0.5">

    <parameters>
        <property>
            <name>sourcePath</name>
            <description>the source path</description>
        </property>
        <property>
            <name>isLookUpUrl</name>
            <description>the isLookup service endpoint</description>
        </property>
        <property>
            <name>outputPath</name>
            <description>the output path</description>
        </property>
        <property>
            <name>accessToken</name>
            <description>the access token used for the deposition in Zenodo</description>
        </property>
        <property>
            <name>connectionUrl</name>
            <description>the connection url for Zenodo</description>
        </property>
        <property>
            <name>metadata</name>
            <description> the metadata associated to the deposition</description>
        </property>
        <property>
            <name>depositionType</name>
            <description>one among {new, update, version}</description>
        </property>
        <property>
            <name>conceptRecordId</name>
            <description>for new version, the id of the record for the old deposition</description>
        </property>
        <property>
            <name>hiveDbName</name>
            <description>the target hive database name</description>
        </property>
        <property>
            <name>hiveJdbcUrl</name>
            <description>hive server jdbc url</description>
        </property>
        <property>
            <name>hiveMetastoreUris</name>
            <description>hive server metastore URIs</description>
        </property>
        <property>
            <name>sparkDriverMemory</name>
            <description>memory for driver process</description>
        </property>
        <property>
            <name>sparkExecutorMemory</name>
            <description>memory for individual executor</description>
        </property>
        <property>
            <name>sparkExecutorCores</name>
            <description>number of cores used by single executor</description>
        </property>
        <property>
            <name>oozieActionShareLibForSpark2</name>
            <description>oozie action sharelib for spark 2.*</description>
        </property>
        <property>
            <name>spark2ExtraListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorAppListener</value>
            <description>spark 2.* extra listeners classname</description>
        </property>
        <property>
            <name>spark2SqlQueryExecutionListeners</name>
            <value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
            <description>spark 2.* sql query execution listeners classname</description>
        </property>
        <property>
            <name>spark2YarnHistoryServerAddress</name>
            <description>spark 2.* yarn history server address</description>
        </property>
        <property>
            <name>spark2EventLogDir</name>
            <description>spark 2.* event log dir location</description>
        </property>
    </parameters>

    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>mapreduce.job.queuename</name>
                <value>${queueName}</value>
            </property>
            <property>
                <name>oozie.launcher.mapred.job.queue.name</name>
                <value>${oozieLauncherQueueName}</value>
            </property>
            <property>
                <name>oozie.action.sharelib.for.spark</name>
                <value>${oozieActionShareLibForSpark2}</value>
            </property>
        </configuration>
    </global>

    <start to="reset_outputpath"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <action name="reset_outputpath">
        <fs>
            <delete path="${outputPath}"/>
            <mkdir path="${outputPath}"/>
        </fs>
        <ok to="save_community_map"/>
        <error to="Kill"/>
    </action>

    <action name="save_community_map">
        <java>
            <main-class>eu.dnetlib.dhp.oa.graph.dump.SaveCommunityMap</main-class>
            <arg>--outputPath</arg><arg>${workingDir}/communityMap</arg>
            <arg>--nameNode</arg><arg>${nameNode}</arg>
            <arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
        </java>
        <ok to="fork_dump"/>
        <error to="Kill"/>
    </action>

    <fork name="fork_dump">
        <path start="dump_publication"/>
        <path start="dump_dataset"/>
        <path start="dump_orp"/>
        <path start="dump_software"/>
    </fork>

    <action name="dump_publication">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Dump table publication for community related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}/publication</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
            <arg>--outputPath</arg><arg>${workingDir}/publication</arg>
            <arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <action name="dump_dataset">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Dump table dataset for community related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}/dataset</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
            <arg>--outputPath</arg><arg>${workingDir}/dataset</arg>
            <arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <action name="dump_orp">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Dump table ORP for community related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}/otherresearchproduct</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
            <arg>--outputPath</arg><arg>${workingDir}/otherresearchproduct</arg>
            <arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <action name="dump_software">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Dump table software for community related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}/software</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
            <arg>--outputPath</arg><arg>${workingDir}/software</arg>
            <arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <join name="join_dump" to="prepareResultProject"/>

    <action name="prepareResultProject">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Prepare association result subset of project info</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkPrepareResultProject</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}</arg>
            <arg>--outputPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="fork_extendWithProject"/>
        <error to="Kill"/>
    </action>

    <fork name="fork_extendWithProject">
        <path start="extend_publication"/>
        <path start="extend_dataset"/>
        <path start="extend_orp"/>
        <path start="extend_software"/>
    </fork>

    <action name="extend_publication">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Extend dumped publications with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/publication</arg>
            <arg>--outputPath</arg><arg>${workingDir}/ext/publication</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <action name="extend_dataset">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Extend dumped dataset with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/dataset</arg>
            <arg>--outputPath</arg><arg>${workingDir}/ext/dataset</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <action name="extend_orp">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Extend dumped ORP with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/otherresearchproduct</arg>
            <arg>--outputPath</arg><arg>${workingDir}/ext/orp</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <action name="extend_software">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Extend dumped software with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/software</arg>
            <arg>--outputPath</arg><arg>${workingDir}/ext/software</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <join name="join_extend" to="splitForCommunities"/>

    <action name="splitForCommunities">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Split dumped result for community</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkSplitForCommunity</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/ext</arg>
            <arg>--outputPath</arg><arg>${workingDir}/split</arg>
            <arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
|
|
||||||
</spark>
|
|
||||||
<ok to="make_archive"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<action name="make_archive">
|
|
||||||
<java>
|
|
||||||
<main-class>eu.dnetlib.dhp.oa.graph.dump.MakeTar</main-class>
|
|
||||||
<arg>--hdfsPath</arg><arg>${outputPath}</arg>
|
|
||||||
<arg>--nameNode</arg><arg>${nameNode}</arg>
|
|
||||||
<arg>--sourcePath</arg><arg>${workingDir}/split</arg>
|
|
||||||
</java>
|
|
||||||
<ok to="send_zenodo"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<action name="send_zenodo">
|
|
||||||
<java>
|
|
||||||
<main-class>eu.dnetlib.dhp.oa.graph.dump.SendToZenodoHDFS</main-class>
|
|
||||||
<arg>--hdfsPath</arg><arg>${outputPath}</arg>
|
|
||||||
<arg>--nameNode</arg><arg>${nameNode}</arg>
|
|
||||||
<arg>--accessToken</arg><arg>${accessToken}</arg>
|
|
||||||
<arg>--connectionUrl</arg><arg>${connectionUrl}</arg>
|
|
||||||
<arg>--metadata</arg><arg>${metadata}</arg>
|
|
||||||
<arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
|
|
||||||
<arg>--conceptRecordId</arg><arg>${conceptRecordId}</arg>
|
|
||||||
<arg>--depositionId</arg><arg>${depositionId}</arg>
|
|
||||||
<arg>--depositionType</arg><arg>${depositionType}</arg>
|
|
||||||
</java>
|
|
||||||
<ok to="End"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<end name="End"/>
|
|
||||||
|
|
||||||
</workflow-app>
|
|
|
@ -1,30 +0,0 @@
<configuration>
    <property>
        <name>jobTracker</name>
        <value>yarnRM</value>
    </property>
    <property>
        <name>nameNode</name>
        <value>hdfs://nameservice1</value>
    </property>
    <property>
        <name>oozie.use.system.libpath</name>
        <value>true</value>
    </property>
    <property>
        <name>hiveMetastoreUris</name>
        <value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
    </property>
    <property>
        <name>hiveJdbcUrl</name>
        <value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000</value>
    </property>
    <property>
        <name>hiveDbName</name>
        <value>openaire</value>
    </property>
    <property>
        <name>oozie.launcher.mapreduce.user.classpath.first</name>
        <value>true</value>
    </property>
</configuration>

@ -1,586 +0,0 @@
|
||||||
<workflow-app name="dump_complete_graph" xmlns="uri:oozie:workflow:0.5">
|
|
||||||
|
|
||||||
<parameters>
|
|
||||||
<property>
|
|
||||||
<name>sourcePath</name>
|
|
||||||
<description>the source path</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>isLookUpUrl</name>
|
|
||||||
<description>the isLookup service endpoint</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>outputPath</name>
|
|
||||||
<description>the output path</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>resultAggregation</name>
|
|
||||||
<description>true if all the result types have to be dumped under result, false otherwise</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>accessToken</name>
|
|
||||||
<description>the access token used for the deposition in Zenodo</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>connectionUrl</name>
|
|
||||||
<description>the connection url for Zenodo</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>metadata</name>
|
|
||||||
<description>the metadata associated to the deposition</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>depositionType</name>
|
|
||||||
<description>the type of deposition to perform: "new" for a brand new deposition, "version" for a new version of a published deposition (in this case the concept record id must be provided), "upload" to upload content to an open deposition for which the deposition id is already known (in this case the deposition id must be provided)</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>conceptRecordId</name>
|
|
||||||
<description>for new version, the id of the record for the old deposition</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>depositionId</name>
|
|
||||||
<description>the depositionId of an open deposition to which content has to be added</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>organizationCommunityMap</name>
|
|
||||||
<description>the organization community map</description>
|
|
||||||
</property>
|
|
||||||
|
|
||||||
<property>
|
|
||||||
<name>hiveDbName</name>
|
|
||||||
<description>the target hive database name</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>hiveJdbcUrl</name>
|
|
||||||
<description>hive server jdbc url</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>hiveMetastoreUris</name>
|
|
||||||
<description>hive server metastore URIs</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>sparkDriverMemory</name>
|
|
||||||
<description>memory for driver process</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>sparkExecutorMemory</name>
|
|
||||||
<description>memory for individual executor</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>sparkExecutorCores</name>
|
|
||||||
<description>number of cores used by single executor</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>oozieActionShareLibForSpark2</name>
|
|
||||||
<description>oozie action sharelib for spark 2.*</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>spark2ExtraListeners</name>
|
|
||||||
<value>com.cloudera.spark.lineage.NavigatorAppListener</value>
|
|
||||||
<description>spark 2.* extra listeners classname</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>spark2SqlQueryExecutionListeners</name>
|
|
||||||
<value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
|
|
||||||
<description>spark 2.* sql query execution listeners classname</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>spark2YarnHistoryServerAddress</name>
|
|
||||||
<description>spark 2.* yarn history server address</description>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>spark2EventLogDir</name>
|
|
||||||
<description>spark 2.* event log dir location</description>
|
|
||||||
</property>
|
|
||||||
</parameters>
|
|
||||||
|
|
||||||
<global>
|
|
||||||
<job-tracker>${jobTracker}</job-tracker>
|
|
||||||
<name-node>${nameNode}</name-node>
|
|
||||||
<configuration>
|
|
||||||
<property>
|
|
||||||
<name>mapreduce.job.queuename</name>
|
|
||||||
<value>${queueName}</value>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>oozie.launcher.mapred.job.queue.name</name>
|
|
||||||
<value>${oozieLauncherQueueName}</value>
|
|
||||||
</property>
|
|
||||||
<property>
|
|
||||||
<name>oozie.action.sharelib.for.spark</name>
|
|
||||||
<value>${oozieActionShareLibForSpark2}</value>
|
|
||||||
</property>
|
|
||||||
|
|
||||||
</configuration>
|
|
||||||
</global>
|
|
||||||
|
|
||||||
<start to="reset_outputpath"/>
|
|
||||||
|
|
||||||
<kill name="Kill">
|
|
||||||
<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
|
|
||||||
</kill>
|
|
||||||
|
|
||||||
<action name="reset_outputpath">
|
|
||||||
<fs>
|
|
||||||
<delete path="${outputPath}"/>
|
|
||||||
<mkdir path="${outputPath}"/>
|
|
||||||
</fs>
|
|
||||||
<ok to="save_community_map"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<action name="save_community_map">
|
|
||||||
<java>
|
|
||||||
<main-class>eu.dnetlib.dhp.oa.graph.dump.SaveCommunityMap</main-class>
|
|
||||||
<arg>--outputPath</arg><arg>${workingDir}/communityMap</arg>
|
|
||||||
<arg>--nameNode</arg><arg>${nameNode}</arg>
|
|
||||||
<arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
|
|
||||||
</java>
|
|
||||||
<ok to="fork_dump"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<fork name="fork_dump">
|
|
||||||
<path start="dump_publication"/>
|
|
||||||
<path start="dump_dataset"/>
|
|
||||||
<path start="dump_orp"/>
|
|
||||||
<path start="dump_software"/>
|
|
||||||
<path start="dump_organization"/>
|
|
||||||
<path start="dump_project"/>
|
|
||||||
<path start="dump_datasource"/>
|
|
||||||
<path start="dump_relation"/>
|
|
||||||
</fork>
|
|
||||||
|
|
||||||
<action name="dump_publication">
|
|
||||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
||||||
<master>yarn</master>
|
|
||||||
<mode>cluster</mode>
|
|
||||||
<name>Dump table publication </name>
|
|
||||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpEntitiesJob</class>
|
|
||||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
||||||
<spark-opts>
|
|
||||||
--executor-memory=${sparkExecutorMemory}
|
|
||||||
--executor-cores=${sparkExecutorCores}
|
|
||||||
--driver-memory=${sparkDriverMemory}
|
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
||||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
||||||
</spark-opts>
|
|
||||||
<arg>--sourcePath</arg><arg>${sourcePath}/publication</arg>
|
|
||||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
|
|
||||||
<arg>--outputPath</arg><arg>${workingDir}/result/publication</arg>
|
|
||||||
<arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
|
|
||||||
</spark>
|
|
||||||
<ok to="join_dump"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<action name="dump_dataset">
|
|
||||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
||||||
<master>yarn</master>
|
|
||||||
<mode>cluster</mode>
|
|
||||||
<name>Dump table dataset </name>
|
|
||||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpEntitiesJob</class>
|
|
||||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
||||||
<spark-opts>
|
|
||||||
--executor-memory=${sparkExecutorMemory}
|
|
||||||
--executor-cores=${sparkExecutorCores}
|
|
||||||
--driver-memory=${sparkDriverMemory}
|
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
||||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
||||||
</spark-opts>
|
|
||||||
<arg>--sourcePath</arg><arg>${sourcePath}/dataset</arg>
|
|
||||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
|
|
||||||
<arg>--outputPath</arg><arg>${workingDir}/result/dataset</arg>
|
|
||||||
<arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
|
|
||||||
</spark>
|
|
||||||
<ok to="join_dump"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<action name="dump_orp">
|
|
||||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
||||||
<master>yarn</master>
|
|
||||||
<mode>cluster</mode>
|
|
||||||
<name>Dump table ORP </name>
|
|
||||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpEntitiesJob</class>
|
|
||||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
||||||
<spark-opts>
|
|
||||||
--executor-memory=${sparkExecutorMemory}
|
|
||||||
--executor-cores=${sparkExecutorCores}
|
|
||||||
--driver-memory=${sparkDriverMemory}
|
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
||||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
||||||
</spark-opts>
|
|
||||||
<arg>--sourcePath</arg><arg>${sourcePath}/otherresearchproduct</arg>
|
|
||||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
|
|
||||||
<arg>--outputPath</arg><arg>${workingDir}/result/otherresearchproduct</arg>
|
|
||||||
<arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
|
|
||||||
</spark>
|
|
||||||
<ok to="join_dump"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<action name="dump_software">
|
|
||||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
||||||
<master>yarn</master>
|
|
||||||
<mode>cluster</mode>
|
|
||||||
<name>Dump table software </name>
|
|
||||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpEntitiesJob</class>
|
|
||||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
||||||
<spark-opts>
|
|
||||||
--executor-memory=${sparkExecutorMemory}
|
|
||||||
--executor-cores=${sparkExecutorCores}
|
|
||||||
--driver-memory=${sparkDriverMemory}
|
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
||||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
||||||
</spark-opts>
|
|
||||||
<arg>--sourcePath</arg><arg>${sourcePath}/software</arg>
|
|
||||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
|
|
||||||
<arg>--outputPath</arg><arg>${workingDir}/result/software</arg>
|
|
||||||
<arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
|
|
||||||
</spark>
|
|
||||||
<ok to="join_dump"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<action name="dump_organization">
|
|
||||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
||||||
<master>yarn</master>
|
|
||||||
<mode>cluster</mode>
|
|
||||||
<name>Dump table organization </name>
|
|
||||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpEntitiesJob</class>
|
|
||||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
||||||
<spark-opts>
|
|
||||||
--executor-memory=${sparkExecutorMemory}
|
|
||||||
--executor-cores=${sparkExecutorCores}
|
|
||||||
--driver-memory=${sparkDriverMemory}
|
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
||||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
||||||
</spark-opts>
|
|
||||||
<arg>--sourcePath</arg><arg>${sourcePath}/organization</arg>
|
|
||||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Organization</arg>
|
|
||||||
<arg>--outputPath</arg><arg>${workingDir}/collect/organization</arg>
|
|
||||||
<arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
|
|
||||||
</spark>
|
|
||||||
<ok to="join_dump"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<action name="dump_project">
|
|
||||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
||||||
<master>yarn</master>
|
|
||||||
<mode>cluster</mode>
|
|
||||||
<name>Dump table project </name>
|
|
||||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpEntitiesJob</class>
|
|
||||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
||||||
<spark-opts>
|
|
||||||
--executor-memory=${sparkExecutorMemory}
|
|
||||||
--executor-cores=${sparkExecutorCores}
|
|
||||||
--driver-memory=${sparkDriverMemory}
|
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
||||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
||||||
</spark-opts>
|
|
||||||
<arg>--sourcePath</arg><arg>${sourcePath}/project</arg>
|
|
||||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Project</arg>
|
|
||||||
<arg>--outputPath</arg><arg>${workingDir}/collect/project</arg>
|
|
||||||
<arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
|
|
||||||
</spark>
|
|
||||||
<ok to="join_dump"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<action name="dump_datasource">
|
|
||||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
||||||
<master>yarn</master>
|
|
||||||
<mode>cluster</mode>
|
|
||||||
<name>Dump table datasource </name>
|
|
||||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpEntitiesJob</class>
|
|
||||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
||||||
<spark-opts>
|
|
||||||
--executor-memory=${sparkExecutorMemory}
|
|
||||||
--executor-cores=${sparkExecutorCores}
|
|
||||||
--driver-memory=${sparkDriverMemory}
|
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
||||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
||||||
</spark-opts>
|
|
||||||
<arg>--sourcePath</arg><arg>${sourcePath}/datasource</arg>
|
|
||||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Datasource</arg>
|
|
||||||
<arg>--outputPath</arg><arg>${workingDir}/collect/datasource</arg>
|
|
||||||
<arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
|
|
||||||
</spark>
|
|
||||||
<ok to="join_dump"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<action name="dump_relation">
|
|
||||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
||||||
<master>yarn</master>
|
|
||||||
<mode>cluster</mode>
|
|
||||||
<name>Dump table relation </name>
|
|
||||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkDumpRelationJob</class>
|
|
||||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
||||||
<spark-opts>
|
|
||||||
--executor-memory=${sparkExecutorMemory}
|
|
||||||
--executor-cores=${sparkExecutorCores}
|
|
||||||
--driver-memory=${sparkDriverMemory}
|
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
||||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
||||||
</spark-opts>
|
|
||||||
<arg>--sourcePath</arg><arg>${sourcePath}/relation</arg>
|
|
||||||
<arg>--outputPath</arg><arg>${workingDir}/relation/relation</arg>
|
|
||||||
</spark>
|
|
||||||
<ok to="join_dump"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<join name="join_dump" to="fork_context"/>
|
|
||||||
|
|
||||||
<fork name="fork_context">
|
|
||||||
<path start="create_entities_fromcontext"/>
|
|
||||||
<path start="create_relation_fromcontext"/>
|
|
||||||
<path start="create_relation_fromorgs"/>
|
|
||||||
</fork>
|
|
||||||
|
|
||||||
<action name="create_entities_fromcontext">
|
|
||||||
<java>
|
|
||||||
<main-class>eu.dnetlib.dhp.oa.graph.dump.complete.CreateContextEntities</main-class>
|
|
||||||
<arg>--hdfsPath</arg><arg>${workingDir}/collect/communities_infrastructures/communities_infrastructure.json.gz</arg>
|
|
||||||
<arg>--nameNode</arg><arg>${nameNode}</arg>
|
|
||||||
<arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
|
|
||||||
</java>
|
|
||||||
<ok to="join_context"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<action name="create_relation_fromcontext">
|
|
||||||
<java>
|
|
||||||
<main-class>eu.dnetlib.dhp.oa.graph.dump.complete.CreateContextRelation</main-class>
|
|
||||||
<arg>--hdfsPath</arg><arg>${workingDir}/relation/context</arg>
|
|
||||||
<arg>--nameNode</arg><arg>${nameNode}</arg>
|
|
||||||
<arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
|
|
||||||
</java>
|
|
||||||
<ok to="join_context"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<action name="create_relation_fromorgs">
|
|
||||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
||||||
<master>yarn</master>
|
|
||||||
<mode>cluster</mode>
|
|
||||||
<name>Create relations from the organization community map</name>
|
|
||||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkOrganizationRelation</class>
|
|
||||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
||||||
<spark-opts>
|
|
||||||
--executor-memory=${sparkExecutorMemory}
|
|
||||||
--executor-cores=${sparkExecutorCores}
|
|
||||||
--driver-memory=${sparkDriverMemory}
|
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
||||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
||||||
</spark-opts>
|
|
||||||
<arg>--sourcePath</arg><arg>${sourcePath}/relation</arg>
|
|
||||||
<arg>--outputPath</arg><arg>${workingDir}/relation/contextOrg</arg>
|
|
||||||
<arg>--organizationCommunityMap</arg><arg>${organizationCommunityMap}</arg>
|
|
||||||
<arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
|
|
||||||
</spark>
|
|
||||||
<ok to="join_context"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<join name="join_context" to="fork_extract_relations"/>
|
|
||||||
|
|
||||||
<fork name="fork_extract_relations">
|
|
||||||
<path start="rels_from_pubs"/>
|
|
||||||
<path start="rels_from_dats"/>
|
|
||||||
<path start="rels_from_orp"/>
|
|
||||||
<path start="rels_from_sw"/>
|
|
||||||
</fork>
|
|
||||||
|
|
||||||
<action name="rels_from_pubs">
|
|
||||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
||||||
<master>yarn</master>
|
|
||||||
<mode>cluster</mode>
|
|
||||||
<name>Extract relations from publication</name>
|
|
||||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkExtractRelationFromEntities</class>
|
|
||||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
||||||
<spark-opts>
|
|
||||||
--executor-memory=${sparkExecutorMemory}
|
|
||||||
--executor-cores=${sparkExecutorCores}
|
|
||||||
--driver-memory=${sparkDriverMemory}
|
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
||||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
||||||
</spark-opts>
|
|
||||||
<arg>--sourcePath</arg><arg>${sourcePath}/publication</arg>
|
|
||||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
|
|
||||||
<arg>--outputPath</arg><arg>${workingDir}/relation/publication</arg>
|
|
||||||
<arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
|
|
||||||
</spark>
|
|
||||||
<ok to="join_extract_relations"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<action name="rels_from_dats">
|
|
||||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
||||||
<master>yarn</master>
|
|
||||||
<mode>cluster</mode>
|
|
||||||
<name>Extract relations from dataset</name>
|
|
||||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkExtractRelationFromEntities</class>
|
|
||||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
||||||
<spark-opts>
|
|
||||||
--executor-memory=${sparkExecutorMemory}
|
|
||||||
--executor-cores=${sparkExecutorCores}
|
|
||||||
--driver-memory=${sparkDriverMemory}
|
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
||||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
||||||
</spark-opts>
|
|
||||||
<arg>--sourcePath</arg><arg>${sourcePath}/dataset</arg>
|
|
||||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
|
|
||||||
<arg>--outputPath</arg><arg>${workingDir}/relation/dataset</arg>
|
|
||||||
<arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
|
|
||||||
</spark>
|
|
||||||
<ok to="join_extract_relations"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<action name="rels_from_orp">
|
|
||||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
||||||
<master>yarn</master>
|
|
||||||
<mode>cluster</mode>
|
|
||||||
<name>Extract relations from ORP</name>
|
|
||||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkExtractRelationFromEntities</class>
|
|
||||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
||||||
<spark-opts>
|
|
||||||
--executor-memory=${sparkExecutorMemory}
|
|
||||||
--executor-cores=${sparkExecutorCores}
|
|
||||||
--driver-memory=${sparkDriverMemory}
|
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
||||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
||||||
</spark-opts>
|
|
||||||
<arg>--sourcePath</arg><arg>${sourcePath}/otherresearchproduct</arg>
|
|
||||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
|
|
||||||
<arg>--outputPath</arg><arg>${workingDir}/relation/orp</arg>
|
|
||||||
<arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
|
|
||||||
</spark>
|
|
||||||
<ok to="join_extract_relations"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<action name="rels_from_sw">
|
|
||||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
||||||
<master>yarn</master>
|
|
||||||
<mode>cluster</mode>
|
|
||||||
<name>Extract relations from software</name>
|
|
||||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkExtractRelationFromEntities</class>
|
|
||||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
||||||
<spark-opts>
|
|
||||||
--executor-memory=${sparkExecutorMemory}
|
|
||||||
--executor-cores=${sparkExecutorCores}
|
|
||||||
--driver-memory=${sparkDriverMemory}
|
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
||||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
||||||
</spark-opts>
|
|
||||||
<arg>--sourcePath</arg><arg>${sourcePath}/software</arg>
|
|
||||||
<arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
|
|
||||||
<arg>--outputPath</arg><arg>${workingDir}/relation/software</arg>
|
|
||||||
<arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
|
|
||||||
</spark>
|
|
||||||
<ok to="join_extract_relations"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<join name="join_extract_relations" to="collect_and_save"/>
|
|
||||||
|
|
||||||
<action name="collect_and_save">
|
|
||||||
<spark xmlns="uri:oozie:spark-action:0.2">
|
|
||||||
<master>yarn</master>
|
|
||||||
<mode>cluster</mode>
|
|
||||||
<name>Collect Results and Relations and put them in the right path </name>
|
|
||||||
<class>eu.dnetlib.dhp.oa.graph.dump.complete.SparkCollectAndSave</class>
|
|
||||||
<jar>dhp-graph-mapper-${projectVersion}.jar</jar>
|
|
||||||
<spark-opts>
|
|
||||||
--executor-memory=${sparkExecutorMemory}
|
|
||||||
--executor-cores=${sparkExecutorCores}
|
|
||||||
--driver-memory=${sparkDriverMemory}
|
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
|
||||||
--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
|
|
||||||
</spark-opts>
|
|
||||||
<arg>--sourcePath</arg><arg>${workingDir}</arg>
|
|
||||||
<arg>--outputPath</arg><arg>${workingDir}/collect</arg>
|
|
||||||
<arg>--resultAggregation</arg><arg>${resultAggregation}</arg>
|
|
||||||
</spark>
|
|
||||||
<ok to="make_archive"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<action name="make_archive">
|
|
||||||
<java>
|
|
||||||
<main-class>eu.dnetlib.dhp.oa.graph.dump.MakeTar</main-class>
|
|
||||||
<arg>--hdfsPath</arg><arg>${outputPath}</arg>
|
|
||||||
<arg>--nameNode</arg><arg>${nameNode}</arg>
|
|
||||||
<arg>--sourcePath</arg><arg>${workingDir}/collect</arg>
|
|
||||||
</java>
|
|
||||||
<ok to="send_zenodo"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<action name="send_zenodo">
|
|
||||||
<java>
|
|
||||||
<main-class>eu.dnetlib.dhp.oa.graph.dump.SendToZenodoHDFS</main-class>
|
|
||||||
<arg>--hdfsPath</arg><arg>${outputPath}</arg>
|
|
||||||
<arg>--nameNode</arg><arg>${nameNode}</arg>
|
|
||||||
<arg>--accessToken</arg><arg>${accessToken}</arg>
|
|
||||||
<arg>--connectionUrl</arg><arg>${connectionUrl}</arg>
|
|
||||||
<arg>--metadata</arg><arg>${metadata}</arg>
|
|
||||||
<arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
|
|
||||||
<arg>--conceptRecordId</arg><arg>${conceptRecordId}</arg>
|
|
||||||
<arg>--depositionType</arg><arg>${depositionType}</arg>
|
|
||||||
<arg>--depositionId</arg><arg>${depositionId}</arg>
|
|
||||||
</java>
|
|
||||||
<ok to="End"/>
|
|
||||||
<error to="Kill"/>
|
|
||||||
</action>
|
|
||||||
|
|
||||||
<end name="End"/>
|
|
||||||
|
|
||||||
</workflow-app>
|
|
|
@ -1,37 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "type": "object",
  "properties": {
    "description": {
      "type": "string",
      "description": "Description of the research community/research infrastructure"
    },
    "id": {
      "type": "string",
      "description": "OpenAIRE id of the research community/research infrastructure"
    },
    "name": {
      "type": "string",
      "description": "The long name of the community"
    },
    "originalId": {
      "type": "string",
      "description": "The acronym of the community"
    },
    "subject": {
      "description": "Only for research communities: the list of the subjects associated to the research community",
      "type": "array",
      "items": {
        "type": "string"
      }
    },
    "type": {
      "type": "string",
      "description": "One of {Research Community, Research infrastructure}"
    },
    "zenodo_community": {
      "type": "string",
      "description": "The URL of the Zenodo community associated to the Research community/Research infrastructure"
    }
  }
}
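For reference, a minimal instance that would validate against the community/infrastructure schema above; every value is invented for illustration and is not taken from the OpenAIRE graph:

    {
      "id": "community-id-example",
      "originalId": "dh-ch",
      "name": "Digital Humanities and Cultural Heritage",
      "description": "Research community gathering results relevant to digital humanities",
      "type": "Research Community",
      "subject": ["History", "Archaeology"],
      "zenodo_community": "https://zenodo.org/communities/dh-ch"
    }
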
@ -1,192 +0,0 @@
|
||||||
{
|
|
||||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
|
||||||
"definitions": {
|
|
||||||
"ControlledField": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"scheme": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"value": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"description": "To represent the information described by a scheme and a value in that scheme (i.e. pid)"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"accessrights": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Type of access to the data source, as defined by re3data.org. Possible values: {open, restricted, closed}"
|
|
||||||
},
|
|
||||||
"certificates": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "The certificate, seal or standard the data source complies with. As defined by re3data.org."
|
|
||||||
},
|
|
||||||
"citationguidelineurl": {
|
|
||||||
"type": "string",
|
|
||||||
"description":"The URL of the data source providing information on how to cite its items. As defined by re3data.org."
|
|
||||||
},
|
|
||||||
"contenttypes": {
|
|
||||||
"description": "Types of content in the data source, as defined by OpenDOAR",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"databaseaccessrestriction": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Access restrinctions to the data source, as defined by re3data.org. One of {feeRequired, registration, other}"
|
|
||||||
},
|
|
||||||
"datasourcetype": {
|
|
||||||
"allOf": [
|
|
||||||
{
|
|
||||||
"$ref": "#/definitions/ControlledField"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"description": "The type of the datasource. See https://api.openaire.eu/vocabularies/dnet:datasource_typologies"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"datauploadrestriction": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Upload restrictions applied by the datasource, as defined by re3data.org. One of {feeRequired, registration, other}"
|
|
||||||
},
|
|
||||||
"dateofvalidation": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "The date of last validation against the OpenAIRE guidelines for the datasource records"
|
|
||||||
},
|
|
||||||
"description": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"englishname": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "The English name of the datasource"
|
|
||||||
},
|
|
||||||
"id": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "The OpenAIRE id of the data source"
|
|
||||||
},
|
|
||||||
"journal": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"conferencedate": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"conferenceplace": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"edition": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"ep": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "End page"
|
|
||||||
},
|
|
||||||
"iss": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Issue number"
|
|
||||||
},
|
|
||||||
"issnLinking": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"issnOnline": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"issnPrinted": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"name": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"sp": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Start page"
|
|
||||||
},
|
|
||||||
"vol": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Volume"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"description": "Information about the journal, if this data source is of type Journal."
|
|
||||||
},
|
|
||||||
"languages": {
|
|
||||||
"description": "The languages present in the data source's content, as defined by OpenDOAR.",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"logourl": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"missionstatementurl": {
|
|
||||||
"type": "string",
|
|
||||||
"description":"The URL of a mission statement describing the designated community of the data source. As defined by re3data.org"
|
|
||||||
},
|
|
||||||
"officialname": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "The official name of the datasource"
|
|
||||||
},
|
|
||||||
"openairecompatibility": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "OpenAIRE guidelines the data source comply with. See also https://guidelines.openaire.eu."
|
|
||||||
},
|
|
||||||
"originalId": {
|
|
||||||
"description": "Original identifiers for the datasource"
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"pid": {
|
|
||||||
"description": "Persistent identifiers of the datasource",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"allOf": [
|
|
||||||
{
|
|
||||||
"$ref": "#/definitions/ControlledField"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"pidsystems": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "The persistent identifier system that is used by the data source. As defined by re3data.org"
|
|
||||||
},
|
|
||||||
"policies": {
|
|
||||||
"description": "Policies of the data source, as defined in OpenDOAR.",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"releaseenddate": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Date when the data source went offline or stopped ingesting new research data. As defined by re3data.org"
|
|
||||||
},
|
|
||||||
"releasestartdate": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Releasing date of the data source, as defined by re3data.org"
|
|
||||||
},
|
|
||||||
"subjects": {
|
|
||||||
"description": "List of subjects associated to the datasource",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"uploadrights": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Type of data upload. As defined by re3data.org: one of {open, restricted,closed}"
|
|
||||||
},
|
|
||||||
"versioning": {
|
|
||||||
"type": "boolean",
|
|
||||||
"description": "As defined by redata.org: 'yes' if the data source supports versioning, 'no' otherwise."
|
|
||||||
},
|
|
||||||
"websiteurl": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,57 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "type": "object",
  "properties": {
    "alternativenames": {
      "description": "Alternative names that identify the organisation",
      "type": "array",
      "items": {
        "type": "string"
      }
    },
    "country": {
      "type": "object",
      "properties": {
        "code": {
          "type": "string",
          "description": "The organisation country code"
        },
        "label": {
          "type": "string",
          "description": "The organisation country label"
        }
      },
      "description": "The country of the organisation"
    },
    "id": {
      "type": "string",
      "description": "The OpenAIRE id for the organisation"
    },
    "legalname": {
      "type": "string"
    },
    "legalshortname": {
      "type": "string"
    },
    "pid": {
      "description": "Persistent identifiers for the organisation, e.g. isni 0000000090326370",
      "type": "array",
      "items": {
        "type": "object",
        "properties": {
          "scheme": {
            "type": "string",
            "description": "The scheme of the identifier (e.g. isni)"
          },
          "value": {
            "type": "string",
            "description": "The value in the scheme (e.g. 0000000090326370)"
          }
        }
      }
    },
    "websiteurl": {
      "type": "string"
    }
  }
}
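A minimal illustrative instance of the organisation schema; all values, including the id, are made up and do not follow any guaranteed OpenAIRE identifier format:

    {
      "id": "organization-id-example",
      "legalname": "Example University",
      "legalshortname": "EXU",
      "alternativenames": ["University of Example"],
      "country": {"code": "NL", "label": "Netherlands"},
      "pid": [{"scheme": "isni", "value": "0000000090326370"}],
      "websiteurl": "https://www.example.edu"
    }
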
@ -1,119 +0,0 @@
|
||||||
{
|
|
||||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"acronym": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"callidentifier": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"code": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "The grant agreement number"
|
|
||||||
},
|
|
||||||
"enddate": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"funding": {
|
|
||||||
"description": "Funding information for the project",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"funding_stream": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"description": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Description of the funding stream"
|
|
||||||
},
|
|
||||||
"id": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Id of the funding stream"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"jurisdiction": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "The jurisdiction of the funder (i.e. EU)"
|
|
||||||
},
|
|
||||||
"name": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "The name of the funder (European Commission)"
|
|
||||||
},
|
|
||||||
"shortName": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "The short name of the funder (EC)"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"granted": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"currency": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "The currency of the granted amount (e.g. EUR)"
|
|
||||||
},
|
|
||||||
"fundedamount": {
|
|
||||||
"type": "number",
|
|
||||||
"description": "The funded amount"
|
|
||||||
},
|
|
||||||
"totalcost": {
|
|
||||||
"type": "number",
|
|
||||||
"description": "The total cost of the project"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"description": "The money granted to the project"
|
|
||||||
},
|
|
||||||
"h2020programme": {
|
|
||||||
"description": "The h2020 programme funding the project",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"code": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "The code of the programme"
|
|
||||||
},
|
|
||||||
"description": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "The description of the programme"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"id": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "OpenAIRE id for the project"
|
|
||||||
},
|
|
||||||
"keywords": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"openaccessmandatefordataset": {
|
|
||||||
"type": "boolean"
|
|
||||||
},
|
|
||||||
"openaccessmandateforpublications": {
|
|
||||||
"type": "boolean"
|
|
||||||
},
|
|
||||||
"startdate": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"subject": {
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"summary": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"title": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"websiteurl": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,60 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "Node": {
      "type": "object",
      "properties": {
        "id": {
          "type": "string",
          "description": "The OpenAIRE id of the entity"
        },
        "type": {
          "type": "string",
          "description": "The type of the entity (e.g. organisation)"
        }
      }
    }
  },
  "type": "object",
  "properties": {
    "provenance": {
      "type": "object",
      "properties": {
        "provenance": {
          "type": "string",
          "description": "The reason why OpenAIRE holds the relation"
        },
        "trust": {
          "type": "string",
          "description": "The trust of the relation in the range [0,1]: the greater the number, the higher the trust. Harvested relationships typically have a high trust (0.9). The trust of inferred relationships is calculated by the inference algorithm that generated them, as described in https://graph.openaire.eu/about#architecture (Enrichment --> Mining)"
        }
      }
    },
    "reltype": {
      "type": "object",
      "properties": {
        "name": {
          "type": "string",
          "description": "The semantics of the relation (e.g. isAuthorInstitutionOf)"
        },
        "type": {
          "type": "string",
          "description": "The type of the relation (e.g. affiliation)"
        }
      },
      "description": "To represent the semantics of a relation between two entities"
    },
    "source": {
      "allOf": [
        {"$ref": "#/definitions/Node"},
        {"description": "The node source in the relation"}
      ]
    },
    "target": {
      "allOf": [
        {"$ref": "#/definitions/Node"},
        {"description": "The node target in the relation"}
      ]
    }
  }
}
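An illustrative relation instance conforming to the schema above; the identifiers, provenance string and trust value are invented:

    {
      "source": {"id": "organization-id-example", "type": "organisation"},
      "target": {"id": "result-id-example", "type": "publication"},
      "reltype": {"name": "isAuthorInstitutionOf", "type": "affiliation"},
      "provenance": {"provenance": "Harvested", "trust": "0.9"}
    }
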
@ -1,398 +0,0 @@
|
||||||
{
|
|
||||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
|
||||||
"definitions": {
|
|
||||||
"AccessRight":{
|
|
||||||
"type":"object",
|
|
||||||
"properties":{
|
|
||||||
"code": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "COAR access mode code: http://vocabularies.coar-repositories.org/documentation/access_rights/"
|
|
||||||
},
|
|
||||||
"label": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Label for the access mode"
|
|
||||||
},
|
|
||||||
"scheme": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Scheme of reference for access right code. Always set to COAR access rights vocabulary: http://vocabularies.coar-repositories.org/documentation/access_rights/"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"ControlledField": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"scheme": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"value": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"description": "To represent the information described by a scheme and a value in that scheme (i.e. pid)"
|
|
||||||
},
|
|
||||||
"Provenance": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"provenance": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "The process that produced/provided the information"
|
|
||||||
},
|
|
||||||
"trust": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"description": "Indicates the process that produced (or provided) the information, and the trust associated to the information"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"author": {
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"fullname": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"name": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"pid": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"id": {
|
|
||||||
"allOf": [
|
|
||||||
{"$ref": "#/definitions/ControlledField"},
|
|
||||||
{"description": "The author's id and scheme. OpenAIRE currently supports 'ORCID'"}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"provenance": {
|
|
||||||
"allOf": [
|
|
||||||
{"$ref": "#/definitions/Provenance"},
|
|
||||||
{"description": "Provenance of author's pid"}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"rank": {
|
|
||||||
"type": "integer"
|
|
||||||
},
|
|
||||||
"surname": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"bestaccessright": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"code": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "COAR access mode code: http://vocabularies.coar-repositories.org/documentation/access_rights/"
|
|
||||||
},
|
|
||||||
"label": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Label for the access mode"
|
|
||||||
},
|
|
||||||
"scheme": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Scheme of reference for access right code. Always set to COAR access rights vocabulary: http://vocabularies.coar-repositories.org/documentation/access_rights/"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"description": "The openest access right associated to the manifestations of this research results"
|
|
||||||
},
|
|
||||||
"codeRepositoryUrl": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Only for results with type 'software': the URL to the repository with the source code"
|
|
||||||
},
|
|
||||||
"contactgroup": {
|
|
||||||
"description": "Only for results with type 'software': Information on the group responsible for providing further information regarding the resource",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"contactperson": {
|
|
||||||
"description": "Only for results with type 'software': Information on the person responsible for providing further information regarding the resource",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"container": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"conferencedate": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"conferenceplace": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"edition": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Edition of the journal or conference proceeding"
|
|
||||||
},
|
|
||||||
"ep": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "End page"
|
|
||||||
},
|
|
||||||
"iss": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Journal issue"
|
|
||||||
},
|
|
||||||
"issnLinking": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"issnOnline": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"issnPrinted": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"name": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Name of the journal or conference"
|
|
||||||
},
|
|
||||||
"sp": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "start page"
|
|
||||||
},
|
|
||||||
"vol": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"description": "Container has information about the conference or journal where the result has been presented or published"
|
|
||||||
},
|
|
||||||
"contributor": {
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Description of contributor"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"country": {
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"code": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "ISO 3166-1 alpha-2 country code"
|
|
||||||
},
|
|
||||||
"label": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"provenance": {
|
|
||||||
"allOf": [
|
|
||||||
{"$ref": "#/definitions/Provenance"},
|
|
||||||
{"description": "Why this result is associated to the country."}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"coverage": {
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"dateofcollection": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "When OpenAIRE collected the record the last time"
|
|
||||||
},
|
|
||||||
"description": {
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"documentationUrl": {
|
|
||||||
"description": "Only for results with type 'software': URL to the software documentation",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"embargoenddate": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Date when the embargo ends and this result turns Open Access"
|
|
||||||
},
|
|
||||||
"format": {
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"geolocation": {
|
|
||||||
"description": "Geolocation information",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"box": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"place": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"point": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"id": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "OpenAIRE Identifier"
|
|
||||||
},
|
|
||||||
"language": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"code": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "alpha-3/ISO 639-2 code of the language"
|
|
||||||
},
|
|
||||||
"label": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "English label"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"lastupdatetimestamp": {
|
|
||||||
"type": "integer",
|
|
||||||
"description": "Timestamp of last update of the record in OpenAIRE"
|
|
||||||
},
|
|
||||||
"maintitle": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"originalId": {
|
|
||||||
"description": "Identifiers of the record at the original sources",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"pid": {
|
|
||||||
"description": "Persistent identifiers of the result",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"allOf": [
|
|
||||||
{"$ref": "#/definitions/ControlledField"},
|
|
||||||
{"description": "scheme: list of available schemes are at https://api.openaire.eu/vocabularies/dnet:pid_types, value: the PID of the result "}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"instance":{
|
|
||||||
"type":"array",
|
|
||||||
"items":{
|
|
||||||
"type":"object",
|
|
||||||
"properties":{
|
|
||||||
"accessright":{
|
|
||||||
"allOf":[
|
|
||||||
{
|
|
||||||
"$ref":"#/definitions/AccessRight"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"description":"The accessright of this materialization of the result"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"articleprocessingcharge":{
|
|
||||||
"type":"object",
|
|
||||||
"properties":{
|
|
||||||
"amount":{
|
|
||||||
"type":"string"
|
|
||||||
},
|
|
||||||
"currency":{
|
|
||||||
"type":"string"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"license":{
|
|
||||||
"type":"string"
|
|
||||||
},
|
|
||||||
"publicationdate":{
|
|
||||||
"type":"string"
|
|
||||||
},
|
|
||||||
"refereed":{
|
|
||||||
"type":"string"
|
|
||||||
},
|
|
||||||
"type":{
|
|
||||||
"type":"string",
|
|
||||||
"description":"The specific sub-type of this materialization of the result (see https://api.openaire.eu/vocabularies/dnet:result_typologies following the links)"
|
|
||||||
},
|
|
||||||
"url":{
|
|
||||||
"description":"Description of url",
|
|
||||||
"type":"array",
|
|
||||||
"items":{
|
|
||||||
"type":"string",
|
|
||||||
"description":"urls where it is possible to access the materialization of the result"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"description":"One of the materialization for this result"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"programmingLanguage": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Only for results with type 'software': the programming language"
|
|
||||||
},
|
|
||||||
"publicationdate": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"publisher": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"size": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Only for results with type 'dataset': the declared size of the dataset"
|
|
||||||
},
|
|
||||||
"source": {
|
|
||||||
"description": "See definition of Dublin Core field dc:source",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"subjects": {
|
|
||||||
"description": "Keywords associated to the result",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"provenance": {
|
|
||||||
"allOf": [
|
|
||||||
{"$ref": "#/definitions/Provenance"},
|
|
||||||
{"description": "Why this subject is associated to the result"}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"subject": {
|
|
||||||
"allOf": [
|
|
||||||
{"$ref": "#/definitions/ControlledField"},
|
|
||||||
{"description": "OpenAIRE subject classification scheme (https://api.openaire.eu/vocabularies/dnet:subject_classification_typologies) and value. When the scheme is 'keyword', it means that the subject is free-text (i.e. not a term from a controlled vocabulary)."}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"subtitle": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"tool": {
|
|
||||||
"description": "Only for results with type 'other': tool useful for the interpretation and/or re-used of the research product",
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"type": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Type of the result: one of 'publication', 'dataset', 'software', 'other' (see also https://api.openaire.eu/vocabularies/dnet:result_typologies)"
|
|
||||||
},
|
|
||||||
"version": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "Version of the result"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
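For orientation, here is a minimal, purely illustrative record that the schema above would accept; the identifier, title, URLs and amounts are invented for the example and do not come from an actual dump:

{
  "id": "50|doi_________::abcdef0123456789abcdef0123456789",
  "type": "publication",
  "maintitle": "An example article",
  "publicationdate": "2021-07-01",
  "publisher": "Example Press",
  "language": {"code": "eng", "label": "English"},
  "pid": [{"scheme": "doi", "value": "10.0000/example.doi"}],
  "instance": [
    {
      "accessright": {"code": "c_abf2", "label": "OPEN", "scheme": "http://vocabularies.coar-repositories.org/documentation/access_rights/"},
      "articleprocessingcharge": {"amount": "1000.00", "currency": "EUR"},
      "license": "CC-BY-4.0",
      "publicationdate": "2021-07-01",
      "refereed": "peerReviewed",
      "type": "Article",
      "url": ["https://example.org/article/1"]
    }
  ],
  "subjects": [{"subject": {"scheme": "keyword", "value": "open science"}}],
  "lastupdatetimestamp": 1625097600000
}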
@ -1,30 +0,0 @@
<configuration>
    <property><name>jobTracker</name><value>yarnRM</value></property>
    <property><name>nameNode</name><value>hdfs://nameservice1</value></property>
    <property><name>oozie.use.system.libpath</name><value>true</value></property>
    <property><name>hiveMetastoreUris</name><value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value></property>
    <property><name>hiveJdbcUrl</name><value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000</value></property>
    <property><name>hiveDbName</name><value>openaire</value></property>
    <property><name>oozie.launcher.mapreduce.user.classpath.first</name><value>true</value></property>
</configuration>
@ -1,563 +0,0 @@
<workflow-app name="dump_funder_results" xmlns="uri:oozie:workflow:0.5">

    <parameters>
        <property><name>upload</name><value>false</value><description>true to upload the dump for the funders in Zenodo</description></property>
        <property><name>sourcePath</name><description>the source path</description></property>
        <property><name>isLookUpUrl</name><description>the isLookup service endpoint</description></property>
        <property><name>outputPath</name><description>the output path</description></property>
        <property><name>accessToken</name><description>the access token used for the deposition in Zenodo</description></property>
        <property><name>connectionUrl</name><description>the connection url for Zenodo</description></property>
        <property><name>metadata</name><description>the metadata associated to the deposition</description></property>
        <property><name>depositionType</name><description>the type of deposition we want to perform: "new" for a brand new deposition, "version" for a new version of a published deposition (in this case the concept record id must be provided), "upload" to upload content to an open deposition for which we already have the deposition id (in this case the deposition id should be provided)</description></property>
        <property><name>conceptRecordId</name><description>for a new version, the id of the record of the old deposition</description></property>
        <property><name>depositionId</name><description>the depositionId of an open deposition to which content has to be added</description></property>
        <property><name>hiveDbName</name><description>the target hive database name</description></property>
        <property><name>hiveJdbcUrl</name><description>hive server jdbc url</description></property>
        <property><name>hiveMetastoreUris</name><description>hive server metastore URIs</description></property>
        <property><name>sparkDriverMemory</name><description>memory for driver process</description></property>
        <property><name>sparkExecutorMemory</name><description>memory for individual executor</description></property>
        <property><name>sparkExecutorCores</name><description>number of cores used by single executor</description></property>
        <property><name>oozieActionShareLibForSpark2</name><description>oozie action sharelib for spark 2.*</description></property>
        <property><name>spark2ExtraListeners</name><value>com.cloudera.spark.lineage.NavigatorAppListener</value><description>spark 2.* extra listeners classname</description></property>
        <property><name>spark2SqlQueryExecutionListeners</name><value>com.cloudera.spark.lineage.NavigatorQueryListener</value><description>spark 2.* sql query execution listeners classname</description></property>
        <property><name>spark2YarnHistoryServerAddress</name><description>spark 2.* yarn history server address</description></property>
        <property><name>spark2EventLogDir</name><description>spark 2.* event log dir location</description></property>
    </parameters>

    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property><name>mapreduce.job.queuename</name><value>${queueName}</value></property>
            <property><name>oozie.launcher.mapred.job.queue.name</name><value>${oozieLauncherQueueName}</value></property>
            <property><name>oozie.action.sharelib.for.spark</name><value>${oozieActionShareLibForSpark2}</value></property>
        </configuration>
    </global>

    <start to="reset_outputpath"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <action name="reset_outputpath">
        <fs>
            <delete path="${outputPath}"/>
            <mkdir path="${outputPath}"/>
        </fs>
        <ok to="save_community_map"/>
        <error to="Kill"/>
    </action>

    <action name="save_community_map">
        <java>
            <main-class>eu.dnetlib.dhp.oa.graph.dump.SaveCommunityMap</main-class>
            <arg>--outputPath</arg><arg>${workingDir}/communityMap</arg>
            <arg>--nameNode</arg><arg>${nameNode}</arg>
            <arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
        </java>
        <ok to="fork_result_linked_to_projects"/>
        <error to="Kill"/>
    </action>

    <fork name="fork_result_linked_to_projects">
        <path start="select_publication_linked_to_projects"/>
        <path start="select_dataset_linked_to_projects"/>
        <path start="select_orp_linked_to_project"/>
        <path start="select_software_linked_to_projects"/>
    </fork>

    <action name="select_publication_linked_to_projects">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master><mode>cluster</mode>
            <name>Dump funder results</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkResultLinkedToProject</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory} --executor-cores=${sparkExecutorCores} --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}/publication</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
            <arg>--outputPath</arg><arg>${workingDir}/result/publication</arg>
            <arg>--relationPath</arg><arg>${sourcePath}/relation</arg>
        </spark>
        <ok to="join_link"/>
        <error to="Kill"/>
    </action>

    <action name="select_dataset_linked_to_projects">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master><mode>cluster</mode>
            <name>Dump funder results</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkResultLinkedToProject</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory} --executor-cores=${sparkExecutorCores} --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}/dataset</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
            <arg>--outputPath</arg><arg>${workingDir}/result/dataset</arg>
            <arg>--relationPath</arg><arg>${sourcePath}/relation</arg>
        </spark>
        <ok to="join_link"/>
        <error to="Kill"/>
    </action>

    <action name="select_orp_linked_to_project">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master><mode>cluster</mode>
            <name>Dump funder results</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkResultLinkedToProject</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory} --executor-cores=${sparkExecutorCores} --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}/otherresearchproduct</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
            <arg>--outputPath</arg><arg>${workingDir}/result/otherresearchproduct</arg>
            <arg>--relationPath</arg><arg>${sourcePath}/relation</arg>
        </spark>
        <ok to="join_link"/>
        <error to="Kill"/>
    </action>

    <action name="select_software_linked_to_projects">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master><mode>cluster</mode>
            <name>Dump funder results</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkResultLinkedToProject</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory} --executor-cores=${sparkExecutorCores} --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}/software</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
            <arg>--outputPath</arg><arg>${workingDir}/result/software</arg>
            <arg>--relationPath</arg><arg>${sourcePath}/relation</arg>
        </spark>
        <ok to="join_link"/>
        <error to="Kill"/>
    </action>

    <join name="join_link" to="fork_dump"/>

    <fork name="fork_dump">
        <path start="dump_publication"/>
        <path start="dump_dataset"/>
        <path start="dump_orp"/>
        <path start="dump_software"/>
    </fork>

    <action name="dump_publication">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master><mode>cluster</mode>
            <name>Dump table publication for community related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory} --executor-cores=${sparkExecutorCores} --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/result/publication</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
            <arg>--outputPath</arg><arg>${workingDir}/dump/publication</arg>
            <arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
            <arg>--dumpType</arg><arg>funder</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <action name="dump_dataset">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master><mode>cluster</mode>
            <name>Dump table dataset for community related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory} --executor-cores=${sparkExecutorCores} --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/result/dataset</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
            <arg>--outputPath</arg><arg>${workingDir}/dump/dataset</arg>
            <arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
            <arg>--dumpType</arg><arg>funder</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <action name="dump_orp">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master><mode>cluster</mode>
            <name>Dump table ORP for community related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory} --executor-cores=${sparkExecutorCores} --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/result/otherresearchproduct</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
            <arg>--outputPath</arg><arg>${workingDir}/dump/otherresearchproduct</arg>
            <arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
            <arg>--dumpType</arg><arg>funder</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <action name="dump_software">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master><mode>cluster</mode>
            <name>Dump table software for community related products</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkDumpCommunityProducts</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory} --executor-cores=${sparkExecutorCores} --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/result/software</arg>
            <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
            <arg>--outputPath</arg><arg>${workingDir}/dump/software</arg>
            <arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
            <arg>--dumpType</arg><arg>funder</arg>
        </spark>
        <ok to="join_dump"/>
        <error to="Kill"/>
    </action>

    <join name="join_dump" to="prepareResultProject"/>

    <action name="prepareResultProject">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master><mode>cluster</mode>
            <name>Prepare association result subset of project info</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkPrepareResultProject</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory} --executor-cores=${sparkExecutorCores} --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${sourcePath}</arg>
            <arg>--outputPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="fork_extendWithProject"/>
        <error to="Kill"/>
    </action>

    <fork name="fork_extendWithProject">
        <path start="extend_publication"/>
        <path start="extend_dataset"/>
        <path start="extend_orp"/>
        <path start="extend_software"/>
    </fork>

    <action name="extend_publication">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master><mode>cluster</mode>
            <name>Extend dumped publications with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory} --executor-cores=${sparkExecutorCores} --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/dump/publication</arg>
            <arg>--outputPath</arg><arg>${workingDir}/ext/publication</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <action name="extend_dataset">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master><mode>cluster</mode>
            <name>Extend dumped dataset with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory} --executor-cores=${sparkExecutorCores} --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/dump/dataset</arg>
            <arg>--outputPath</arg><arg>${workingDir}/ext/dataset</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <action name="extend_orp">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master><mode>cluster</mode>
            <name>Extend dumped ORP with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory} --executor-cores=${sparkExecutorCores} --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/dump/otherresearchproduct</arg>
            <arg>--outputPath</arg><arg>${workingDir}/ext/orp</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <action name="extend_software">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master><mode>cluster</mode>
            <name>Extend dumped software with information about project</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.community.SparkUpdateProjectInfo</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory} --executor-cores=${sparkExecutorCores} --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/dump/software</arg>
            <arg>--outputPath</arg><arg>${workingDir}/ext/software</arg>
            <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
        </spark>
        <ok to="join_extend"/>
        <error to="Kill"/>
    </action>

    <join name="join_extend" to="dump_funder_results"/>

    <action name="dump_funder_results">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master><mode>cluster</mode>
            <name>Dump funder results</name>
            <class>eu.dnetlib.dhp.oa.graph.dump.funderresults.SparkDumpFunderResults</class>
            <jar>dhp-graph-mapper-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory} --executor-cores=${sparkExecutorCores} --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners} --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress} --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir} --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>
            <arg>--sourcePath</arg><arg>${workingDir}/ext</arg>
            <arg>--outputPath</arg><arg>${workingDir}/resultperfunder</arg>
            <arg>--relationPath</arg><arg>${sourcePath}</arg>
        </spark>
        <ok to="make_archive"/>
        <error to="Kill"/>
    </action>

    <action name="make_archive">
        <java>
            <main-class>eu.dnetlib.dhp.oa.graph.dump.MakeTar</main-class>
            <arg>--hdfsPath</arg><arg>${outputPath}</arg>
            <arg>--nameNode</arg><arg>${nameNode}</arg>
            <arg>--sourcePath</arg><arg>${workingDir}/resultperfunder</arg>
        </java>
        <ok to="should_upload"/>
        <error to="Kill"/>
    </action>

    <decision name="should_upload">
        <switch>
            <case to="send_zenodo">${wf:conf('upload') eq true}</case>
            <default to="End"/>
        </switch>
    </decision>

    <action name="send_zenodo">
        <java>
            <main-class>eu.dnetlib.dhp.oa.graph.dump.SendToZenodoHDFS</main-class>
            <arg>--hdfsPath</arg><arg>${outputPath}</arg>
            <arg>--nameNode</arg><arg>${nameNode}</arg>
            <arg>--accessToken</arg><arg>${accessToken}</arg>
            <arg>--connectionUrl</arg><arg>${connectionUrl}</arg>
            <arg>--metadata</arg><arg>${metadata}</arg>
            <arg>--communityMapPath</arg><arg>${workingDir}/communityMap</arg>
            <arg>--conceptRecordId</arg><arg>${conceptRecordId}</arg>
            <arg>--depositionType</arg><arg>${depositionType}</arg>
            <arg>--depositionId</arg><arg>${depositionId}</arg>
        </java>
        <ok to="End"/>
        <error to="Kill"/>
    </action>

    <end name="End"/>

</workflow-app>
@ -19,6 +19,12 @@
     "paramLongName": "isSparkSessionManaged",
     "paramDescription": "true if the spark session is managed, false otherwise",
     "paramRequired": false
+  },
+  {
+    "paramName": "rs",
+    "paramLongName": "removeSet",
+    "paramDescription": "the list of classname relations, split by ';', not to be dumped",
+    "paramRequired": false
   }
 ]
@ -1,542 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "definitions": {
    "AccessRight": {
      "type": "object",
      "properties": {
        "code": {"type": "string", "description": "COAR access mode code: http://vocabularies.coar-repositories.org/documentation/access_rights/"},
        "label": {"type": "string", "description": "Label for the access mode"},
        "scheme": {"type": "string", "description": "Scheme of reference for access right code. Always set to COAR access rights vocabulary: http://vocabularies.coar-repositories.org/documentation/access_rights/"}
      }
    },
    "ControlledField": {
      "type": "object",
      "properties": {
        "scheme": {"type": "string", "description": "The scheme for the resource"},
        "value": {"type": "string", "description": "the value in the scheme"}
      }
    },
    "KeyValue": {
      "type": "object",
      "properties": {
        "key": {"type": "string", "description": "Description of key"},
        "value": {"type": "string", "description": "Description of value"}
      }
    },
    "Provenance": {
      "type": "object",
      "properties": {
        "provenance": {"type": "string", "description": "The provenance of the information"},
        "trust": {"type": "string", "description": "The trust associated to the information"}
      }
    }
  },
  "type": "object",
  "properties": {
    "author": {
      "description": "List of authors of the research results",
      "type": "array",
      "items": {
        "type": "object",
        "properties": {
          "affiliation": {
            "description": "Affiliations of the author",
            "type": "array",
            "items": {"type": "string", "description": "One of the affiliations of the author"}
          },
          "fullname": {"type": "string", "description": "Fullname of the author"},
          "name": {"type": "string", "description": "First name of the author"},
          "pid": {
            "type": "object",
            "properties": {
              "id": {
                "allOf": [
                  {"$ref": "#/definitions/ControlledField"},
                  {"description": "The author's id and scheme. OpenAIRE currently supports 'ORCID'"}
                ]
              },
              "provenance": {
                "allOf": [
                  {"$ref": "#/definitions/Provenance"},
                  {"description": "The provenance of the author's pid"}
                ]
              }
            },
            "description": "Persistent identifier of the author (e.g. ORCID)"
          },
          "rank": {"type": "integer", "description": "Order in which the author appears in the authors list"},
          "surname": {"type": "string", "description": "Surname of the author"}
        },
        "description": "One of the authors of the research result"
      }
    },
    "bestaccessright": {
      "allOf": [
        {"$ref": "#/definitions/AccessRight"},
        {"description": "The most open access right associated to the manifestations of this research result"}
      ]
    },
    "codeRepositoryUrl": {"type": "string", "description": "Only for results with type 'software': the URL to the repository with the source code"},
    "collectedfrom": {
      "description": "Information about the sources from which the record has been collected",
      "type": "array",
      "items": {
        "allOf": [
          {"$ref": "#/definitions/KeyValue"},
          {"description": "Key is the OpenAIRE identifier of the data source, value is its name"}
        ]
      }
    },
    "contactgroup": {
      "description": "Only for results with type 'software': Information on the group responsible for providing further information regarding the resource",
      "type": "array",
      "items": {"type": "string"}
    },
    "contactperson": {
      "description": "Only for results with type 'software': Information on the person responsible for providing further information regarding the resource",
      "type": "array",
      "items": {"type": "string"}
    },
    "container": {
      "type": "object",
      "properties": {
        "conferencedate": {"type": "string", "description": "Date of the conference"},
        "conferenceplace": {"type": "string", "description": "Place of the conference"},
        "edition": {"type": "string", "description": "Edition of the journal or conference proceeding"},
        "ep": {"type": "string", "description": "End page"},
        "iss": {"type": "string", "description": "Journal issue"},
        "issnLinking": {"type": "string", "description": "Journal linking issn"},
        "issnOnline": {"type": "string", "description": "Journal online issn"},
        "issnPrinted": {"type": "string", "description": "Journal printed issn"},
        "name": {"type": "string", "description": "Name of the journal or conference"},
        "sp": {"type": "string", "description": "Start page"},
        "vol": {"type": "string", "description": "Volume"}
      },
      "description": "Container has information about the conference or journal where the result has been presented or published"
    },
    "context": {
      "description": "Reference to a relevant research infrastructure, initiative or community (RI/RC) among those collaborating with OpenAIRE. Please see https://connect.openaire.eu",
      "type": "array",
      "items": {
        "type": "object",
        "properties": {
          "code": {"type": "string", "description": "Code identifying the RI/RC"},
          "label": {"type": "string", "description": "Label of the RI/RC"},
          "provenance": {
            "description": "Why this result is associated to the RI/RC.",
            "type": "array",
            "items": {
              "allOf": [
                {"$ref": "#/definitions/Provenance"}
              ]
            }
          }
        }
      }
    },
    "contributor": {
      "description": "Contributors of this result",
      "type": "array",
      "items": {"type": "string"}
    },
    "country": {
      "description": "Country associated to this result",
      "type": "array",
      "items": {
        "type": "object",
        "properties": {
          "code": {"type": "string", "description": "ISO 3166-1 alpha-2 country code"},
          "label": {"type": "string", "description": "English label of the country"},
          "provenance": {
            "allOf": [
              {"$ref": "#/definitions/Provenance"},
              {"description": "Why this result is associated to the country."}
            ]
          }
        }
      }
    },
    "coverage": {"type": "array", "items": {"type": "string"}},
    "dateofcollection": {"type": "string", "description": "When OpenAIRE collected the record the last time"},
    "description": {"type": "array", "items": {"type": "string"}},
    "documentationUrl": {
      "description": "Only for results with type 'software': URL to the software documentation",
      "type": "array",
      "items": {"type": "string"}
    },
    "embargoenddate": {"type": "string", "description": "Date when the embargo ends and this result turns Open Access"},
    "externalReference": {
      "description": "Links to external resources like entries from thematic databases (e.g. Protein Data Bank)",
      "type": "array",
      "items": {
        "type": "object",
        "properties": {
          "name": {"type": "string"},
          "provenance": {
            "allOf": [
              {"$ref": "#/definitions/Provenance"},
              {"description": "Why this result is linked to the external resource"}
            ]
          },
          "typology": {"type": "string"},
          "value": {"type": "string"}
        }
      }
    },
    "format": {"type": "array", "items": {"type": "string"}},
    "geolocation": {
      "description": "Geolocation information",
      "type": "array",
      "items": {
        "type": "object",
        "properties": {
          "box": {"type": "string"},
          "place": {"type": "string"},
          "point": {"type": "string"}
        }
      }
    },
    "id": {"type": "string", "description": "OpenAIRE identifier"},
    "instance": {
      "description": "Manifestations (i.e. different versions) of the result. For example: the pre-print and the published versions are two manifestations of the same research result",
      "type": "array",
      "items": {
        "type": "object",
        "properties": {
          "accessright": {
            "allOf": [
              {"$ref": "#/definitions/AccessRight"},
              {"description": "Access right of this instance"}
            ]
          },
          "collectedfrom": {
            "allOf": [
              {"$ref": "#/definitions/KeyValue"},
              {"description": "Information about the source from which the instance has been collected. Key is the OpenAIRE identifier of the data source, value is its name"}
            ]
          },
          "hostedby": {
            "allOf": [
              {"$ref": "#/definitions/KeyValue"},
              {"description": "Information about the source from which the instance can be viewed or downloaded. Key is the OpenAIRE identifier of the data source, value is its name"}
            ]
          },
          "license": {"type": "string", "description": "License applied to the instance"},
          "publicationdate": {"type": "string", "description": "Publication date of the instance"},
          "refereed": {"type": "string", "description": "Was the instance subject to peer-review? Possible values are 'Unknown', 'nonPeerReviewed', 'peerReviewed' (see also https://api.openaire.eu/vocabularies/dnet:review_levels)"},
          "type": {"type": "string", "description": "Type of the instance. Possible values are listed at https://api.openaire.eu/vocabularies/dnet:publication_resource"},
          "url": {
            "description": "Location where the instance is accessible",
            "type": "array",
            "items": {"type": "string"}
          }
        }
      }
    },
    "language": {
      "type": "object",
      "properties": {
        "code": {"type": "string", "description": "alpha-3/ISO 639-2 code of the language"},
        "label": {"type": "string", "description": "English label"}
      }
    },
    "lastupdatetimestamp": {"type": "integer", "description": "Timestamp of last update of the record in OpenAIRE"},
    "maintitle": {"type": "string", "description": "Title"},
    "originalId": {
      "description": "Identifiers of the record at the original sources",
      "type": "array",
      "items": {"type": "string"}
    },
    "pid": {
      "description": "Persistent identifiers of the result",
      "type": "array",
      "items": {
        "allOf": [
          {"$ref": "#/definitions/ControlledField"},
          {"description": "scheme: list of available schemes are at https://api.openaire.eu/vocabularies/dnet:pid_types, value: the PID of the result"}
        ]
      }
    },
    "programmingLanguage": {"type": "string", "description": "Only for results with type 'software': the programming language"},
    "projects": {
      "description": "List of projects (i.e. grants) that (co-)funded the production of the research results",
      "type": "array",
      "items": {
        "type": "object",
        "properties": {
          "acronym": {"type": "string", "description": "Project acronym"},
          "code": {"type": "string", "description": "Grant code"},
          "funder": {
            "type": "object",
            "properties": {
              "fundingStream": {"type": "string", "description": "Stream of funding (e.g. for European Commission can be H2020 or FP7)"},
              "jurisdiction": {"type": "string", "description": "Geographical jurisdiction (e.g. for European Commission is EU, for Croatian Science Foundation is HR)"},
              "name": {"type": "string", "description": "Name of the funder"},
              "shortName": {"type": "string", "description": "Short name or acronym of the funder"}
            },
            "description": "Information about the funder funding the project"
          },
          "id": {"type": "string", "description": "OpenAIRE identifier of the project"},
          "provenance": {
            "allOf": [
              {"$ref": "#/definitions/Provenance"},
              {"description": "Why this project is associated to the result"}
            ]
          },
          "title": {"type": "string", "description": "Title of the project"}
        }
      }
    },
    "publicationdate": {"type": "string", "description": "Date of publication"},
    "publisher": {"type": "string", "description": "Publisher"},
    "size": {"type": "string", "description": "Only for results with type 'dataset': the declared size of the dataset"},
    "source": {
      "description": "See definition of Dublin Core field dc:source",
      "type": "array",
      "items": {"type": "string"}
    },
    "subjects": {
      "description": "Keywords associated to the result",
      "type": "array",
      "items": {
        "type": "object",
        "properties": {
          "provenance": {
            "allOf": [
              {"$ref": "#/definitions/Provenance"},
              {"description": "Why this subject is associated to the result"}
            ]
          },
          "subject": {
            "allOf": [
              {"$ref": "#/definitions/ControlledField"},
              {"description": "OpenAIRE subject classification scheme (https://api.openaire.eu/vocabularies/dnet:subject_classification_typologies) and value. When the scheme is 'keyword', it means that the subject is free-text (i.e. not a term from a controlled vocabulary)."}
            ]
          }
        }
      }
    },
    "subtitle": {"type": "string", "description": "Sub-title of the result"},
    "tool": {
      "description": "Only for results with type 'other': tool useful for the interpretation and/or re-use of the research product",
      "type": "array",
      "items": {"type": "string"}
    },
    "type": {"type": "string", "description": "Type of the result: one of 'publication', 'dataset', 'software', 'other' (see also https://api.openaire.eu/vocabularies/dnet:result_typologies)"},
    "version": {"type": "string", "description": "Version of the result"}
  }
}
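As a purely illustrative sketch, a record in the complete graph dump carries the additional fields defined above (projects, context, per-instance collectedfrom/hostedby) on top of the common result fields; every identifier and value below is invented for the example:

{
  "id": "50|dedup_wf_001::0123456789abcdef0123456789abcdef",
  "type": "dataset",
  "maintitle": "An example dataset",
  "instance": [
    {
      "accessright": {"code": "c_abf2", "label": "OPEN", "scheme": "http://vocabularies.coar-repositories.org/documentation/access_rights/"},
      "collectedfrom": {"key": "10|openaire____::exampledatasource", "value": "Example Repository"},
      "hostedby": {"key": "10|openaire____::exampledatasource", "value": "Example Repository"},
      "type": "Dataset",
      "url": ["https://example.org/dataset/1"]
    }
  ],
  "projects": [
    {
      "acronym": "EXMPL",
      "code": "123456",
      "funder": {"fundingStream": "H2020", "jurisdiction": "EU", "name": "European Commission", "shortName": "EC"},
      "id": "40|exampleproject::0000000000000000000000000000beef",
      "provenance": {"provenance": "Harvested", "trust": "0.9"},
      "title": "An example project"
    }
  ],
  "context": [
    {"code": "example-community", "label": "Example Community", "provenance": [{"provenance": "Inferred by OpenAIRE", "trust": "0.8"}]}
  ]
}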
@ -325,6 +325,7 @@
             </spark-opts>
             <arg>--sourcePath</arg><arg>${workingDir}/validrelation</arg>
             <arg>--outputPath</arg><arg>${workingDir}/relation/relation</arg>
+            <arg>--removeSet</arg><arg>${removeSet}</arg>
         </spark>
         <ok to="join_dump"/>
         <error to="Kill"/>
@ -339,11 +339,10 @@ public class DumpJobTest {
 		Assertions.assertEquals("50|pensoft_____::00ea4a1cd53806a97d62ea6bf268f2a2", gr.getId());

-		Assertions.assertEquals(2, gr.getOriginalId().size());
+		Assertions.assertEquals(1, gr.getOriginalId().size());
 		Assertions
 			.assertTrue(
-				gr.getOriginalId().contains("50|pensoft_____::00ea4a1cd53806a97d62ea6bf268f2a2")
-					&& gr.getOriginalId().contains("10.3897/oneeco.2.e13718"));
+				gr.getOriginalId().contains("10.3897/oneeco.2.e13718"));

 		Assertions.assertEquals(1, gr.getPid().size());
 		Assertions
@ -892,7 +891,6 @@ public class DumpJobTest {
 		DumpProducts dump = new DumpProducts();
 		dump
 			.run(
-				// false, sourcePath, workingDir.toString() + "/result", communityMapPath, Publication.class,
 				false, sourcePath, workingDir.toString() + "/result", communityMapPath, Publication.class,
 				GraphResult.class, Constants.DUMPTYPE.COMPLETE.getType());
@ -924,6 +922,46 @@ public class DumpJobTest {
 		Assertions.assertTrue(temp.filter("id = '50|dedup_wf_001::01e6a28565ca01376b7548e530c6f6e8'").count() == 1);

+		temp = spark
+			.sql(
+				"select id, inst.articleprocessingcharge.amount, inst.articleprocessingcharge.currency " +
+					"from check " +
+					"lateral view explode (instance) i as inst " +
+					"where inst.articleprocessingcharge is not null");
+
+		Assertions
+			.assertEquals(
+				"3131.64",
+				temp
+					.filter("id = '50|datacite____::05c611fdfc93d7a2a703d1324e28104a'")
+					.collectAsList()
+					.get(0)
+					.getString(1));
+		Assertions
+			.assertEquals(
+				"EUR",
+				temp
+					.filter("id = '50|datacite____::05c611fdfc93d7a2a703d1324e28104a'")
+					.collectAsList()
+					.get(0)
+					.getString(2));
+
+		Assertions
+			.assertEquals(
+				"2578.35",
+				temp
+					.filter("id = '50|dedup_wf_001::01e6a28565ca01376b7548e530c6f6e8'")
+					.collectAsList()
+					.get(0)
+					.getString(1));
+		Assertions
+			.assertEquals(
+				"EUR",
+				temp
+					.filter("id = '50|dedup_wf_001::01e6a28565ca01376b7548e530c6f6e8'")
+					.collectAsList()
+					.get(0)
+					.getString(2));
 	}

 }
@ -9,7 +9,7 @@ import com.github.victools.jsonschema.generator.*;
 import eu.dnetlib.dhp.schema.dump.oaf.graph.*;

-@Disabled
+//@Disabled
 class GenerateJsonSchema {

 	@Test
@ -21,7 +21,7 @@ class GenerateJsonSchema {
 		configBuilder.forFields().withDescriptionResolver(field -> "Description of " + field.getDeclaredName());
 		SchemaGeneratorConfig config = configBuilder.build();
 		SchemaGenerator generator = new SchemaGenerator(config);
-		JsonNode jsonSchema = generator.generateSchema(Relation.class);
+		JsonNode jsonSchema = generator.generateSchema(GraphResult.class);

 		System.out.println(jsonSchema.toString());
 	}
@ -81,8 +81,6 @@ public class DumpRelationTest {
 			"-sourcePath", sourcePath
 		});

-		// dumpCommunityProducts.exec(MOCK_IS_LOOK_UP_URL,Boolean.FALSE, workingDir.toString()+"/dataset",sourcePath,"eu.dnetlib.dhp.schema.oaf.Dataset","eu.dnetlib.dhp.schema.dump.oaf.Dataset");
-
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

 		JavaRDD<Relation> tmp = sc
@ -144,8 +142,6 @@ public class DumpRelationTest {
 			"-sourcePath", sourcePath
 		});

-		// dumpCommunityProducts.exec(MOCK_IS_LOOK_UP_URL,Boolean.FALSE, workingDir.toString()+"/dataset",sourcePath,"eu.dnetlib.dhp.schema.oaf.Dataset","eu.dnetlib.dhp.schema.dump.oaf.Dataset");
-
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

 		JavaRDD<Relation> tmp = sc
@@ -203,4 +199,107 @@ public class DumpRelationTest {
					"and validationDate = '2021-08-06'")
				.count());
	}

	@Test
	public void test3() throws Exception {//
		final String sourcePath = getClass()
			.getResource("/eu/dnetlib/dhp/oa/graph/dump/relation/relation")
			.getPath();

		SparkDumpRelationJob.main(new String[] {
			"-isSparkSessionManaged", Boolean.FALSE.toString(),
			"-outputPath", workingDir.toString() + "/relation",
			"-sourcePath", sourcePath,
			"-removeSet", "isParticipant"
		});

		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

		JavaRDD<Relation> tmp = sc
			.textFile(workingDir.toString() + "/relation")
			.map(item -> OBJECT_MAPPER.readValue(item, Relation.class));

		org.apache.spark.sql.Dataset<Relation> verificationDataset = spark
			.createDataset(tmp.rdd(), Encoders.bean(Relation.class));

		verificationDataset.createOrReplaceTempView("table");

		verificationDataset
			.foreach((ForeachFunction<Relation>) r -> System.out.println(new ObjectMapper().writeValueAsString(r)));

		Dataset<Row> check = spark
			.sql(
				"SELECT reltype.name, source.id source, source.type stype, target.id target,target.type ttype, provenance.provenance "
					+
					"from table ");

		Assertions.assertEquals(22, check.filter("name = 'isProvidedBy'").count());
		Assertions
			.assertEquals(
				22, check
					.filter(
						"name = 'isProvidedBy' and stype = 'datasource' and ttype = 'organization' and " +
							"provenance = 'Harvested'")
					.count());

		Assertions.assertEquals(0, check.filter("name = 'isParticipant'").count());

		Assertions.assertEquals(1, check.filter("name = 'isAuthorInstitutionOf'").count());
		Assertions
			.assertEquals(
				1, check
					.filter(
						"name = 'isAuthorInstitutionOf' and stype = 'organization' and ttype = 'result' " +
							"and provenance = 'Inferred by OpenAIRE'")
					.count());
	}
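	// Not part of the patch: a minimal sketch of how the -removeSet argument could drive the
	// filtering verified by test3 above and test4 below. SparkDumpRelationJob's real
	// implementation is not shown in this diff, so the getReltype().getName() accessor is an
	// assumption based on the "reltype.name" column queried above, and the helper name is
	// illustrative only.
	private static org.apache.spark.sql.Dataset<Relation> dropRemovedSemantics(
		org.apache.spark.sql.Dataset<Relation> relations, String removeSetParam) {
		// e.g. "isParticipant;isAuthorInstitutionOf" -> {isparticipant, isauthorinstitutionof}
		final java.util.Set<String> removeSet = new java.util.HashSet<>(
			java.util.Arrays.asList(removeSetParam.toLowerCase().split(";")));
		// keep only relations whose semantics are not in the remove set
		return relations
			.filter(
				(org.apache.spark.api.java.function.FilterFunction<Relation>) r -> !removeSet
					.contains(r.getReltype().getName().toLowerCase()));
	}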
	@Test
	public void test4() throws Exception {//
		final String sourcePath = getClass()
			.getResource("/eu/dnetlib/dhp/oa/graph/dump/relation/relation")
			.getPath();

		SparkDumpRelationJob.main(new String[] {
			"-isSparkSessionManaged", Boolean.FALSE.toString(),
			"-outputPath", workingDir.toString() + "/relation",
			"-sourcePath", sourcePath,
			"-removeSet", "isParticipant;isAuthorInstitutionOf"
		});

		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

		JavaRDD<Relation> tmp = sc
			.textFile(workingDir.toString() + "/relation")
			.map(item -> OBJECT_MAPPER.readValue(item, Relation.class));

		org.apache.spark.sql.Dataset<Relation> verificationDataset = spark
			.createDataset(tmp.rdd(), Encoders.bean(Relation.class));

		verificationDataset.createOrReplaceTempView("table");

		verificationDataset
			.foreach((ForeachFunction<Relation>) r -> System.out.println(new ObjectMapper().writeValueAsString(r)));

		Dataset<Row> check = spark
			.sql(
				"SELECT reltype.name, source.id source, source.type stype, target.id target,target.type ttype, provenance.provenance "
					+
					"from table ");

		Assertions.assertEquals(22, check.filter("name = 'isProvidedBy'").count());
		Assertions
			.assertEquals(
				22, check
					.filter(
						"name = 'isProvidedBy' and stype = 'datasource' and ttype = 'organization' and " +
							"provenance = 'Harvested'")
					.count());

		Assertions.assertEquals(0, check.filter("name = 'isParticipant'").count());

		Assertions.assertEquals(0, check.filter("name = 'isAuthorInstitutionOf'").count());
	}
}
File diff suppressed because one or more lines are too long

pom.xml
@@ -758,7 +758,7 @@
		<mockito-core.version>3.3.3</mockito-core.version>
		<mongodb.driver.version>3.4.2</mongodb.driver.version>
		<vtd.version>[2.12,3.0)</vtd.version>
-		<dhp-schemas.version>[2.8.22]</dhp-schemas.version>
+		<dhp-schemas.version>[2.9.24-SNAPSHOT]</dhp-schemas.version>
		<dnet-actionmanager-api.version>[4.0.3]</dnet-actionmanager-api.version>
		<dnet-actionmanager-common.version>[6.0.5]</dnet-actionmanager-common.version>
		<dnet-openaire-broker-common.version>[3.1.6]</dnet-openaire-broker-common.version>