forked from D-Net/dnet-hadoop
commit 65bf312360
merge branch with fork master
@@ -0,0 +1,59 @@
package eu.dnetlib.dhp.schema.oaf;

import java.util.List;

import com.google.common.base.Objects;

/**
 * Represents a measure; it must be further described by a system-available resource providing name and description.
 */
public class Measure {

    /**
     * Unique measure identifier.
     */
    private String id;

    /**
     * List of units associated with this measure. KeyValue provides a pair to store the label (key) and the value, plus
     * common provenance information.
     */
    private List<KeyValue> unit;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public List<KeyValue> getUnit() {
        return unit;
    }

    public void setUnit(List<KeyValue> unit) {
        this.unit = unit;
    }

    public void mergeFrom(Measure m) {
        // TODO
    }

    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;
        Measure measure = (Measure) o;
        return Objects.equal(id, measure.id) &&
            Objects.equal(unit, measure.unit);
    }

    @Override
    public int hashCode() {
        return Objects.hashCode(id, unit);
    }
}
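Note that mergeFrom(Measure) is still a TODO in this commit. Purely as an illustration (not part of the change), a merge that keeps the existing id and accumulates the unit entries could look like the sketch below; whether duplicates should be collapsed or trust compared is left open here.

    // Illustrative sketch only: one possible body for the TODO above.
    public void mergeFrom(Measure m) {
        if (m == null) {
            return;
        }
        if (id == null) {
            id = m.getId();
        }
        if (unit == null) {
            unit = m.getUnit();
        } else if (m.getUnit() != null) {
            unit.addAll(m.getUnit()); // naive union, may introduce duplicate units
        }
    }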
@@ -0,0 +1,38 @@
package eu.dnetlib.dhp.schema.oaf;

import java.io.Serializable;
import java.util.Objects;

public class Programme implements Serializable {
    private String code;
    private String description;

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;

        Programme programme = (Programme) o;
        return Objects.equals(code, programme.code);
    }

}
@@ -58,6 +58,8 @@ public class Project extends OafEntity implements Serializable {
 
     private Float fundedamount;
 
+    private List<Programme> programme;
+
     public Field<String> getWebsiteurl() {
         return websiteurl;
     }
@@ -266,6 +268,14 @@ public class Project extends OafEntity implements Serializable {
         this.fundedamount = fundedamount;
     }
 
+    public List<Programme> getProgramme() {
+        return programme;
+    }
+
+    public void setProgramme(List<Programme> programme) {
+        this.programme = programme;
+    }
+
     @Override
     public void mergeFrom(OafEntity e) {
         super.mergeFrom(e);
@@ -320,6 +330,9 @@ public class Project extends OafEntity implements Serializable {
         fundedamount = p.getFundedamount() != null && compareTrust(this, e) < 0
             ? p.getFundedamount()
             : fundedamount;
 
+        programme = mergeLists(programme, p.getProgramme());
+
         mergeOAFDataInfo(e);
     }
 }
@@ -41,6 +41,16 @@ public class Relation extends Oaf {
      */
     private String target;
 
+    /**
+     * Was this relationship authoritatively validated?
+     */
+    private Boolean validated;
+
+    /**
+     * The date when this relationship was authoritatively validated.
+     */
+    private String validationDate;
+
     /**
      * List of relation specific properties. Values include 'similarityLevel', indicating the similarity score between a
      * pair of publications.
@@ -95,6 +105,22 @@ public class Relation extends Oaf {
         this.properties = properties;
     }
 
+    public Boolean getValidated() {
+        return validated;
+    }
+
+    public void setValidated(Boolean validated) {
+        this.validated = validated;
+    }
+
+    public String getValidationDate() {
+        return validationDate;
+    }
+
+    public void setValidationDate(String validationDate) {
+        this.validationDate = validationDate;
+    }
+
     public void mergeFrom(final Relation r) {
 
         checkArgument(Objects.equals(getSource(), r.getSource()), "source ids must be equal");
@@ -137,4 +163,5 @@ public class Relation extends Oaf {
     public int hashCode() {
         return Objects.hash(relType, subRelType, relClass, source, target, collectedfrom);
     }
+
 }
@@ -9,6 +9,8 @@ import java.util.stream.Collectors;
 
 public class Result extends OafEntity implements Serializable {
 
+    private List<Measure> measures;
+
     private List<Author> author;
 
     // resulttype allows subclassing results into publications | datasets | software
@@ -53,6 +55,14 @@ public class Result extends OafEntity implements Serializable {
 
     private List<Instance> instance;
 
+    public List<Measure> getMeasures() {
+        return measures;
+    }
+
+    public void setMeasures(List<Measure> measures) {
+        this.measures = measures;
+    }
+
     public List<Author> getAuthor() {
         return author;
     }
@@ -231,6 +241,8 @@ public class Result extends OafEntity implements Serializable {
 
         Result r = (Result) e;
 
+        // TODO consider merging also Measures
+
         instance = mergeLists(instance, r.getInstance());
 
         if (r.getBestaccessright() != null && compareTrust(this, r) < 0)
@@ -259,9 +271,9 @@ public class Result extends OafEntity implements Serializable {
         StructuredProperty newMainTitle = null;
         if (r.getTitle() != null) {
             newMainTitle = getMainTitle(r.getTitle());
-            if (newMainTitle != null && title != null) {
+            if (newMainTitle != null) {
                 final StructuredProperty p = newMainTitle;
-                title = title.stream().filter(t -> t != p).collect(Collectors.toList());
+                r.setTitle(r.getTitle().stream().filter(t -> t != p).collect(Collectors.toList()));
             }
         }
@@ -1,11 +1,25 @@
 package eu.dnetlib.dhp.schema.scholexplorer;
 
+import java.util.List;
+
+import eu.dnetlib.dhp.schema.oaf.KeyValue;
 import eu.dnetlib.dhp.schema.oaf.Relation;
 
 public class DLIRelation extends Relation {
 
     private String dateOfCollection;
 
+    private List<KeyValue> collectedFrom;
+
+    public List<KeyValue> getCollectedFrom() {
+        return collectedFrom;
+    }
+
+    public void setCollectedFrom(List<KeyValue> collectedFrom) {
+        this.collectedFrom = collectedFrom;
+    }
+
     public String getDateOfCollection() {
         return dateOfCollection;
     }
@@ -0,0 +1,57 @@
package eu.dnetlib.dhp.schema.oaf;

import java.io.IOException;
import java.util.List;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;

public class MeasureTest {

    public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
        .setSerializationInclusion(JsonInclude.Include.NON_NULL);

    @Test
    public void testMeasureSerialization() throws IOException {

        Measure popularity = new Measure();
        popularity.setId("popularity");
        popularity
            .setUnit(
                Lists
                    .newArrayList(
                        unit("score", "0.5")));

        Measure influence = new Measure();
        influence.setId("influence");
        influence
            .setUnit(
                Lists
                    .newArrayList(
                        unit("score", "0.3")));

        List<Measure> m = Lists.newArrayList(popularity, influence);

        String s = OBJECT_MAPPER.writeValueAsString(m);
        System.out.println(s);

        List<Measure> mm = OBJECT_MAPPER.readValue(s, new TypeReference<List<Measure>>() {
        });

        Assertions.assertNotNull(mm);
    }

    private KeyValue unit(String key, String value) {
        KeyValue unit = new KeyValue();
        unit.setKey(key);
        unit.setValue(value);
        return unit;
    }

}
@@ -38,48 +38,6 @@
             <version>${project.version}</version>
         </dependency>
 
-        <dependency>
-            <groupId>eu.dnetlib</groupId>
-            <artifactId>dnet-actionmanager-common</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>eu.dnetlib</groupId>
-                    <artifactId>dnet-openaireplus-mapping-utils</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>saxonica</groupId>
-                    <artifactId>saxon</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>saxonica</groupId>
-                    <artifactId>saxon-dom</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>jgrapht</groupId>
-                    <artifactId>jgrapht</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>net.sf.ehcache</groupId>
-                    <artifactId>ehcache</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.springframework</groupId>
-                    <artifactId>spring-test</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.*</groupId>
-                    <artifactId>*</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>apache</groupId>
-                    <artifactId>*</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>eu.dnetlib</groupId>
-            <artifactId>dnet-openaire-data-protos</artifactId>
-        </dependency>
-
         <dependency>
             <groupId>net.sf.saxon</groupId>
@@ -100,11 +58,15 @@
             <artifactId>jaxen</artifactId>
         </dependency>
 
+        <!-- https://mvnrepository.com/artifact/org.apache.commons/commons-csv -->
         <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-distcp</artifactId>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-csv</artifactId>
+            <version>1.8</version>
         </dependency>
 
 
     </dependencies>
 
 </project>
@@ -0,0 +1,123 @@
package eu.dnetlib.dhp.actionmanager.project;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.util.HashMap;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.HdfsSupport;
import scala.Tuple2;

public class PrepareProgramme {

    private static final Logger log = LoggerFactory.getLogger(PrepareProgramme.class);
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    public static void main(String[] args) throws Exception {

        String jsonConfiguration = IOUtils
            .toString(
                PrepareProgramme.class
                    .getResourceAsStream(
                        "/eu/dnetlib/dhp/actionmanager/project/prepare_programme_parameters.json"));

        final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);

        parser.parseArgument(args);

        Boolean isSparkSessionManaged = Optional
            .ofNullable(parser.get("isSparkSessionManaged"))
            .map(Boolean::valueOf)
            .orElse(Boolean.TRUE);

        log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

        final String programmePath = parser.get("programmePath");
        log.info("programmePath {}: ", programmePath);

        final String outputPath = parser.get("outputPath");
        log.info("outputPath {}: ", outputPath);

        SparkConf conf = new SparkConf();

        runWithSparkSession(
            conf,
            isSparkSessionManaged,
            spark -> {
                removeOutputDir(spark, outputPath);
                exec(spark, programmePath, outputPath);
            });
    }

    private static void removeOutputDir(SparkSession spark, String path) {
        HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration());
    }

    private static void exec(SparkSession spark, String programmePath, String outputPath) {
        Dataset<CSVProgramme> programme = readPath(spark, programmePath, CSVProgramme.class);

        programme
            .toJavaRDD()
            .filter(p -> !p.getCode().contains("FP7"))
            .mapToPair(csvProgramme -> new Tuple2<>(csvProgramme.getCode(), csvProgramme))
            .reduceByKey((a, b) -> {
                if (StringUtils.isEmpty(a.getShortTitle())) {
                    if (StringUtils.isEmpty(b.getShortTitle())) {
                        if (StringUtils.isEmpty(a.getTitle())) {
                            if (StringUtils.isNotEmpty(b.getTitle())) {
                                a.setShortTitle(b.getTitle());
                                a.setLanguage(b.getLanguage());
                            }
                        } else { // a.getTitle() is not empty
                            if (StringUtils.isEmpty(b.getTitle())) {
                                a.setShortTitle(a.getTitle());
                            } else {
                                if (b.getLanguage().equalsIgnoreCase("en")) {
                                    a.setShortTitle(b.getTitle());
                                    a.setLanguage(b.getLanguage());
                                } else {
                                    a.setShortTitle(a.getTitle());
                                }
                            }
                        }
                    } else { // b.getShortTitle() is not empty
                        a.setShortTitle(b.getShortTitle());
                        // a.setLanguage(b.getLanguage());
                    }
                }
                return a;

            })
            .map(p -> {
                CSVProgramme csvProgramme = p._2();
                if (StringUtils.isEmpty(csvProgramme.getShortTitle())) {
                    csvProgramme.setShortTitle(csvProgramme.getTitle());
                }
                return OBJECT_MAPPER.writeValueAsString(csvProgramme);
            })
            .saveAsTextFile(outputPath);

    }

    public static <R> Dataset<R> readPath(
        SparkSession spark, String inputPath, Class<R> clazz) {
        return spark
            .read()
            .textFile(inputPath)
            .map((MapFunction<String, R>) value -> OBJECT_MAPPER.readValue(value, clazz), Encoders.bean(clazz));
    }

}
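To make the shortTitle resolution above concrete, a purely illustrative case: if two programme rows share the same code, one carrying only a non-English title and the other an English one, the reducer keeps the English title as shortTitle and takes over its language; rows whose code contains "FP7" are filtered out beforehand, and any programme still lacking a shortTitle falls back to its title in the final map step before being written out as JSON text.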
@@ -0,0 +1,120 @@
package eu.dnetlib.dhp.actionmanager.project;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.util.*;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme;
import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProject;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.HdfsSupport;
import scala.Tuple2;

public class PrepareProjects {

    private static final Logger log = LoggerFactory.getLogger(PrepareProjects.class);
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
    private static final HashMap<String, CSVProgramme> programmeMap = new HashMap<>();

    public static void main(String[] args) throws Exception {

        String jsonConfiguration = IOUtils
            .toString(
                PrepareProjects.class
                    .getResourceAsStream(
                        "/eu/dnetlib/dhp/actionmanager/project/prepare_project_parameters.json"));

        final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);

        parser.parseArgument(args);

        Boolean isSparkSessionManaged = Optional
            .ofNullable(parser.get("isSparkSessionManaged"))
            .map(Boolean::valueOf)
            .orElse(Boolean.TRUE);

        log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

        final String projectPath = parser.get("projectPath");
        log.info("projectPath {}: ", projectPath);

        final String outputPath = parser.get("outputPath");
        log.info("outputPath {}: ", outputPath);

        final String dbProjectPath = parser.get("dbProjectPath");
        log.info("dbProjectPath {}: ", dbProjectPath);

        SparkConf conf = new SparkConf();

        runWithSparkSession(
            conf,
            isSparkSessionManaged,
            spark -> {
                removeOutputDir(spark, outputPath);
                exec(spark, projectPath, dbProjectPath, outputPath);
            });
    }

    private static void removeOutputDir(SparkSession spark, String path) {
        HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration());
    }

    private static void exec(SparkSession spark, String projectPath, String dbProjectPath, String outputPath) {
        Dataset<CSVProject> project = readPath(spark, projectPath, CSVProject.class);
        Dataset<ProjectSubset> dbProjects = readPath(spark, dbProjectPath, ProjectSubset.class);

        dbProjects
            .joinWith(project, dbProjects.col("code").equalTo(project.col("id")), "left")
            .flatMap(getTuple2CSVProjectFlatMapFunction(), Encoders.bean(CSVProject.class))
            .filter(Objects::nonNull)
            .write()
            .mode(SaveMode.Overwrite)
            .option("compression", "gzip")
            .json(outputPath);

    }

    private static FlatMapFunction<Tuple2<ProjectSubset, CSVProject>, CSVProject> getTuple2CSVProjectFlatMapFunction() {
        return (FlatMapFunction<Tuple2<ProjectSubset, CSVProject>, CSVProject>) value -> {
            Optional<CSVProject> csvProject = Optional.ofNullable(value._2());
            List<CSVProject> csvProjectList = new ArrayList<>();
            if (csvProject.isPresent()) {

                String[] programme = csvProject.get().getProgramme().split(";");
                Arrays
                    .stream(programme)
                    .forEach(p -> {
                        CSVProject proj = new CSVProject();
                        proj.setProgramme(p);
                        proj.setId(csvProject.get().getId());
                        csvProjectList.add(proj);
                    });
            }
            return csvProjectList.iterator();
        };
    }

    public static <R> Dataset<R> readPath(
        SparkSession spark, String inputPath, Class<R> clazz) {
        return spark
            .read()
            .textFile(inputPath)
            .map((MapFunction<String, R>) value -> OBJECT_MAPPER.readValue(value, clazz), Encoders.bean(clazz));
    }
}
@@ -0,0 +1,17 @@
package eu.dnetlib.dhp.actionmanager.project;

import java.io.Serializable;

public class ProjectSubset implements Serializable {

    private String code;

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }
}
@@ -0,0 +1,115 @@
package eu.dnetlib.dhp.actionmanager.project;

import java.io.BufferedWriter;
import java.io.Closeable;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.sql.ResultSet;
import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Function;

import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.DbClient;

public class ReadProjectsFromDB implements Closeable {

    private final DbClient dbClient;
    private static final Log log = LogFactory.getLog(ReadProjectsFromDB.class);
    private final Configuration conf;
    private final BufferedWriter writer;
    private final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    private final static String query = "SELECT code " +
        "from projects where id like 'corda__h2020%' ";

    public static void main(final String[] args) throws Exception {
        final ArgumentApplicationParser parser = new ArgumentApplicationParser(
            IOUtils
                .toString(
                    ReadProjectsFromDB.class
                        .getResourceAsStream(
                            "/eu/dnetlib/dhp/actionmanager/project/read_projects_db.json")));

        parser.parseArgument(args);

        final String dbUrl = parser.get("postgresUrl");
        final String dbUser = parser.get("postgresUser");
        final String dbPassword = parser.get("postgresPassword");
        final String hdfsPath = parser.get("hdfsPath");
        final String hdfsNameNode = parser.get("hdfsNameNode");

        try (final ReadProjectsFromDB rbl = new ReadProjectsFromDB(hdfsPath, hdfsNameNode, dbUrl, dbUser,
            dbPassword)) {

            log.info("Processing projects...");
            rbl.execute(query, rbl::processProjectsEntry);

        }
    }

    public void execute(final String sql, final Function<ResultSet, List<ProjectSubset>> producer) throws Exception {

        final Consumer<ResultSet> consumer = rs -> producer.apply(rs).forEach(r -> writeProject(r));

        dbClient.processResults(sql, consumer);
    }

    public List<ProjectSubset> processProjectsEntry(ResultSet rs) {
        try {
            ProjectSubset p = new ProjectSubset();
            p.setCode(rs.getString("code"));

            return Arrays.asList(p);

        } catch (final Exception e) {
            throw new RuntimeException(e);
        }
    }

    protected void writeProject(final ProjectSubset r) {
        try {
            writer.write(OBJECT_MAPPER.writeValueAsString(r));
            writer.newLine();
        } catch (final Exception e) {
            throw new RuntimeException(e);
        }
    }

    public ReadProjectsFromDB(
        final String hdfsPath, String hdfsNameNode, final String dbUrl, final String dbUser, final String dbPassword)
        throws Exception {

        this.dbClient = new DbClient(dbUrl, dbUser, dbPassword);
        this.conf = new Configuration();
        this.conf.set("fs.defaultFS", hdfsNameNode);
        FileSystem fileSystem = FileSystem.get(this.conf);
        Path hdfsWritePath = new Path(hdfsPath);
        FSDataOutputStream fsDataOutputStream = null;
        if (fileSystem.exists(hdfsWritePath)) {
            fileSystem.delete(hdfsWritePath, false);
        }
        fsDataOutputStream = fileSystem.create(hdfsWritePath);

        this.writer = new BufferedWriter(new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8));
    }

    @Override
    public void close() throws IOException {
        dbClient.close();
        writer.close();
    }
}
@@ -0,0 +1,161 @@
package eu.dnetlib.dhp.actionmanager.project;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Consumer;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.api.java.function.MapGroupsFunction;
import org.apache.spark.rdd.SequenceFileRDDFunctions;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme;
import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProject;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.schema.action.AtomicAction;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.oaf.Programme;
import eu.dnetlib.dhp.schema.oaf.Project;
import eu.dnetlib.dhp.utils.DHPUtils;
import scala.Function1;
import scala.Tuple2;
import scala.runtime.BoxedUnit;

public class SparkAtomicActionJob {
    private static final Logger log = LoggerFactory.getLogger(SparkAtomicActionJob.class);
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
    private static final HashMap<String, String> programmeMap = new HashMap<>();

    public static void main(String[] args) throws Exception {

        String jsonConfiguration = IOUtils
            .toString(
                SparkAtomicActionJob.class
                    .getResourceAsStream(
                        "/eu/dnetlib/dhp/actionmanager/project/action_set_parameters.json"));

        final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);

        parser.parseArgument(args);

        Boolean isSparkSessionManaged = Optional
            .ofNullable(parser.get("isSparkSessionManaged"))
            .map(Boolean::valueOf)
            .orElse(Boolean.TRUE);

        log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

        String projectPath = parser.get("projectPath");
        log.info("projectPath: {}", projectPath);

        final String outputPath = parser.get("outputPath");
        log.info("outputPath {}: ", outputPath);

        final String programmePath = parser.get("programmePath");
        log.info("programmePath {}: ", programmePath);

        SparkConf conf = new SparkConf();

        runWithSparkSession(
            conf,
            isSparkSessionManaged,
            spark -> {
                removeOutputDir(spark, outputPath);
                getAtomicActions(
                    spark,
                    projectPath,
                    programmePath,
                    outputPath);
            });
    }

    private static void removeOutputDir(SparkSession spark, String path) {
        HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration());
    }

    private static void getAtomicActions(SparkSession spark, String projectPatH,
        String programmePath,
        String outputPath) {

        Dataset<CSVProject> project = readPath(spark, projectPatH, CSVProject.class);
        Dataset<CSVProgramme> programme = readPath(spark, programmePath, CSVProgramme.class);

        project
            .joinWith(programme, project.col("programme").equalTo(programme.col("code")), "left")
            .map(c -> {
                CSVProject csvProject = c._1();
                Optional<CSVProgramme> csvProgramme = Optional.ofNullable(c._2());
                if (csvProgramme.isPresent()) {
                    Project p = new Project();
                    p
                        .setId(
                            createOpenaireId(
                                ModelSupport.entityIdPrefix.get("project"),
                                "corda__h2020", csvProject.getId()));
                    Programme pm = new Programme();
                    pm.setCode(csvProject.getProgramme());
                    pm.setDescription(csvProgramme.get().getShortTitle());
                    p.setProgramme(Arrays.asList(pm));
                    return p;
                }

                return null;
            }, Encoders.bean(Project.class))
            .filter(Objects::nonNull)
            .groupByKey(
                (MapFunction<Project, String>) p -> p.getId(),
                Encoders.STRING())
            .mapGroups((MapGroupsFunction<String, Project, Project>) (s, it) -> {
                Project first = it.next();
                it.forEachRemaining(p -> {
                    first.mergeFrom(p);
                });
                return first;
            }, Encoders.bean(Project.class))
            .toJavaRDD()
            .map(p -> new AtomicAction(Project.class, p))
            .mapToPair(
                aa -> new Tuple2<>(new Text(aa.getClazz().getCanonicalName()),
                    new Text(OBJECT_MAPPER.writeValueAsString(aa))))
            .saveAsHadoopFile(outputPath, Text.class, Text.class, SequenceFileOutputFormat.class);

    }

    public static <R> Dataset<R> readPath(
        SparkSession spark, String inputPath, Class<R> clazz) {
        return spark
            .read()
            .textFile(inputPath)
            .map((MapFunction<String, R>) value -> OBJECT_MAPPER.readValue(value, clazz), Encoders.bean(clazz));
    }

    public static String createOpenaireId(
        final String prefix, final String nsPrefix, final String id) {

        return String.format("%s|%s::%s", prefix, nsPrefix, DHPUtils.md5(id));

    }
}
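As an aside, the identifiers produced by createOpenaireId above follow the prefix|nsPrefix::md5(id) pattern. A purely illustrative call (the grant number is made up, and the assumption that the project entity prefix resolves to "40" is not taken from this commit):

    // Illustrative only: values below are placeholders, not part of the change.
    String openaireId = createOpenaireId(
        ModelSupport.entityIdPrefix.get("project"), // assumed to resolve to "40"
        "corda__h2020",
        "101003688");
    // -> "40|corda__h2020::" + DHPUtils.md5("101003688")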
@@ -0,0 +1,37 @@
package eu.dnetlib.dhp.actionmanager.project.csvutils;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.lang.reflect.FieldUtils;

public class CSVParser {

    public <R> List<R> parse(String csvFile, String classForName)
        throws ClassNotFoundException, IOException, IllegalAccessException, InstantiationException {
        final CSVFormat format = CSVFormat.EXCEL
            .withHeader()
            .withDelimiter(';')
            .withQuote('"')
            .withTrim();
        List<R> ret = new ArrayList<>();
        final org.apache.commons.csv.CSVParser parser = org.apache.commons.csv.CSVParser.parse(csvFile, format);
        final Set<String> headers = parser.getHeaderMap().keySet();
        Class<?> clazz = Class.forName(classForName);
        for (CSVRecord csvRecord : parser.getRecords()) {
            final Object cc = clazz.newInstance();
            for (String header : headers) {
                FieldUtils.writeField(cc, header, csvRecord.get(header), true);

            }
            ret.add((R) cc);
        }

        return ret;
    }
}
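A minimal, illustrative use of the reflective parser above (not part of the commit); the two-line CSV literal is made up, and the header names must match the bean field names because FieldUtils.writeField is driven directly by the CSV header:

    // Illustrative sketch only; parse() declares the reflection/IO exceptions shown above.
    String csv = "rcn;code;title;shortTitle;language\n"
        + "664;H2020-EU.1.1.;Excellent science;;en";
    CSVParser csvParser = new CSVParser();
    List<CSVProgramme> programmes = csvParser
        .parse(csv, "eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme");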
@@ -0,0 +1,52 @@
package eu.dnetlib.dhp.actionmanager.project.csvutils;

import java.io.Serializable;

public class CSVProgramme implements Serializable {
    private String rcn;
    private String code;
    private String title;
    private String shortTitle;
    private String language;

    public String getRcn() {
        return rcn;
    }

    public void setRcn(String rcn) {
        this.rcn = rcn;
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getShortTitle() {
        return shortTitle;
    }

    public void setShortTitle(String shortTitle) {
        this.shortTitle = shortTitle;
    }

    public String getLanguage() {
        return language;
    }

    public void setLanguage(String language) {
        this.language = language;
    }
}
@@ -0,0 +1,197 @@
package eu.dnetlib.dhp.actionmanager.project.csvutils;

import java.io.Serializable;

public class CSVProject implements Serializable {
    private String rcn;
    private String id;
    private String acronym;
    private String status;
    private String programme;
    private String topics;
    private String frameworkProgramme;
    private String title;
    private String startDate;
    private String endDate;
    private String projectUrl;
    private String objective;
    private String totalCost;
    private String ecMaxContribution;
    private String call;
    private String fundingScheme;
    private String coordinator;
    private String coordinatorCountry;
    private String participants;
    private String participantCountries;
    private String subjects;

    public String getRcn() {
        return rcn;
    }

    public void setRcn(String rcn) {
        this.rcn = rcn;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getAcronym() {
        return acronym;
    }

    public void setAcronym(String acronym) {
        this.acronym = acronym;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public String getProgramme() {
        return programme;
    }

    public void setProgramme(String programme) {
        this.programme = programme;
    }

    public String getTopics() {
        return topics;
    }

    public void setTopics(String topics) {
        this.topics = topics;
    }

    public String getFrameworkProgramme() {
        return frameworkProgramme;
    }

    public void setFrameworkProgramme(String frameworkProgramme) {
        this.frameworkProgramme = frameworkProgramme;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getStartDate() {
        return startDate;
    }

    public void setStartDate(String startDate) {
        this.startDate = startDate;
    }

    public String getEndDate() {
        return endDate;
    }

    public void setEndDate(String endDate) {
        this.endDate = endDate;
    }

    public String getProjectUrl() {
        return projectUrl;
    }

    public void setProjectUrl(String projectUrl) {
        this.projectUrl = projectUrl;
    }

    public String getObjective() {
        return objective;
    }

    public void setObjective(String objective) {
        this.objective = objective;
    }

    public String getTotalCost() {
        return totalCost;
    }

    public void setTotalCost(String totalCost) {
        this.totalCost = totalCost;
    }

    public String getEcMaxContribution() {
        return ecMaxContribution;
    }

    public void setEcMaxContribution(String ecMaxContribution) {
        this.ecMaxContribution = ecMaxContribution;
    }

    public String getCall() {
        return call;
    }

    public void setCall(String call) {
        this.call = call;
    }

    public String getFundingScheme() {
        return fundingScheme;
    }

    public void setFundingScheme(String fundingScheme) {
        this.fundingScheme = fundingScheme;
    }

    public String getCoordinator() {
        return coordinator;
    }

    public void setCoordinator(String coordinator) {
        this.coordinator = coordinator;
    }

    public String getCoordinatorCountry() {
        return coordinatorCountry;
    }

    public void setCoordinatorCountry(String coordinatorCountry) {
        this.coordinatorCountry = coordinatorCountry;
    }

    public String getParticipants() {
        return participants;
    }

    public void setParticipants(String participants) {
        this.participants = participants;
    }

    public String getParticipantCountries() {
        return participantCountries;
    }

    public void setParticipantCountries(String participantCountries) {
        this.participantCountries = participantCountries;
    }

    public String getSubjects() {
        return subjects;
    }

    public void setSubjects(String subjects) {
        this.subjects = subjects;
    }

}
@@ -0,0 +1,97 @@
package eu.dnetlib.dhp.actionmanager.project.csvutils;

import java.io.BufferedWriter;
import java.io.Closeable;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.actionmanager.project.httpconnector.HttpConnector;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;

public class ReadCSV implements Closeable {
    private static final Log log = LogFactory.getLog(ReadCSV.class);
    private final Configuration conf;
    private final BufferedWriter writer;
    private final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
    private String csvFile;

    public static void main(final String[] args) throws Exception {
        final ArgumentApplicationParser parser = new ArgumentApplicationParser(
            IOUtils
                .toString(
                    ReadCSV.class
                        .getResourceAsStream(
                            "/eu/dnetlib/dhp/actionmanager/project/parameters.json")));

        parser.parseArgument(args);

        final String fileURL = parser.get("fileURL");
        final String hdfsPath = parser.get("hdfsPath");
        final String hdfsNameNode = parser.get("hdfsNameNode");
        final String classForName = parser.get("classForName");

        try (final ReadCSV readCSV = new ReadCSV(hdfsPath, hdfsNameNode, fileURL)) {

            log.info("Getting CSV file...");
            readCSV.execute(classForName);

        }
    }

    public void execute(final String classForName) throws Exception {
        CSVParser csvParser = new CSVParser();
        csvParser
            .parse(csvFile, classForName)
            .stream()
            .forEach(p -> write(p));

    }

    @Override
    public void close() throws IOException {
        writer.close();
    }

    public ReadCSV(
        final String hdfsPath,
        final String hdfsNameNode,
        final String fileURL)
        throws Exception {
        this.conf = new Configuration();
        this.conf.set("fs.defaultFS", hdfsNameNode);
        HttpConnector httpConnector = new HttpConnector();
        FileSystem fileSystem = FileSystem.get(this.conf);
        Path hdfsWritePath = new Path(hdfsPath);
        FSDataOutputStream fsDataOutputStream = null;
        if (fileSystem.exists(hdfsWritePath)) {
            fileSystem.delete(hdfsWritePath, false);
        }
        fsDataOutputStream = fileSystem.create(hdfsWritePath);

        this.writer = new BufferedWriter(new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8));
        this.csvFile = httpConnector.getInputSource(fileURL);
    }

    protected void write(final Object p) {
        try {
            writer.write(OBJECT_MAPPER.writeValueAsString(p));
            writer.newLine();
        } catch (final Exception e) {
            throw new RuntimeException(e);
        }
    }

}
@@ -0,0 +1,20 @@
package eu.dnetlib.dhp.actionmanager.project.httpconnector;

import java.util.LinkedList;

public class CollectorPluginErrorLogList extends LinkedList<String> {

    private static final long serialVersionUID = -6925786561303289704L;

    @Override
    public String toString() {
        String log = new String();
        int index = 0;
        for (String errorMessage : this) {
            log += String.format("Retry #%s: %s / ", index++, errorMessage);
        }
        return log;
    }

}
@@ -0,0 +1,20 @@
package eu.dnetlib.dhp.actionmanager.project.httpconnector;

public class CollectorServiceException extends Exception {

    private static final long serialVersionUID = 7523999812098059764L;

    public CollectorServiceException(String string) {
        super(string);
    }

    public CollectorServiceException(String string, Throwable exception) {
        super(string, exception);
    }

    public CollectorServiceException(Throwable exception) {
        super(exception);
    }

}
@ -0,0 +1,240 @@
|
||||||
|
|
||||||
|
package eu.dnetlib.dhp.actionmanager.project.httpconnector;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.io.InputStream;
|
||||||
|
import java.net.*;
|
||||||
|
import java.security.GeneralSecurityException;
|
||||||
|
import java.security.cert.X509Certificate;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
import javax.net.ssl.HttpsURLConnection;
|
||||||
|
import javax.net.ssl.SSLContext;
|
||||||
|
import javax.net.ssl.TrustManager;
|
||||||
|
import javax.net.ssl.X509TrustManager;
|
||||||
|
|
||||||
|
import org.apache.commons.io.IOUtils;
|
||||||
|
import org.apache.commons.lang3.math.NumberUtils;
|
||||||
|
import org.apache.commons.logging.Log;
|
||||||
|
import org.apache.commons.logging.LogFactory;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @author jochen, michele, andrea
|
||||||
|
*/
|
||||||
|
public class HttpConnector {
|
||||||
|
|
||||||
|
private static final Log log = LogFactory.getLog(HttpConnector.class);
|
||||||
|
|
||||||
|
private int maxNumberOfRetry = 6;
|
||||||
|
private int defaultDelay = 120; // seconds
|
||||||
|
private int readTimeOut = 120; // seconds
|
||||||
|
|
||||||
|
private String responseType = null;
|
||||||
|
|
||||||
|
private String userAgent = "Mozilla/5.0 (compatible; OAI; +http://www.openaire.eu)";
|
||||||
|
|
||||||
|
public HttpConnector() {
|
||||||
|
CookieHandler.setDefault(new CookieManager(null, CookiePolicy.ACCEPT_ALL));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Given the URL returns the content via HTTP GET
|
||||||
|
*
|
||||||
|
* @param requestUrl the URL
|
||||||
|
* @return the content of the downloaded resource
|
||||||
|
* @throws CollectorServiceException when retrying more than maxNumberOfRetry times
|
||||||
|
*/
|
||||||
|
public String getInputSource(final String requestUrl) throws CollectorServiceException {
|
||||||
|
return attemptDownlaodAsString(requestUrl, 1, new CollectorPluginErrorLogList());
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Given the URL returns the content as a stream via HTTP GET
|
||||||
|
*
|
||||||
|
* @param requestUrl the URL
|
||||||
|
* @return the content of the downloaded resource as InputStream
|
||||||
|
* @throws CollectorServiceException when retrying more than maxNumberOfRetry times
|
||||||
|
*/
|
||||||
|
public InputStream getInputSourceAsStream(final String requestUrl) throws CollectorServiceException {
|
||||||
|
return attemptDownload(requestUrl, 1, new CollectorPluginErrorLogList());
|
||||||
|
}
|
||||||
|
|
||||||
|
private String attemptDownlaodAsString(final String requestUrl, final int retryNumber,
|
||||||
|
final CollectorPluginErrorLogList errorList)
|
||||||
|
throws CollectorServiceException {
|
||||||
|
try {
|
||||||
|
InputStream s = attemptDownload(requestUrl, 1, new CollectorPluginErrorLogList());
|
||||||
|
try {
|
||||||
|
return IOUtils.toString(s);
|
||||||
|
} catch (IOException e) {
|
||||||
|
log.error("error while retrieving from http-connection occured: " + requestUrl, e);
|
||||||
|
Thread.sleep(defaultDelay * 1000);
|
||||||
|
errorList.add(e.getMessage());
|
||||||
|
return attemptDownlaodAsString(requestUrl, retryNumber + 1, errorList);
|
||||||
|
} finally {
|
||||||
|
IOUtils.closeQuietly(s);
|
||||||
|
}
|
||||||
|
} catch (InterruptedException e) {
|
||||||
|
throw new CollectorServiceException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private InputStream attemptDownload(final String requestUrl, final int retryNumber,
|
||||||
|
final CollectorPluginErrorLogList errorList)
|
||||||
|
throws CollectorServiceException {
|
||||||
|
|
||||||
|
if (retryNumber > maxNumberOfRetry) {
|
||||||
|
throw new CollectorServiceException("Max number of retries exceeded. Cause: \n " + errorList);
|
||||||
|
}
|
||||||
|
|
||||||
|
log.debug("Downloading " + requestUrl + " - try: " + retryNumber);
|
||||||
|
try {
|
||||||
|
InputStream input = null;
|
||||||
|
|
||||||
|
try {
|
||||||
|
				final HttpURLConnection urlConn = (HttpURLConnection) new URL(requestUrl).openConnection();
				urlConn.setInstanceFollowRedirects(false);
				urlConn.setReadTimeout(readTimeOut * 1000);
				urlConn.addRequestProperty("User-Agent", userAgent);

				if (log.isDebugEnabled()) {
					logHeaderFields(urlConn);
				}

				int retryAfter = obtainRetryAfter(urlConn.getHeaderFields());
				if (retryAfter > 0 && urlConn.getResponseCode() == HttpURLConnection.HTTP_UNAVAILABLE) {
					log.warn("waiting and repeating request after " + retryAfter + " sec.");
					Thread.sleep(retryAfter * 1000);
					errorList.add("503 Service Unavailable");
					urlConn.disconnect();
					return attemptDownload(requestUrl, retryNumber + 1, errorList);
				} else if ((urlConn.getResponseCode() == HttpURLConnection.HTTP_MOVED_PERM)
					|| (urlConn.getResponseCode() == HttpURLConnection.HTTP_MOVED_TEMP)) {
					final String newUrl = obtainNewLocation(urlConn.getHeaderFields());
					log.debug("The requested url has been moved to " + newUrl);
					errorList
						.add(
							String
								.format(
									"%s %s. Moved to: %s", urlConn.getResponseCode(), urlConn.getResponseMessage(),
									newUrl));
					urlConn.disconnect();
					return attemptDownload(newUrl, retryNumber + 1, errorList);
				} else if (urlConn.getResponseCode() != HttpURLConnection.HTTP_OK) {
					log
						.error(
							String
								.format("HTTP error: %s %s", urlConn.getResponseCode(), urlConn.getResponseMessage()));
					Thread.sleep(defaultDelay * 1000);
					errorList.add(String.format("%s %s", urlConn.getResponseCode(), urlConn.getResponseMessage()));
					urlConn.disconnect();
					return attemptDownload(requestUrl, retryNumber + 1, errorList);
				} else {
					input = urlConn.getInputStream();
					responseType = urlConn.getContentType();
					return input;
				}
			} catch (IOException e) {
				log.error("error while retrieving from http-connection occurred: " + requestUrl, e);
				Thread.sleep(defaultDelay * 1000);
				errorList.add(e.getMessage());
				return attemptDownload(requestUrl, retryNumber + 1, errorList);
			}
		} catch (InterruptedException e) {
			throw new CollectorServiceException(e);
		}
	}

	private void logHeaderFields(final HttpURLConnection urlConn) throws IOException {
		log.debug("StatusCode: " + urlConn.getResponseMessage());

		for (Map.Entry<String, List<String>> e : urlConn.getHeaderFields().entrySet()) {
			if (e.getKey() != null) {
				for (String v : e.getValue()) {
					log.debug(" key: " + e.getKey() + " - value: " + v);
				}
			}
		}
	}

	private int obtainRetryAfter(final Map<String, List<String>> headerMap) {
		for (String key : headerMap.keySet()) {
			if ((key != null) && key.toLowerCase().equals("retry-after") && (headerMap.get(key).size() > 0)
				&& NumberUtils.isCreatable(headerMap.get(key).get(0))) {
				return Integer
					.parseInt(headerMap.get(key).get(0)) + 10;
			}
		}
		return -1;
	}

	private String obtainNewLocation(final Map<String, List<String>> headerMap) throws CollectorServiceException {
		for (String key : headerMap.keySet()) {
			if ((key != null) && key.toLowerCase().equals("location") && (headerMap.get(key).size() > 0)) {
				return headerMap.get(key).get(0);
			}
		}
		throw new CollectorServiceException("The requested url has been MOVED, but 'location' param is MISSING");
	}

	/**
	 * register for https scheme; this is a workaround and not intended for the use in trusted environments
	 */
	public void initTrustManager() {
		final X509TrustManager tm = new X509TrustManager() {

			@Override
			public void checkClientTrusted(final X509Certificate[] xcs, final String string) {
			}

			@Override
			public void checkServerTrusted(final X509Certificate[] xcs, final String string) {
			}

			@Override
			public X509Certificate[] getAcceptedIssuers() {
				return null;
			}
		};
		try {
			final SSLContext ctx = SSLContext.getInstance("TLS");
			ctx.init(null, new TrustManager[] {
				tm
			}, null);
			HttpsURLConnection.setDefaultSSLSocketFactory(ctx.getSocketFactory());
		} catch (GeneralSecurityException e) {
			log.fatal(e);
			throw new IllegalStateException(e);
		}
	}

	public int getMaxNumberOfRetry() {
		return maxNumberOfRetry;
	}

	public void setMaxNumberOfRetry(final int maxNumberOfRetry) {
		this.maxNumberOfRetry = maxNumberOfRetry;
	}

	public int getDefaultDelay() {
		return defaultDelay;
	}

	public void setDefaultDelay(final int defaultDelay) {
		this.defaultDelay = defaultDelay;
	}

	public int getReadTimeOut() {
		return readTimeOut;
	}

	public void setReadTimeOut(final int readTimeOut) {
		this.readTimeOut = readTimeOut;
	}

	public String getResponseType() {
		return responseType;
	}

}
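The connector retries on 503 responses (honouring Retry-After), follows 301/302 redirects manually, and waits the configured defaultDelay after other HTTP errors before retrying, up to maxNumberOfRetry attempts. A minimal usage sketch, assuming only the members visible in this diff (getInputSource is the entry point exercised in HttpConnectorTest below); the class name and parameter values are illustrative:

package eu.dnetlib.dhp.actionmanager.project.httpconnector;

public class HttpConnectorUsageSketch {

	public static void main(String[] args) throws CollectorServiceException {
		final HttpConnector connector = new HttpConnector();

		// tune the retry behaviour before issuing the request (values are examples)
		connector.setMaxNumberOfRetry(5);
		connector.setDefaultDelay(10); // seconds waited after a generic HTTP error
		connector.setReadTimeOut(30); // seconds

		// optional: relax certificate checks; the class itself flags this as a workaround
		// not intended for trusted environments
		connector.initTrustManager();

		// fetches the CORDIS reference file; 503 + Retry-After and 301/302 redirects
		// are handled internally by attemptDownload
		System.out.println(connector.getInputSource("http://cordis.europa.eu/data/reference/cordisref-H2020programmes.csv"));
		System.out.println("content type: " + connector.getResponseType());
	}
}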
@@ -0,0 +1,26 @@
[
  {
    "paramName": "issm",
    "paramLongName": "isSparkSessionManaged",
    "paramDescription": "when true will stop SparkSession after job execution",
    "paramRequired": false
  },
  {
    "paramName": "pjp",
    "paramLongName": "projectPath",
    "paramDescription": "the URL from where to get the projects file",
    "paramRequired": true
  },
  {
    "paramName": "pp",
    "paramLongName": "programmePath",
    "paramDescription": "the URL from where to get the programme file",
    "paramRequired": true
  },
  {
    "paramName": "o",
    "paramLongName": "outputPath",
    "paramDescription": "the path of the new ActionSet",
    "paramRequired": true
  }
]
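The JSON above declares the command-line options the action-set job expects (paramName is apparently the short form, paramLongName the long form, paramRequired whether the option may be omitted). The tests later in this diff pass them as "-<paramLongName> value" pairs; the snippet below is only a minimal, hypothetical stand-in for that kind of parsing, not the project's own argument parser (which is not part of this diff):

package eu.dnetlib.dhp.actionmanager.project.examples;

import java.util.HashMap;
import java.util.Map;

public class ArgumentPairsSketch {

	// turns "-name value" pairs (as passed in the tests later in this diff) into a map
	public static Map<String, String> toMap(String[] args) {
		Map<String, String> params = new HashMap<>();
		for (int i = 0; i + 1 < args.length; i += 2) {
			params.put(args[i].replaceFirst("^-+", ""), args[i + 1]);
		}
		return params;
	}

	public static void main(String[] args) {
		String[] example = {
			"-isSparkSessionManaged", "false",
			"-projectPath", "/tmp/preparedProjects",
			"-programmePath", "/tmp/preparedProgramme",
			"-outputPath", "/tmp/actionSet"
		};
		System.out.println(toMap(example));
	}
}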
@@ -0,0 +1,54 @@
<configuration>
	<property>
		<name>jobTracker</name>
		<value>yarnRM</value>
	</property>
	<property>
		<name>nameNode</name>
		<value>hdfs://nameservice1</value>
	</property>
	<property>
		<name>oozie.use.system.libpath</name>
		<value>true</value>
	</property>
	<property>
		<name>oozie.action.sharelib.for.spark</name>
		<value>spark2</value>
	</property>
	<property>
		<name>hive_metastore_uris</name>
		<value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
	</property>
	<property>
		<name>spark2YarnHistoryServerAddress</name>
		<value>http://iis-cdh5-test-gw.ocean.icm.edu.pl:18089</value>
	</property>
	<property>
		<name>spark2ExtraListeners</name>
		<value>com.cloudera.spark.lineage.NavigatorAppListener</value>
	</property>
	<property>
		<name>spark2SqlQueryExecutionListeners</name>
		<value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
	</property>
	<property>
		<name>sparkExecutorNumber</name>
		<value>4</value>
	</property>
	<property>
		<name>spark2EventLogDir</name>
		<value>/user/spark/spark2ApplicationHistory</value>
	</property>
	<property>
		<name>sparkDriverMemory</name>
		<value>15G</value>
	</property>
	<property>
		<name>sparkExecutorMemory</name>
		<value>6G</value>
	</property>
	<property>
		<name>sparkExecutorCores</name>
		<value>1</value>
	</property>
</configuration>
@@ -0,0 +1,146 @@
<workflow-app name="H2020Programme" xmlns="uri:oozie:workflow:0.5">
	<parameters>
		<property>
			<name>projectFileURL</name>
			<description>the url where to get the projects file</description>
		</property>

		<property>
			<name>programmeFileURL</name>
			<description>the url where to get the programme file</description>
		</property>

		<property>
			<name>outputPath</name>
			<description>path where to store the action set</description>
		</property>
	</parameters>

	<start to="deleteoutputpath"/>
	<kill name="Kill">
		<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
	</kill>
	<action name="deleteoutputpath">
		<fs>
			<delete path='${outputPath}'/>
			<mkdir path='${outputPath}'/>
			<delete path='${workingDir}'/>
			<mkdir path='${workingDir}'/>
		</fs>
		<ok to="get_project_file"/>
		<error to="Kill"/>
	</action>

	<action name="get_project_file">
		<java>
			<main-class>eu.dnetlib.dhp.actionmanager.project.csvutils.ReadCSV</main-class>
			<arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
			<arg>--fileURL</arg><arg>${projectFileURL}</arg>
			<arg>--hdfsPath</arg><arg>${workingDir}/projects</arg>
			<arg>--classForName</arg><arg>eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProject</arg>
		</java>
		<ok to="get_programme_file"/>
		<error to="Kill"/>
	</action>

	<action name="get_programme_file">
		<java>
			<main-class>eu.dnetlib.dhp.actionmanager.project.csvutils.ReadCSV</main-class>
			<arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
			<arg>--fileURL</arg><arg>${programmeFileURL}</arg>
			<arg>--hdfsPath</arg><arg>${workingDir}/programme</arg>
			<arg>--classForName</arg><arg>eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme</arg>
		</java>
		<ok to="read_projects"/>
		<error to="Kill"/>
	</action>

	<action name="read_projects">
		<java>
			<main-class>eu.dnetlib.dhp.actionmanager.project.ReadProjectsFromDB</main-class>
			<arg>--hdfsPath</arg><arg>${workingDir}/dbProjects</arg>
			<arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
			<arg>--postgresUrl</arg><arg>${postgresURL}</arg>
			<arg>--postgresUser</arg><arg>${postgresUser}</arg>
			<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
		</java>
		<ok to="prepare_programme"/>
		<error to="Kill"/>
	</action>

	<action name="prepare_programme">
		<spark xmlns="uri:oozie:spark-action:0.2">
			<master>yarn</master>
			<mode>cluster</mode>
			<name>PrepareProgramme</name>
			<class>eu.dnetlib.dhp.actionmanager.project.PrepareProgramme</class>
			<jar>dhp-aggregation-${projectVersion}.jar</jar>
			<spark-opts>
				--executor-cores=${sparkExecutorCores}
				--executor-memory=${sparkExecutorMemory}
				--driver-memory=${sparkDriverMemory}
				--conf spark.extraListeners=${spark2ExtraListeners}
				--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
				--conf spark.sql.shuffle.partitions=3840
			</spark-opts>
			<arg>--programmePath</arg><arg>${workingDir}/programme</arg>
			<arg>--outputPath</arg><arg>${workingDir}/preparedProgramme</arg>
		</spark>
		<ok to="prepare_project"/>
		<error to="Kill"/>
	</action>

	<action name="prepare_project">
		<spark xmlns="uri:oozie:spark-action:0.2">
			<master>yarn</master>
			<mode>cluster</mode>
			<name>PrepareProjects</name>
			<class>eu.dnetlib.dhp.actionmanager.project.PrepareProjects</class>
			<jar>dhp-aggregation-${projectVersion}.jar</jar>
			<spark-opts>
				--executor-cores=${sparkExecutorCores}
				--executor-memory=${sparkExecutorMemory}
				--driver-memory=${sparkDriverMemory}
				--conf spark.extraListeners=${spark2ExtraListeners}
				--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
				--conf spark.sql.shuffle.partitions=3840
			</spark-opts>
			<arg>--projectPath</arg><arg>${workingDir}/projects</arg>
			<arg>--outputPath</arg><arg>${workingDir}/preparedProjects</arg>
			<arg>--dbProjectPath</arg><arg>${workingDir}/dbProjects</arg>
		</spark>
		<ok to="create_updates"/>
		<error to="Kill"/>
	</action>

	<action name="create_updates">
		<spark xmlns="uri:oozie:spark-action:0.2">
			<master>yarn</master>
			<mode>cluster</mode>
			<name>ProjectProgrammeAS</name>
			<class>eu.dnetlib.dhp.actionmanager.project.SparkAtomicActionJob</class>
			<jar>dhp-aggregation-${projectVersion}.jar</jar>
			<spark-opts>
				--executor-cores=${sparkExecutorCores}
				--executor-memory=${sparkExecutorMemory}
				--driver-memory=${sparkDriverMemory}
				--conf spark.extraListeners=${spark2ExtraListeners}
				--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
				--conf spark.sql.shuffle.partitions=3840
			</spark-opts>
			<arg>--projectPath</arg><arg>${workingDir}/preparedProjects</arg>
			<arg>--programmePath</arg><arg>${workingDir}/preparedProgramme</arg>
			<arg>--outputPath</arg><arg>${outputPath}</arg>
		</spark>
		<ok to="End"/>
		<error to="Kill"/>
	</action>

	<end name="End"/>
</workflow-app>
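Conceptually, the create_updates step joins the prepared projects with the prepared programme entries on the H2020 programme code and writes the result out as an action set. The sketch below is a hypothetical, simplified illustration of that join using the plain DataFrame API; it is not the actual SparkAtomicActionJob implementation, and the column names (programme on the project side, code on the programme side) are assumptions taken from the test fixtures further down:

package eu.dnetlib.dhp.actionmanager.project.examples;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class ProjectProgrammeJoinSketch {

	public static void main(String[] args) {
		SparkSession spark = SparkSession
			.builder()
			.appName("ProjectProgrammeJoinSketch")
			.master("local[*]")
			.getOrCreate();

		// outputs of the prepare_project and prepare_programme steps (newline-delimited JSON)
		Dataset<Row> projects = spark.read().json(args[0]); // e.g. ${workingDir}/preparedProjects
		Dataset<Row> programmes = spark.read().json(args[1]); // e.g. ${workingDir}/preparedProgramme

		// link each project to the H2020 programme it declares; column names are assumptions
		Dataset<Row> joined = projects
			.join(programmes, projects.col("programme").equalTo(programmes.col("code")));

		joined.show(false);
		spark.stop();
	}
}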
@@ -0,0 +1,29 @@
[
  {
    "paramName": "fu",
    "paramLongName" : "fileURL",
    "paramDescription" : "the url of the file to download",
    "paramRequired" : true
  },
  {
    "paramName": "hp",
    "paramLongName" : "hdfsPath",
    "paramDescription" : "where to save the file",
    "paramRequired" : true
  },
  {
    "paramName": "hnn",
    "paramLongName" : "hdfsNameNode",
    "paramDescription" : "the name node",
    "paramRequired" : true
  },
  {
    "paramName": "cfn",
    "paramLongName" : "classForName",
    "paramDescription" : "the name of the class to deserialize the csv to",
    "paramRequired" : true
  }
]
@@ -0,0 +1,20 @@
[
  {
    "paramName": "issm",
    "paramLongName": "isSparkSessionManaged",
    "paramDescription": "when true will stop SparkSession after job execution",
    "paramRequired": false
  },
  {
    "paramName": "pp",
    "paramLongName": "programmePath",
    "paramDescription": "the URL from where to get the programme file",
    "paramRequired": true
  },
  {
    "paramName": "o",
    "paramLongName": "outputPath",
    "paramDescription": "the path where to store the prepared programme",
    "paramRequired": true
  }
]
@@ -0,0 +1,26 @@
[
  {
    "paramName": "issm",
    "paramLongName": "isSparkSessionManaged",
    "paramDescription": "when true will stop SparkSession after job execution",
    "paramRequired": false
  },
  {
    "paramName": "pjp",
    "paramLongName": "projectPath",
    "paramDescription": "the path from where to get the projects file",
    "paramRequired": true
  },
  {
    "paramName": "o",
    "paramLongName": "outputPath",
    "paramDescription": "the path where to store the prepared projects",
    "paramRequired": true
  },
  {
    "paramName": "dbp",
    "paramLongName": "dbProjectPath",
    "paramDescription": "the path of the project code read from db",
    "paramRequired": true
  }
]
@@ -0,0 +1,32 @@
[
  {
    "paramName": "p",
    "paramLongName": "hdfsPath",
    "paramDescription": "the path where to store the sequence file",
    "paramRequired": true
  },
  {
    "paramName": "nn",
    "paramLongName": "hdfsNameNode",
    "paramDescription": "the name node on hdfs",
    "paramRequired": true
  },
  {
    "paramName": "pgurl",
    "paramLongName": "postgresUrl",
    "paramDescription": "postgres url, example: jdbc:postgresql://localhost:5432/testdb",
    "paramRequired": true
  },
  {
    "paramName": "pguser",
    "paramLongName": "postgresUser",
    "paramDescription": "postgres user",
    "paramRequired": false
  },
  {
    "paramName": "pgpasswd",
    "paramLongName": "postgresPassword",
    "paramDescription": "postgres password",
    "paramRequired": false
  }
]
@@ -0,0 +1,41 @@

package eu.dnetlib.dhp.actionmanager.project;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

import org.apache.commons.io.IOUtils;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVParser;

public class CSVParserTest {

	private static Path workingDir;

	@BeforeAll
	public static void beforeAll() throws IOException {
		workingDir = Files.createTempDirectory(CSVParserTest.class.getSimpleName());

	}

	@Test
	public void readProgrammeTest() throws Exception {

		String programmecsv = IOUtils
			.toString(
				getClass()
					.getClassLoader()
					.getResourceAsStream("eu/dnetlib/dhp/actionmanager/project/programme.csv"));

		CSVParser csvParser = new CSVParser();

		List<Object> pl = csvParser.parse(programmecsv, "eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme");

		System.out.println(pl.size());

	}
}
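CSVParserTest above only prints how many rows were parsed. For orientation, the CORDIS reference file fetched by the workflow is a delimiter-separated table with a header row; the self-contained sketch below shows how such a file can be read with Apache Commons CSV. The semicolon delimiter, the column names and the two sample rows are assumptions for illustration, and this is not the project's own csvutils.CSVParser:

package eu.dnetlib.dhp.actionmanager.project.examples;

import java.io.Reader;
import java.io.StringReader;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;

public class CommonsCsvSketch {

	public static void main(String[] args) throws Exception {
		// two illustrative rows in the shape of the CORDIS H2020 programme reference file
		String csv = "code;title;shortTitle;language\n"
			+ "H2020-EU.1.1.;European Research Council (ERC);ERC;en\n"
			+ "H2020-EU.3.4.7.;SESAR JU;SESAR JU;en";

		try (Reader reader = new StringReader(csv)) {
			// header row supplies the column names used by record.get(...)
			for (CSVRecord record : CSVFormat.DEFAULT.withDelimiter(';').withFirstRecordAsHeader().parse(reader)) {
				System.out.println(record.get("code") + " -> " + record.get("shortTitle"));
			}
		}
	}
}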
@@ -0,0 +1,94 @@

package eu.dnetlib.dhp.actionmanager.project;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme;

public class PrepareProgrammeTest {

	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	private static final ClassLoader cl = eu.dnetlib.dhp.actionmanager.project.PrepareProgrammeTest.class
		.getClassLoader();

	private static SparkSession spark;

	private static Path workingDir;
	private static final Logger log = LoggerFactory
		.getLogger(eu.dnetlib.dhp.actionmanager.project.PrepareProgrammeTest.class);

	@BeforeAll
	public static void beforeAll() throws IOException {
		workingDir = Files
			.createTempDirectory(eu.dnetlib.dhp.actionmanager.project.PrepareProgrammeTest.class.getSimpleName());
		log.info("using work dir {}", workingDir);

		SparkConf conf = new SparkConf();
		conf.setAppName(eu.dnetlib.dhp.actionmanager.project.PrepareProgrammeTest.class.getSimpleName());

		conf.setMaster("local[*]");
		conf.set("spark.driver.host", "localhost");
		conf.set("hive.metastore.local", "true");
		conf.set("spark.ui.enabled", "false");
		conf.set("spark.sql.warehouse.dir", workingDir.toString());
		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

		spark = SparkSession
			.builder()
			.appName(PrepareProgrammeTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();
	}

	@AfterAll
	public static void afterAll() throws IOException {
		FileUtils.deleteDirectory(workingDir.toFile());
		spark.stop();
	}

	@Test
	public void numberDistinctProgrammeTest() throws Exception {
		PrepareProgramme
			.main(
				new String[] {
					"-isSparkSessionManaged",
					Boolean.FALSE.toString(),
					"-programmePath",
					getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/whole_programme.json.gz").getPath(),
					"-outputPath",
					workingDir.toString() + "/preparedProgramme"
				});

		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

		JavaRDD<CSVProgramme> tmp = sc
			.textFile(workingDir.toString() + "/preparedProgramme")
			.map(item -> OBJECT_MAPPER.readValue(item, CSVProgramme.class));

		Assertions.assertEquals(277, tmp.count());

		Dataset<CSVProgramme> verificationDataset = spark.createDataset(tmp.rdd(), Encoders.bean(CSVProgramme.class));

		Assertions.assertEquals(0, verificationDataset.filter("shortTitle =''").count());
	}

}
@@ -0,0 +1,99 @@

package eu.dnetlib.dhp.actionmanager.project;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProgramme;
import eu.dnetlib.dhp.actionmanager.project.csvutils.CSVProject;

public class PrepareProjectTest {

	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	private static final ClassLoader cl = PrepareProjectTest.class
		.getClassLoader();

	private static SparkSession spark;

	private static Path workingDir;
	private static final Logger log = LoggerFactory
		.getLogger(PrepareProjectTest.class);

	@BeforeAll
	public static void beforeAll() throws IOException {
		workingDir = Files
			.createTempDirectory(PrepareProjectTest.class.getSimpleName());
		log.info("using work dir {}", workingDir);

		SparkConf conf = new SparkConf();
		conf.setAppName(PrepareProjectTest.class.getSimpleName());

		conf.setMaster("local[*]");
		conf.set("spark.driver.host", "localhost");
		conf.set("hive.metastore.local", "true");
		conf.set("spark.ui.enabled", "false");
		conf.set("spark.sql.warehouse.dir", workingDir.toString());
		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

		spark = SparkSession
			.builder()
			.appName(PrepareProjectTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();
	}

	@AfterAll
	public static void afterAll() throws IOException {
		FileUtils.deleteDirectory(workingDir.toFile());
		spark.stop();
	}

	@Test
	public void numberDistinctProjectTest() throws Exception {
		PrepareProjects
			.main(
				new String[] {
					"-isSparkSessionManaged",
					Boolean.FALSE.toString(),
					"-projectPath",
					getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/projects_subset.json").getPath(),
					"-outputPath",
					workingDir.toString() + "/preparedProjects",
					"-dbProjectPath",
					getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/dbProject").getPath(),

				});

		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

		JavaRDD<CSVProject> tmp = sc
			.textFile(workingDir.toString() + "/preparedProjects")
			.map(item -> OBJECT_MAPPER.readValue(item, CSVProject.class));

		Assertions.assertEquals(8, tmp.count());

		Dataset<CSVProject> verificationDataset = spark.createDataset(tmp.rdd(), Encoders.bean(CSVProject.class));

		Assertions.assertEquals(0, verificationDataset.filter("length(id) = 0").count());
		Assertions.assertEquals(0, verificationDataset.filter("length(programme) = 0").count());
	}

}
@@ -0,0 +1,94 @@

package eu.dnetlib.dhp.actionmanager.project;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.io.Text;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.schema.action.AtomicAction;
import eu.dnetlib.dhp.schema.oaf.Project;

public class SparkUpdateProjectTest {
	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	private static final ClassLoader cl = eu.dnetlib.dhp.actionmanager.project.SparkUpdateProjectTest.class
		.getClassLoader();

	private static SparkSession spark;

	private static Path workingDir;
	private static final Logger log = LoggerFactory
		.getLogger(eu.dnetlib.dhp.actionmanager.project.SparkUpdateProjectTest.class);

	@BeforeAll
	public static void beforeAll() throws IOException {
		workingDir = Files
			.createTempDirectory(eu.dnetlib.dhp.actionmanager.project.SparkUpdateProjectTest.class.getSimpleName());
		log.info("using work dir {}", workingDir);

		SparkConf conf = new SparkConf();
		conf.setAppName(eu.dnetlib.dhp.actionmanager.project.SparkUpdateProjectTest.class.getSimpleName());

		conf.setMaster("local[*]");
		conf.set("spark.driver.host", "localhost");
		conf.set("hive.metastore.local", "true");
		conf.set("spark.ui.enabled", "false");
		conf.set("spark.sql.warehouse.dir", workingDir.toString());
		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

		spark = SparkSession
			.builder()
			.appName(SparkUpdateProjectTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();
	}

	@AfterAll
	public static void afterAll() throws IOException {
		FileUtils.deleteDirectory(workingDir.toFile());
		spark.stop();
	}

	@Test
	public void numberDistinctProgrammeTest() throws Exception {
		SparkAtomicActionJob
			.main(
				new String[] {
					"-isSparkSessionManaged",
					Boolean.FALSE.toString(),
					"-programmePath",
					getClass()
						.getResource("/eu/dnetlib/dhp/actionmanager/project/preparedProgramme_whole.json.gz")
						.getPath(),
					"-projectPath",
					getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/prepared_projects.json").getPath(),
					"-outputPath",
					workingDir.toString() + "/actionSet"
				});

		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

		JavaRDD<Project> tmp = sc
			.sequenceFile(workingDir.toString() + "/actionSet", Text.class, Text.class)
			.map(value -> OBJECT_MAPPER.readValue(value._2().toString(), AtomicAction.class))
			.map(aa -> ((Project) aa.getPayload()));

		Assertions.assertEquals(14, tmp.count());

	}
}
@@ -0,0 +1,39 @@

package eu.dnetlib.dhp.actionmanager.project.httpconnector;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.ssl.SSLContextBuilder;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

public class HttpConnectorTest {

	private static final Log log = LogFactory.getLog(HttpConnectorTest.class);
	private static HttpConnector connector;

	private static final String URL = "http://cordis.europa.eu/data/reference/cordisref-H2020programmes.csv";
	private static final String URL_MISCONFIGURED_SERVER = "https://www.alexandria.unisg.ch/cgi/oai2?verb=Identify";
	private static final String URL_GOODSNI_SERVER = "https://air.unimi.it/oai/openaire?verb=Identify";

	private static final SSLContextBuilder sslContextBuilder = new SSLContextBuilder();
	private static SSLConnectionSocketFactory sslSocketFactory;

	@BeforeAll
	public static void setUp() {
		connector = new HttpConnector();
	}

	@Test

	public void testGetInputSource() throws CollectorServiceException {
		System.out.println(connector.getInputSource(URL));
	}

	@Test
	public void testGoodServers() throws CollectorServiceException {
		System.out.println(connector.getInputSource(URL_GOODSNI_SERVER));
	}

}
@@ -0,0 +1,8 @@
{"code":"894593"}
{"code":"897004"}
{"code":"896300"}
{"code":"892890"}
{"code":"886828"}
{"code":"8867767"}
{"code":"101003374"}
{"code":"886776"}
Binary file not shown.
@@ -0,0 +1,16 @@
{"rcn":"229267","id":"894593","acronym":"ICARUS","status":"SIGNED","programme":"H2020-EU.3.4.7.","topics":"SESAR-ER4-31-2019","frameworkProgramme":"H2020","title":"INTEGRATED COMMON ALTITUDE REFERENCE SYSTEM FOR U-SPACE","startDate":"2020-05-01","endDate":"2022-07-31","projectUrl":"","objective":"ICARUS project proposes an innovative solution to the challenge of the Common Altitude Reference inside VLL airspaces with the definition of a new U-space service and its validation in a real operational environment. In manned aviation, the methods of determining the altitude of an aircraft are based on pressure altitude difference measurements (e.g. QFE, QNH and FL) referred to a common datum. \nThe UA flights superimpose a new challenge, since a small drone may take off and land almost from everywhere, hence reducing the original significance of QFE settings, introduced on behalf of manned pilots to display on the altimeter the 0-height at touchdown on the local runway. In fact, the possibility for n drones to take off at n different places would generate a series of n different QFE corresponding to different heights of ground pressures referred to the take-off “Home points”. Therefore for a large number drones, new methodologies and procedures shall be put in place. The ICARUS defines a new U-space U3 service tightly coupled with the interface of the existing U-space services (e.g. Tracking, and Flight Planning services). The users of ICARUS service shall be remote pilots competent to fly in BVLOS in the specific category of UAS operations and ultralight GA pilots potentially sharing the same VLL airspace. \nThe ICARUS proposed approach foresees the realization of DTM service embedded in an Application Program Interface (API) that can be queried by UAS pilot/operator (or by drone itself) based on the actual positioning of the UA along its trajectory, computed by the (E)GNSS receiver. The output of the DTM service would provide information on distance from ground/obstacles in combination with the common altitude reference.\nAccuracy, continuity, integrity and availability requirements for GNSS-based altimetry together with accuracy and resolution requirements of the DTM to be provided by ICARUS service are key topics of the study.","totalCost":"1385286,25","ecMaxContribution":"1144587,5","call":"H2020-SESAR-2019-2","fundingScheme":"SESAR-RIA","coordinator":"E-GEOS SPA","coordinatorCountry":"IT","participants":"TOPVIEW SRL;TELESPAZIO SPA;DRONERADAR SP Z O.O.;EUROCONTROL - EUROPEAN ORGANISATION FOR THE SAFETY OF AIR NAVIGATION;EUROUSC ESPANA SL;POLITECNICO DI MILANO;UNIVERSITA DEGLI STUDI DI ROMA LA SAPIENZA","participantCountries":"IT;PL;BE;ES","subjects":""}
{"rcn":"229284","id":"897004","acronym":"ISLand","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Isolation and Segregation Landscape. Archaeology of quarantine in the Indian Ocean World","startDate":"2020-11-01","endDate":"2023-10-31","projectUrl":"","objective":"The proposed research presents an experimental and completely novel investigation within the historical archaeology,\napplied to isolated contexts. The main objective of ISLand is to provide a new way of thinking about human interactions\nwithin colonial empires and bringing colonial studies into dialogue with medical history and the emerging concept of\nhealthscaping. It seeks to do so by studying quarantine facilities in the Indian Ocean World during the long nineteenth\ncentury, a crucial period for the history of European empires in that region and a flashpoint for the conceptualization of\nmodern public health. Quarantine, traditionally viewed as merely a mechanism for the control of disease, will be analyzed as\nthe outward material response to important changes taking place socially, ecologically, and politically at the time.\nThe project is a part of an international, interdisciplinary effort, combining history, archaeology, and anthropology. The\nresearcher will tap numerous archival sources and archaeological data from selected sites, examine them through social and\nspatial analysis, and systematically analyze a test case in Mauritius through the most innovative methods that target\nlandscape and standing archaeology.\nThe broader impacts of ISLand have relevance for current European approaches to the migration crisis, where the threat of\ndisease has been ignited as a potentially debilitating consequence of immigration from extra-European countries. The\ntraining-through-research project at the Stanford University, the top institution where acquiring knowledge and skills in\nhistorical archaeology, will allow the applicant to develop into a position of professional maturity with a specific\ninterdisciplinary set of skills. With the support of the host institutions in EU, the researcher will promote historical archaeology\nin European academy, stimulating new approaches in usual archaeological research and an interdisciplinary approach with\ncultural anthropology.","totalCost":"253052,16","ecMaxContribution":"253052,16","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-GF","coordinator":"UNIVERSITEIT VAN AMSTERDAM","coordinatorCountry":"NL","participants":"","participantCountries":"","subjects":""}
{"rcn":"229281","id":"896300","acronym":"STRETCH","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Smart Textiles for RETrofitting and Monitoring of Cultural Heritage Buildings","startDate":"2020-09-01","endDate":"2022-08-31","projectUrl":"","objective":"This project aims to develop novel techniques using smart multifunctional materials for the combined seismic-plus-energy retrofitting, and Structural Health Monitoring (SHM) of the European cultural heritage buildings (CHB). The need for upgrading the existing old and CHB is becoming increasingly important for the EU countries, due to: (1) their poor structural performance during recent earthquakes (e.g. Italy, Greece) or other natural hazards (e.g. extreme weather conditions) that have resulted in significant economic losses, and loss of human lives; and (2) their low energy performance which increases significantly their energy consumption (buildings are responsible for 40% of EU energy consumption). Moreover, the SHM of the existing buildings is crucial for assessing continuously their structural integrity and thus to provide information for planning cost effective and sustainable maintenance decisions. Since replacing the old buildings with new is not financially feasible, and even it is not allowed for CHB, their lifetime extension requires considering simultaneously both structural and energy retrofitting. It is noted that the annual cost of repair and maintenance of existing European building stock is estimated to be about 50% of the total construction budget, currently standing at more than €300 billion. To achieve cost effectiveness, STRETCH explores a novel approach, which integrates technical textile reinforcement with thermal insulation systems and strain sensors to provide simultaneous structural-plus-energy retrofitting combined with SHM, tailored for masonry cultural heritage building envelopes. The effectiveness of the proposed retrofitting system will be validated experimentally and analytically. Moreover, draft guidelines and recommendations for determining future research on the use of smart composite materials for the concurrent retrofitting (structural-plus-energy) and SHM of the existing cultural heritage buildings envelopes will be proposed.","totalCost":"183473,28","ecMaxContribution":"183473,28","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"JRC -JOINT RESEARCH CENTRE- EUROPEAN COMMISSION","coordinatorCountry":"BE","participants":"","participantCountries":"","subjects":""}
{"rcn":"229265","id":"892890","acronym":"RhythmicPrediction","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Rhythmic prediction in speech perception: are our brain waves in sync with our native language?","startDate":"2021-01-01","endDate":"2022-12-31","projectUrl":"","objective":"Speech has rhythmic properties that widely differ across languages. When we listen to foreign languages, we may perceive them to be more musical, or rather more rap-like than our own. Even if we are unaware of it, the rhythm and melody of language, i.e. prosody, reflects its linguistic structure. On the one hand, prosody emphasizes content words and new information with stress and accents. On the other hand, it is aligned to phrase edges, marking them with boundary tones. Prosody hence helps the listener to focus on important words and to chunk sentences into phrases, and phrases into words. In fact, prosody is even used predictively, for instance to time the onset of the next word, the next piece of new information, or the total remaining length of the utterance, so the listener can seamlessly start their own speaking turn. \nSo, the listener, or rather their brain, is actively predicting when important speech events will happen, using prosody. How prosodic rhythms are exploited to predict speech timing, however, is unclear. No link between prosody and neural predictive processing has yet been empirically made. One hypothesis is that rhythm, such as the alternation of stressed and unstressed syllables, helps listeners time their attention. Similar behavior is best captured by the notion of an internal oscillator which can be set straight by attentional spikes. While neuroscientific evidence for the relation of neural oscillators to speech processing is starting to emerge, no link to the use of prosody nor predictive listening exists, yet. Furthermore, it is still unknown how native language knowledge affects cortical oscillations, and how oscillations are affected by cross-linguistic differences in rhythmic structure. The current project combines the standing knowledge of prosodic typology with the recent advances in neuroscience on cortical oscillations, to investigate the role of internal oscillators on native prosody perception, and active speech prediction.","totalCost":"191149,44","ecMaxContribution":"191149,44","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"UNIVERSITE DE GENEVE","coordinatorCountry":"CH","participants":"","participantCountries":"","subjects":""}
{"rcn":"229235","id":"886828","acronym":"ASAP","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Advanced Solutions for Asphalt Pavements","startDate":"2021-09-01","endDate":"2023-08-31","projectUrl":"","objective":"The Advanced Solutions for Asphalt Pavements (ASAP) project involves the development of a unique road paving technology which will use a bio-bitumen rejuvenator to rejuvenate aged asphalt bitumen. This technology will help to extend the lifespan of asphalt pavements (roads) and will reduce the environmental and economic impact of roads and road maintenance processes. Recycling and self-healing processes will replace fossil fuel dependent technology. Self-healing will involve rejuvenating aged asphalt bitumen using a bio-rejuvenator developed using microalgae oils (rejuvenating bio-oil). Microalgae has been selected because of its fast growth, versatility and ability to survive within hostile environments, such as wastewater. \n\nASAP will utilise microalgae, cultivated within the wastewater treatment process, as a source of the rejuvenating bio-oil. The solvent (Soxhlet) processes will be used to extract the oil from the microalgae. To ensure the efficiency of the oil extraction process, an ultrasonication process will be used to pre-treat the microalgae. The suitability of rejuvenating bio-oil as a replacement for the bitumen rejuvenator (fossil fuel based) will be ascertained via a series of standard bituminous and accelerated tests. A rejuvenator-binder diffusion numerical model will be developed, based on the Delft Lattice concrete diffusion model, to determine the conditions required for rejuvenation to occur and to ascertain the healing rate of the asphalt binder. These parameters will facilitate the selection and optimisation of the asphalt self-healing systems (specifically the amount of bio-oil rejuvenator and time required) to achieve full rejuvenation. \n\nThis novel approach will benchmark the effectiveness of this intervention against existing asphalt design and maintenance processes and assess feasibility. The ASAP project presents an opportunity to revolutionise road design and maintenance processes and reduce its environmental and financial costs.","totalCost":"187572,48","ecMaxContribution":"187572,48","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"NEDERLANDSE ORGANISATIE VOOR TOEGEPAST NATUURWETENSCHAPPELIJK ONDERZOEK TNO","coordinatorCountry":"NL","participants":"","participantCountries":"","subjects":""}
{"rcn":null,"id":"886776","acronym":null,"status":null,"programme":"H2020-EU.2.1.4.","topics":null,"frameworkProgramme":"H2020","title":"BIO-Based pESTicides production for sustainable agriculture management plan","startDate":"2020-05-01","endDate":"2023-04-30","projectUrl":"","objective":"The BIOBESTicide project will validate and demonstrate the production of an effective and cost-efficient biopesticide. The demonstration will be based on an innovative bio-based value chain starting from the valorisation of sustainable biomasses, i.e. beet pulp and sugar molasses and will exploit the properties of the oomycete Pythium oligandrum strain I-5180 to increase natural plant defenses, to produce an highly effective and eco-friendly biopesticide solution for vine plants protection. \nBIOVITIS, the project coordinator, has developed, at laboratory level (TRL4), an effective method to biocontrol one of the major causes of worldwide vineyards destruction, the Grapevine Trunk Diseases (GTDs). The protection system is based on the oomycete Pythium oligandrum strain I-5180 that, at applied at optimal time and concentration, colonises the root of vines and stimulates the natural plant defences against GTDs, providing a protection that ranges between 40% and 60%. \nBIOBESTicide project will respond to the increasing demands for innovative solutions for crop protection agents, transferring the technology to a DEMO Plant able to produce more than 10 T of a high-quality oomycete-based biopesticide product per year (TRL7). \nThe BIOBESTicide project will validate the efficiency of the formulated product on vineyards of different geographical areas.\nTo assure the safety of products under both health and environmental points of view, a full and complete approval dossier for Pythium oligandrum strain I-5180 will be submitted in all the European countries. \nA Life Cycle Sustainability Assessment (LCSA) will be conducted to assess the environmental, economic and social impacts of the developed products.\nThe adoption of the effective and cost-efficient biopesticide will have significant impacts with a potential ROI of 30 % in just 5 years and a total EBITDA of more than € 6,400,000.","totalCost":"4402772,5","ecMaxContribution":"3069653","call":"H2020-BBI-JTI-2019","fundingScheme":"BBI-IA-DEMO","coordinator":"BIOVITIS","coordinatorCountry":"FR","participants":"MERCIER FRERES SARL;FUNDACION TECNALIA RESEARCH & INNOVATION;LAMBERTI SPA;EURION CONSULTING;CIAOTECH Srl;STOWARZYSZENIE ZACHODNIOPOMORSKI KLASTER CHEMICZNY ZIELONA CHEMIA;NORDZUCKER AG;INSTITUT NATIONAL DE RECHERCHE POUR L'AGRICULTURE, L'ALIMENTATION ET L'ENVIRONNEMENT;INSTITUT FRANCAIS DE LA VIGNE ET DU VIN","participantCountries":"FR;ES;IT;PL;DE","subjects":""}
{"rcn":null,"id":"886776","acronym":null,"status":null,"programme":"H2020-EU.3.2.6.","topics":"BBI-2019-SO3-D4","frameworkProgramme":"H2020","title":"BIO-Based pESTicides production for sustainable agriculture management plan","startDate":"2020-05-01","endDate":"2023-04-30","projectUrl":"","objective":"The BIOBESTicide project will validate and demonstrate the production of an effective and cost-efficient biopesticide. The demonstration will be based on an innovative bio-based value chain starting from the valorisation of sustainable biomasses, i.e. beet pulp and sugar molasses and will exploit the properties of the oomycete Pythium oligandrum strain I-5180 to increase natural plant defenses, to produce an highly effective and eco-friendly biopesticide solution for vine plants protection. \nBIOVITIS, the project coordinator, has developed, at laboratory level (TRL4), an effective method to biocontrol one of the major causes of worldwide vineyards destruction, the Grapevine Trunk Diseases (GTDs). The protection system is based on the oomycete Pythium oligandrum strain I-5180 that, at applied at optimal time and concentration, colonises the root of vines and stimulates the natural plant defences against GTDs, providing a protection that ranges between 40% and 60%. \nBIOBESTicide project will respond to the increasing demands for innovative solutions for crop protection agents, transferring the technology to a DEMO Plant able to produce more than 10 T of a high-quality oomycete-based biopesticide product per year (TRL7). \nThe BIOBESTicide project will validate the efficiency of the formulated product on vineyards of different geographical areas.\nTo assure the safety of products under both health and environmental points of view, a full and complete approval dossier for Pythium oligandrum strain I-5180 will be submitted in all the European countries. \nA Life Cycle Sustainability Assessment (LCSA) will be conducted to assess the environmental, economic and social impacts of the developed products.\nThe adoption of the effective and cost-efficient biopesticide will have significant impacts with a potential ROI of 30 % in just 5 years and a total EBITDA of more than € 6,400,000.","totalCost":"4402772,5","ecMaxContribution":"3069653","call":"H2020-BBI-JTI-2019","fundingScheme":"BBI-IA-DEMO","coordinator":"BIOVITIS","coordinatorCountry":"FR","participants":"MERCIER FRERES SARL;FUNDACION TECNALIA RESEARCH & INNOVATION;LAMBERTI SPA;EURION CONSULTING;CIAOTECH Srl;STOWARZYSZENIE ZACHODNIOPOMORSKI KLASTER CHEMICZNY ZIELONA CHEMIA;NORDZUCKER AG;INSTITUT NATIONAL DE RECHERCHE POUR L'AGRICULTURE, L'ALIMENTATION ET L'ENVIRONNEMENT;INSTITUT FRANCAIS DE LA VIGNE ET DU VIN","participantCountries":"FR;ES;IT;PL;DE","subjects":""}
{"rcn":"229276","id":"895426","acronym":"DisMoBoH","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Dissecting the molecular building principles of locally formed transcriptional hubs","startDate":"2021-09-01","endDate":"2023-08-31","projectUrl":"","objective":"Numerous DNA variants have already been identified that modulate inter-individual molecular traits – most prominently gene expression. However, since finding mechanistic interpretations relating genotype to phenotype has proven challenging, the focus has shifted to higher-order regulatory features, i.e. chromatin accessibility, transcription factor (TF) binding and 3D chromatin interactions. This revealed at least two enhancer types: “lead” enhancers in which the presence of genetic variants modulates the activity of entire chromatin domains, and “dependent” ones in which variants induce subtle changes, affecting DNA accessibility, but not transcription. Although cell type-specific TFs are likely important, it remains unclear which sequence features are required to establish such enhancer hierarchies, and under which circumstances genetic variation results in altered enhancer-promoter contacts and differential gene expression. Here, we propose to investigate the molecular mechanisms that link DNA variation to TF binding, chromatin topology, and gene expression response. We will leverage data on enhancer hierarchy and sequence-specific TF binding to identify the sequence signatures that define “lead” enhancers. The results will guide the design of a synthetic locus that serves as an in vivo platform to systematically vary the building blocks of local transcriptional units: i) DNA sequence – including variations in TF binding site affinity and syntax, ii) molecular interactions between TFs, and iii) chromatin conformation. To validate our findings, we will perform optical reconstruction of chromatin architecture for a select number of DNA variants. By simultaneously perturbing co-dependent features, this proposal will provide novel mechanistic insights into the formation of local transcriptional hubs.","totalCost":"191149,44","ecMaxContribution":"191149,44","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-RI","coordinator":"ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE","coordinatorCountry":"CH","participants":"","participantCountries":"","subjects":""}
{"rcn":"229288","id":"898218","acronym":"devUTRs","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Uncovering the roles of 5′UTRs in translational control during early zebrafish development","startDate":"2021-09-01","endDate":"2023-08-31","projectUrl":"","objective":"Following fertilisation, metazoan embryos are transcriptionally silent, and embryogenesis is controlled by maternally deposited factors. Developmental progression requires the synthesis of new mRNAs and proteins in a coordinated fashion. Many posttranscriptional mechanisms regulate the fate of maternal mRNAs, but it is less understood how translational control shapes early embryogenesis. In eukaryotes, translation starts at the mRNA 5′ end, consisting of the 5′ cap and 5′ untranslated region (UTR). Protein synthesis is primarily regulated at the translation initiation step by elements within the 5′UTR. However, the role of 5′UTRs in regulating the dynamics of mRNA translation during vertebrate embryogenesis remains unexplored. For example, all vertebrate ribosomal protein (RP) mRNAs harbor a conserved terminal oligopyrimidine tract (TOP) in their 5′UTR. RP levels must be tightly controlled to ensure proper organismal development, but if and how the TOP motif mediates RP mRNA translational regulation during embryogenesis is unclear. Overall, we lack a systematic understanding of the regulatory information contained in 5′UTRs. In this work, I aim to uncover the 5′UTR in vivo rules for mRNA translational regulation during zebrafish embryogenesis. I propose to apply imaging and biochemical approaches to characterise the role of the TOP motif in RP mRNA translational regulation during embryogenesis and identify the trans-acting factor(s) that bind(s) to it (Aim 1). To systematically assess the contribution of 5′UTRs to mRNA translational regulation during zebrafish embryogenesis, I will couple a massively parallel reporter assay of 5′UTRs to polysome profiling (Aim 2). By integrating the translational behaviour of 5′UTR reporters throughout embryogenesis with sequence-based regression models, I anticipate to uncover novel cis-regulatory elements in 5′UTRs with developmental roles.","totalCost":"191149,44","ecMaxContribution":"191149,44","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"UNIVERSITAT BASEL","coordinatorCountry":"CH","participants":"","participantCountries":"","subjects":""}
{"rcn":"229261","id":"893787","acronym":"HOLYHOST","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Welfare and Hosting buildings in the “Holy Land” between the 4th and the 7th c. AD","startDate":"2020-10-01","endDate":"2022-09-30","projectUrl":"","objective":"Between the 4th and the 7th century AD, many hospices dedicated to the poor, elderly, strangers and travelers were built in the countryside, along roads, around and inside cities. They were commissioned by the Church, rich pious men and women concerned by the redeem of their sins, as well as emperors who saw this as a guarantee of social stability. Welfare is thus an important phenomena of Late Antiquity, abundantly mentioned by ancient literary sources and inscriptions, particularly in the eastern part of the Empire. However, the buildings that provided shelter and care to the needy have not yet received sufficient attention from archaeologists. Except for buildings which were identified by their inventors as hostels dedicated to pilgrims, they are still invisible in the field. \nThe aim of the HOLYHOST research project is to bring this social history’s main topic on the field of archaeology. It will address the welfare issue through the archaeological and architectural survey and study of Ancient welfare and hosting establishments’ remains, in the Holy Land (Palestine and Jordan) and around. This work will contribute to a better understanding of the practices linked to hospitality, welfare, accommodation and care in Antiquity. Moreover, such establishments served as models for medieval and modern Islamic, Jewish and Christian waqf institutions (religious endowment), and welfare continues to be highly relevant nowadays, through issues still at the heart of contemporary challenges debated in Europe: poverty, social exclusion, migrant crisis, principle of reception and hospitality. This interdisciplinary and diachronic research project will thus offer many new research perspectives, in terms of history of architecture, evolution of care practices, social and political regulations.","totalCost":"196707,84","ecMaxContribution":"196707,84","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"UNIVERSITE PARIS I PANTHEON-SORBONNE","coordinatorCountry":"FR","participants":"","participantCountries":"","subjects":""}
{"rcn":"229282","id":"896189","acronym":"MICADO","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Microbial contribution to continental wetland carbon budget","startDate":"2021-01-04","endDate":"2023-01-03","projectUrl":"","objective":"Continental wetlands are major carbon dioxide sinks but the second largest source of methane. Monitoring of wetland methane emissions revealed large inter-site variability that is hard to explain in the framework of current biogeochemical theories. Methane production in wetlands is an anaerobic microbial driven process involving a complex set of microbial metabolisms depending on the availability of (i) energy (via the presence of specific redox couples), (ii) organic substrates and (iii) specific microbial communities. To understand the complexity of microbial drivers on wetland methane emissions and quantify their contribution, the MICADO project will set up a multidisciplinary approach linking isotope organic geochemistry and environmental microbiology to assess microbial functioning in situ. As an organic geochemist I have developed an innovative approach to trace in situ microbial activity via compound specific carbon isotope analysis of microbe macromolecules and organic metabolites. The host institution is a leader in France in environmental microbiology and biogeochemistry developing high-throughput metagenomics and microbial rate assessments, for which I will be trained during the MICADO project. These techniques are highly complementary and combined they will provide a comprehensive knowledge on microbial metabolisms involved in organic matter degradation encompassing their complexity and interactions. This will revisit the relationships between organic substrate availability and microbial communities and will contribute at estimating the impact of microbial activity on wetland methane emissions. This project will give me the opportunity to acquire fundamental knowledge and to develop original lines of research that will consolidate my position as an independent scientist in biogeochemistry.","totalCost":"196707,84","ecMaxContribution":"196707,84","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"CENTRE NATIONAL DE LA RECHERCHE SCIENTIFIQUE CNRS","coordinatorCountry":"FR","participants":"","participantCountries":"","subjects":""}
{"rcn":"229249","id":"891624","acronym":"CuTAN","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Copper-Catalyzed Multicomponent Reactions in Tandem Processes for Target Molecule Synthesis","startDate":"2021-02-01","endDate":"2023-01-31","projectUrl":"","objective":"The invention of processes that can form several bonds, stereocentres and rings in a single process is key to a sustainable future in synthetic chemistry. Multicomponent reactions and tandem procedures are two strategies that enable the rapid build-up of molecular complexity from simple reagents. By combining these two strategies into a single procedure, the diversity, complexity and value of products can be further enhanced along with the efficiency and economy of their construction. In this project, Dr Satpathi will develop novel copper-catalyzed multicomponent couplings of unsaturated hydrocarbons (e.g. allenes, enynes) with imines and boron reagents. These procedures will provide high-value amine products with universally high regio-, diastero- and enantiocontrol. The products will bear a variety of synthetic handles, for example, amino, alkynyl/alkenyl, and boryl groups, thus the products are primed for subsequent transformation. Dr Satpathi will exploit this functionality in tandem intramolecular couplings (e.g. intramolecular Suzuki/Buchwald-Hartwig reactions) to provide core cyclic structures of drug molecules and natural products. Thus, through a tandem procedure of; 1) copper-catalyzed borofunctionalization, and; 2) subsequent transition-metal catalyzed cyclization, he will gain efficient access to highly sought-after complex molecules. Overall, the process will provide high-value, chiral, cyclic motifs from abundant, achiral, linear substrates. Finally, Dr Satpathi has identified the phthalide-isoquinoline family of alkaloids as target molecules to display the power of his tandem methodology. Dr Satpathi has devised a novel route, which begins with our tandem multifunctionalization/cyclization reaction, to provide a range of these important alkaloids. The chosen alkaloids are of particular interest as they display a range of bioactivities – for example as natural products, receptor antagonists and on-market drugs.","totalCost":"212933,76","ecMaxContribution":"212933,76","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"THE UNIVERSITY OF MANCHESTER","coordinatorCountry":"UK","participants":"","participantCountries":"","subjects":""}
{"rcn":"229239","id":"887259","acronym":"ALEHOOP","status":"SIGNED","programme":"H2020-EU.2.1.4.","topics":"BBI-2019-SO3-D3","frameworkProgramme":"H2020","title":"Biorefineries for the valorisation of macroalgal residual biomass and legume processing by-products to obtain new protein value chains for high-value food and feed applications","startDate":"2020-06-01","endDate":"2024-05-31","projectUrl":"","objective":"ALEHOOP provides the demonstration at pilot scale of both sustainable macroalgae and legume-based biorefineries for the recovery of low-cost dietary proteins from alga-based and plant residual biomass and their validation to meet market requirements of consumers and industry in the food and feed sectors. In these sectors, consumers are demanding affordable functional natural proteins from alternative sources and industry is demanding low-cost bio-based protein formulations with better performance and higher sustainability. \nCurrent protein demand for the 7.3 billion inhabitants of the world is approximately 202 Mt. Due to the rise in meat consumption more proteins are therefore required for animal feeding. To satisfy the current protein demand, Europe imports over 30 Mt of soy from the Americas each year mainly for animal feeding, entailing 95% dependency of EU on imported soy. Current sources of proteins are becoming unsustainable from an economic and environmental perspective for Europe resulting in concerns for sustainability and food security and leading to search for new alternative proteins. \nALEHOOP addresses the obtaining of proteins from green macroalgal blooms, brown seaweed by-products from algae processors and legume processing by-products (peas, lupines, beans and lentils) as alternative protein sources for animal feeding (case of green seaweed) and food applications (case of brown seaweed and legume by-products), since they are low cost and under-exploited biomass that do not compete with traditional food crops for space and resources. This will reduce EU´s dependency on protein imports and contribute to our raw material security. The new proteins will be validated in foods for elderly, sporty and overweight people, vegetarians and healthy consumers as well as for animal feed creating cross-sectorial interconnection between these value chains and supporting the projected business plan.","totalCost":"6718370","ecMaxContribution":"5140274,41","call":"H2020-BBI-JTI-2019","fundingScheme":"BBI-IA-DEMO","coordinator":"CONTACTICA S.L.","coordinatorCountry":"ES","participants":"CENTIV GMBH;ALGINOR ASA;FUNDACION TECNALIA RESEARCH & INNOVATION;INDUKERN,S.A.;ASOCIACION NACIONAL DE FABRICANTES DE CONSERVAS DE PESCADOS Y MARISCOS-CENTRO TECNICO NACIONAL DE CONSERVACION DE PRODUCTOS DE LA PESCA;BIOZOON GMBH;EIGEN VERMOGEN VAN HET INSTITUUT VOOR LANDBOUW- EN VISSERIJONDERZOEK;BIOSURYA SL;VYZKUMNY USTAV VETERINARNIHO LEKARSTVI;NUTRITION SCIENCES;TECHNOLOGICAL UNIVERSITY DUBLIN;GARLAN, S.COOP.;ISANATUR SPAIN SL;UNIVERSIDAD DE VIGO;UNIVERSIDAD DE CADIZ","participantCountries":"DE;NO;ES;BE;CZ;IE","subjects":""}
{"rcn":"229239","id":"887259","acronym":"ALEHOOP","status":"SIGNED","programme":"H2020-EU.3.2.6.","topics":"BBI-2019-SO3-D3","frameworkProgramme":"H2020","title":"Biorefineries for the valorisation of macroalgal residual biomass and legume processing by-products to obtain new protein value chains for high-value food and feed applications","startDate":"2020-06-01","endDate":"2024-05-31","projectUrl":"","objective":"ALEHOOP provides the demonstration at pilot scale of both sustainable macroalgae and legume-based biorefineries for the recovery of low-cost dietary proteins from alga-based and plant residual biomass and their validation to meet market requirements of consumers and industry in the food and feed sectors. In these sectors, consumers are demanding affordable functional natural proteins from alternative sources and industry is demanding low-cost bio-based protein formulations with better performance and higher sustainability. \nCurrent protein demand for the 7.3 billion inhabitants of the world is approximately 202 Mt. Due to the rise in meat consumption more proteins are therefore required for animal feeding. To satisfy the current protein demand, Europe imports over 30 Mt of soy from the Americas each year mainly for animal feeding, entailing 95% dependency of EU on imported soy. Current sources of proteins are becoming unsustainable from an economic and environmental perspective for Europe resulting in concerns for sustainability and food security and leading to search for new alternative proteins. \nALEHOOP addresses the obtaining of proteins from green macroalgal blooms, brown seaweed by-products from algae processors and legume processing by-products (peas, lupines, beans and lentils) as alternative protein sources for animal feeding (case of green seaweed) and food applications (case of brown seaweed and legume by-products), since they are low cost and under-exploited biomass that do not compete with traditional food crops for space and resources. This will reduce EU´s dependency on protein imports and contribute to our raw material security. The new proteins will be validated in foods for elderly, sporty and overweight people, vegetarians and healthy consumers as well as for animal feed creating cross-sectorial interconnection between these value chains and supporting the projected business plan.","totalCost":"6718370","ecMaxContribution":"5140274,41","call":"H2020-BBI-JTI-2019","fundingScheme":"BBI-IA-DEMO","coordinator":"CONTACTICA S.L.","coordinatorCountry":"ES","participants":"CENTIV GMBH;ALGINOR ASA;FUNDACION TECNALIA RESEARCH & INNOVATION;INDUKERN,S.A.;ASOCIACION NACIONAL DE FABRICANTES DE CONSERVAS DE PESCADOS Y MARISCOS-CENTRO TECNICO NACIONAL DE CONSERVACION DE PRODUCTOS DE LA PESCA;BIOZOON GMBH;EIGEN VERMOGEN VAN HET INSTITUUT VOOR LANDBOUW- EN VISSERIJONDERZOEK;BIOSURYA SL;VYZKUMNY USTAV VETERINARNIHO LEKARSTVI;NUTRITION SCIENCES;TECHNOLOGICAL UNIVERSITY DUBLIN;GARLAN, S.COOP.;ISANATUR SPAIN SL;UNIVERSIDAD DE VIGO;UNIVERSIDAD DE CADIZ","participantCountries":"DE;NO;ES;BE;CZ;IE","subjects":""}
{"rcn":"229258","id":"892834","acronym":"DENVPOC","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"qPCR Microfluidics point-of-care platform for dengue diagnosis","startDate":"2020-05-18","endDate":"2022-05-17","projectUrl":"","objective":"As a result of Global climate change and fast urbanization, global outbreaks of Dengue (DENV)/ Zika(ZIKV)/Chikungunya(CHIKV) virus have the potential to occur. The most common pathway of these infections in humans is through the female Aedes mosquito vector. DENV is an exanthematous febrile disease with varied clinical manifestations and progressions . Due to similarities in symptoms between DENV and ZIKV and CHIKV, it is difficult to make a differential diagnosis, impeding appropriate, timely medical intervention. Furthermore, cross-reactivity with ZIKV, which was recently related to microcephaly, is a serious issue. In 2016, in Brazil alone, there were 4180 microcephaly cases reported instead of 163 cases, more in line with yearly expected projections , , Thus, the sooner an accurate diagnostic which differentiates DENV from the other manifestations is critical; most especially at the early stages of the infection, to have a reliable diagnosis in pregnant women. In 2016, the OMS emergency committee declared that the outbreaks and the potentially resultant neurological disorders in Brazil were an important international state of emergency in public health, as a result of the associated secondary effects; these diseases became a Global concern. This project allows developing a highly and fast Multiplex qPCR POC platform by using FASTGENE technology with a minimal amount of patient serotype. It would reduce the time of analysis (30 to 90’ for a standard) and costs. Additionally, the sample preprocessing and thermalization will shorten real-time PCR amplification time and will be integrated within the microfluidic systems. This platform can result in a commercialized product whereupon a main market target would be pregnant women and people living or traveling through/from outbreak risk areas.","totalCost":"196707,84","ecMaxContribution":"196707,84","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-SE","coordinator":"BFORCURE","coordinatorCountry":"FR","participants":"","participantCountries":"","subjects":""}
{"rcn":"229280","id":"895716","acronym":"DoMiCoP","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"The Diffusion of Migration Control Practice. Actors, Processes and Effects.","startDate":"2021-03-01","endDate":"2023-02-28","projectUrl":"","objective":"DoMiCoP develops new understandings and perspectives to study migration control in practice in the European Union by asking one main question: how and why do communities of practice develop and diffuse the knowledge required to put migration control into action? Unlike the nexus between expert knowledge, epistemic communities and policy formulation, the nexus between everyday knowledge, communities of practice and policy implementation has not yet received systematic scholarly attention. My project bridges that gap by focusing on intermediate arenas in which communities of practice take shape most notably the meetings and trainings that gather state and non-state actors involved in putting asylum, detention and removal into practice. By building on field-based methodologies (interviews and participant observations), DoMiCoP sheds ethnographic light on the role that ‘learning from abroad’ plays in the implementation of migration control in the EU. My project’s aim is threefold: 1) Identifying arenas at intermediate levels in which communities of practice take shape; 2) Analysing the communities of practice by focusing on the configurations of actors and organizations involved, the motivations underlying their involvement, the process of knowledge development in interaction, the conflicts and negotiations; 3) Revealing the role of non-state organizations (private for profit and not-for-profit). From a theoretical point of view, this project goes beyond the classical view of the implementation as a test to assess the effectiveness of policy transfers towards an analysis of policy transfer at that level of policy-making. From an empirical point of view, the project expands knowledge about less-studied venues of policy-making and provides original thick descriptions. From a methodological point of view, the project engages with qualitative methods for the study of policy diffusion and aims at responding to their main challenges through participant observation.","totalCost":"163673,28","ecMaxContribution":"163673,28","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"EUROPEAN UNIVERSITY INSTITUTE","coordinatorCountry":"IT","participants":"","participantCountries":"","subjects":""}
@ -0,0 +1,25 @@
rcn;code;title;shortTitle;language
664331;H2020-EU.3.3.2.;Un approvisionnement en électricité à faible coût et à faibles émissions de carbone;Low-cost, low-carbon energy supply;fr
664355;H2020-EU.3.3.7.;Absorción por el mercado de la innovación energética - explotación del Programa Energía Inteligente - Europa Europe;Market uptake of energy innovation;es
664323;H2020-EU.3.3.1.;Ridurre il consumo di energia e le emissioni di carbonio grazie all'uso intelligente e sostenibile;Reducing energy consumption and carbon footprint;it
664233;H2020-EU.2.3.2.3.;Wsparcie innowacji rynkowych;Supporting market-driven innovation;pl
664199;H2020-EU.2.1.5.1.;Tecnologías para las fábricas del futuro;Technologies for Factories of the Future;es
664235;H2020-EU.3.;PRIORITÉ «Défis de société»;Societal Challenges;fr
664355;H2020-EU.3.3.7.;"Assorbimento di mercato dell'innovazione energetica - iniziative fondate sul programma ""Energia intelligente - Europa""";Market uptake of energy innovation;it
664355;H2020-EU.3.3.7.;"Markteinführung von Energieinnovationen – Aufbau auf ""Intelligente Energie – Europa";Market uptake of energy innovation;de
664235;H2020-EU.3.;"PRIORIDAD ""Retos de la sociedad""";Societal Challenges;es
664231;H2020-EU.2.3.2.2.;Mejorar la capacidad de innovación de las PYME;Enhancing the innovation capacity of SMEs;es
664223;H2020-EU.2.3.;LIDERAZGO INDUSTRIAL - Innovación en la pequeña y mediana empresa;Innovation in SMEs;es
664323;H2020-EU.3.3.1.;Réduire la consommation d'énergie et l'empreinte carbone en utilisant l'énergie de manière intelligente et durable;Reducing energy consumption and carbon footprint;fr
664323;H2020-EU.3.3.1.;Reducir el consumo de energía y la huella de carbono mediante un uso inteligente y sostenible;Reducing energy consumption and carbon footprint;es
664215;H2020-EU.2.1.6.4.;Beitrag der europäischen Forschung zu internationalen Weltraumpartnerschaften;Research in support of international space partnerships;de
664213;H2020-EU.2.1.6.3.;Permettere lo sfruttamento dei dati spaziali;;it
664213;H2020-EU.2.1.6.3.;Permettre l'exploitation des données spatiales;Enabling exploitation of space data;fr
664231;H2020-EU.2.3.2.2.;Zwiększenie zdolności MŚP pod względem innowacji;Enhancing the innovation capacity of SMEs;pl
664231;H2020-EU.2.3.2.2.;Rafforzare la capacità di innovazione delle PMI;Enhancing the innovation capacity of SMEs;it
664213;H2020-EU.2.1.6.3.;Grundlagen für die Nutzung von Weltraumdaten;Enabling exploitation of space data;de
664211;H2020-EU.2.1.6.2.;Favorecer los avances en las tecnologías espaciales;Enabling advances in space technology;es
664209;H2020-EU.2.1.6.1.;Assurer la compétitivité et l'indépendance de l'Europe et promouvoir l'innovation dans le secteur spatial européen;Competitiveness, non-dependence and innovation;fr
664231;H2020-EU.2.3.2.2.;Renforcement de la capacité d'innovation des PME;Enhancing the innovation capacity of SMEs;fr
664203;H2020-EU.2.1.5.3.;Tecnologías sostenibles, eficientes en su utilización de recursos y de baja emisión de carbono en las industrias de transformación de gran consumo energético;Sustainable, resource-efficient and low-carbon technologies in energy-intensive process industries;es
664103;H2020-EU.1.2.1.;FET Open;FET Open;es
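For orientation only, here is a minimal sketch of splitting one of the semicolon-separated programme rows above (rcn;code;title;shortTitle;language) into its five fields. The class and method names are made up for illustration and are not part of this commit; rows whose title is wrapped in double quotes and may contain embedded semicolons, like the quoted Italian and German entries above, would need a real CSV parser rather than a plain split.

```java
import java.util.Arrays;

public class ProgrammeCsvRowExample {

	// Splits one row of the programme file into its five fields:
	// rcn, code, title, shortTitle, language.
	static String[] parseRow(String row) {
		// -1 keeps trailing empty fields (some rows have an empty shortTitle).
		return row.split(";", -1);
	}

	public static void main(String[] args) {
		String row = "664213;H2020-EU.2.1.6.3.;Permettere lo sfruttamento dei dati spaziali;;it";
		String[] fields = parseRow(row);
		System.out.println("code = " + fields[1] + ", language = " + fields[4]);
		System.out.println(Arrays.toString(fields));
	}
}
```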
@ -0,0 +1,16 @@
{"rcn":"229267","id":"894593","acronym":"ICARUS","status":"SIGNED","programme":"H2020-EU.3.4.7.","topics":"SESAR-ER4-31-2019","frameworkProgramme":"H2020","title":"INTEGRATED COMMON ALTITUDE REFERENCE SYSTEM FOR U-SPACE","startDate":"2020-05-01","endDate":"2022-07-31","projectUrl":"","objective":"ICARUS project proposes an innovative solution to the challenge of the Common Altitude Reference inside VLL airspaces with the definition of a new U-space service and its validation in a real operational environment. In manned aviation, the methods of determining the altitude of an aircraft are based on pressure altitude difference measurements (e.g. QFE, QNH and FL) referred to a common datum. \nThe UA flights superimpose a new challenge, since a small drone may take off and land almost from everywhere, hence reducing the original significance of QFE settings, introduced on behalf of manned pilots to display on the altimeter the 0-height at touchdown on the local runway. In fact, the possibility for n drones to take off at n different places would generate a series of n different QFE corresponding to different heights of ground pressures referred to the take-off “Home points”. Therefore for a large number drones, new methodologies and procedures shall be put in place. The ICARUS defines a new U-space U3 service tightly coupled with the interface of the existing U-space services (e.g. Tracking, and Flight Planning services). The users of ICARUS service shall be remote pilots competent to fly in BVLOS in the specific category of UAS operations and ultralight GA pilots potentially sharing the same VLL airspace. \nThe ICARUS proposed approach foresees the realization of DTM service embedded in an Application Program Interface (API) that can be queried by UAS pilot/operator (or by drone itself) based on the actual positioning of the UA along its trajectory, computed by the (E)GNSS receiver. The output of the DTM service would provide information on distance from ground/obstacles in combination with the common altitude reference.\nAccuracy, continuity, integrity and availability requirements for GNSS-based altimetry together with accuracy and resolution requirements of the DTM to be provided by ICARUS service are key topics of the study.","totalCost":"1385286,25","ecMaxContribution":"1144587,5","call":"H2020-SESAR-2019-2","fundingScheme":"SESAR-RIA","coordinator":"E-GEOS SPA","coordinatorCountry":"IT","participants":"TOPVIEW SRL;TELESPAZIO SPA;DRONERADAR SP Z O.O.;EUROCONTROL - EUROPEAN ORGANISATION FOR THE SAFETY OF AIR NAVIGATION;EUROUSC ESPANA SL;POLITECNICO DI MILANO;UNIVERSITA DEGLI STUDI DI ROMA LA SAPIENZA","participantCountries":"IT;PL;BE;ES","subjects":""}
{"rcn":"229284","id":"897004","acronym":"ISLand","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Isolation and Segregation Landscape. Archaeology of quarantine in the Indian Ocean World","startDate":"2020-11-01","endDate":"2023-10-31","projectUrl":"","objective":"The proposed research presents an experimental and completely novel investigation within the historical archaeology,\napplied to isolated contexts. The main objective of ISLand is to provide a new way of thinking about human interactions\nwithin colonial empires and bringing colonial studies into dialogue with medical history and the emerging concept of\nhealthscaping. It seeks to do so by studying quarantine facilities in the Indian Ocean World during the long nineteenth\ncentury, a crucial period for the history of European empires in that region and a flashpoint for the conceptualization of\nmodern public health. Quarantine, traditionally viewed as merely a mechanism for the control of disease, will be analyzed as\nthe outward material response to important changes taking place socially, ecologically, and politically at the time.\nThe project is a part of an international, interdisciplinary effort, combining history, archaeology, and anthropology. The\nresearcher will tap numerous archival sources and archaeological data from selected sites, examine them through social and\nspatial analysis, and systematically analyze a test case in Mauritius through the most innovative methods that target\nlandscape and standing archaeology.\nThe broader impacts of ISLand have relevance for current European approaches to the migration crisis, where the threat of\ndisease has been ignited as a potentially debilitating consequence of immigration from extra-European countries. The\ntraining-through-research project at the Stanford University, the top institution where acquiring knowledge and skills in\nhistorical archaeology, will allow the applicant to develop into a position of professional maturity with a specific\ninterdisciplinary set of skills. With the support of the host institutions in EU, the researcher will promote historical archaeology\nin European academy, stimulating new approaches in usual archaeological research and an interdisciplinary approach with\ncultural anthropology.","totalCost":"253052,16","ecMaxContribution":"253052,16","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-GF","coordinator":"UNIVERSITEIT VAN AMSTERDAM","coordinatorCountry":"NL","participants":"","participantCountries":"","subjects":""}
{"rcn":"229281","id":"896300","acronym":"STRETCH","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Smart Textiles for RETrofitting and Monitoring of Cultural Heritage Buildings","startDate":"2020-09-01","endDate":"2022-08-31","projectUrl":"","objective":"This project aims to develop novel techniques using smart multifunctional materials for the combined seismic-plus-energy retrofitting, and Structural Health Monitoring (SHM) of the European cultural heritage buildings (CHB). The need for upgrading the existing old and CHB is becoming increasingly important for the EU countries, due to: (1) their poor structural performance during recent earthquakes (e.g. Italy, Greece) or other natural hazards (e.g. extreme weather conditions) that have resulted in significant economic losses, and loss of human lives; and (2) their low energy performance which increases significantly their energy consumption (buildings are responsible for 40% of EU energy consumption). Moreover, the SHM of the existing buildings is crucial for assessing continuously their structural integrity and thus to provide information for planning cost effective and sustainable maintenance decisions. Since replacing the old buildings with new is not financially feasible, and even it is not allowed for CHB, their lifetime extension requires considering simultaneously both structural and energy retrofitting. It is noted that the annual cost of repair and maintenance of existing European building stock is estimated to be about 50% of the total construction budget, currently standing at more than €300 billion. To achieve cost effectiveness, STRETCH explores a novel approach, which integrates technical textile reinforcement with thermal insulation systems and strain sensors to provide simultaneous structural-plus-energy retrofitting combined with SHM, tailored for masonry cultural heritage building envelopes. The effectiveness of the proposed retrofitting system will be validated experimentally and analytically. Moreover, draft guidelines and recommendations for determining future research on the use of smart composite materials for the concurrent retrofitting (structural-plus-energy) and SHM of the existing cultural heritage buildings envelopes will be proposed.","totalCost":"183473,28","ecMaxContribution":"183473,28","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"JRC -JOINT RESEARCH CENTRE- EUROPEAN COMMISSION","coordinatorCountry":"BE","participants":"","participantCountries":"","subjects":""}
{"rcn":"229265","id":"892890","acronym":"RhythmicPrediction","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Rhythmic prediction in speech perception: are our brain waves in sync with our native language?","startDate":"2021-01-01","endDate":"2022-12-31","projectUrl":"","objective":"Speech has rhythmic properties that widely differ across languages. When we listen to foreign languages, we may perceive them to be more musical, or rather more rap-like than our own. Even if we are unaware of it, the rhythm and melody of language, i.e. prosody, reflects its linguistic structure. On the one hand, prosody emphasizes content words and new information with stress and accents. On the other hand, it is aligned to phrase edges, marking them with boundary tones. Prosody hence helps the listener to focus on important words and to chunk sentences into phrases, and phrases into words. In fact, prosody is even used predictively, for instance to time the onset of the next word, the next piece of new information, or the total remaining length of the utterance, so the listener can seamlessly start their own speaking turn. \nSo, the listener, or rather their brain, is actively predicting when important speech events will happen, using prosody. How prosodic rhythms are exploited to predict speech timing, however, is unclear. No link between prosody and neural predictive processing has yet been empirically made. One hypothesis is that rhythm, such as the alternation of stressed and unstressed syllables, helps listeners time their attention. Similar behavior is best captured by the notion of an internal oscillator which can be set straight by attentional spikes. While neuroscientific evidence for the relation of neural oscillators to speech processing is starting to emerge, no link to the use of prosody nor predictive listening exists, yet. Furthermore, it is still unknown how native language knowledge affects cortical oscillations, and how oscillations are affected by cross-linguistic differences in rhythmic structure. The current project combines the standing knowledge of prosodic typology with the recent advances in neuroscience on cortical oscillations, to investigate the role of internal oscillators on native prosody perception, and active speech prediction.","totalCost":"191149,44","ecMaxContribution":"191149,44","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"UNIVERSITE DE GENEVE","coordinatorCountry":"CH","participants":"","participantCountries":"","subjects":""}
{"rcn":"229235","id":"886828","acronym":"ASAP","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Advanced Solutions for Asphalt Pavements","startDate":"2021-09-01","endDate":"2023-08-31","projectUrl":"","objective":"The Advanced Solutions for Asphalt Pavements (ASAP) project involves the development of a unique road paving technology which will use a bio-bitumen rejuvenator to rejuvenate aged asphalt bitumen. This technology will help to extend the lifespan of asphalt pavements (roads) and will reduce the environmental and economic impact of roads and road maintenance processes. Recycling and self-healing processes will replace fossil fuel dependent technology. Self-healing will involve rejuvenating aged asphalt bitumen using a bio-rejuvenator developed using microalgae oils (rejuvenating bio-oil). Microalgae has been selected because of its fast growth, versatility and ability to survive within hostile environments, such as wastewater. \n\nASAP will utilise microalgae, cultivated within the wastewater treatment process, as a source of the rejuvenating bio-oil. The solvent (Soxhlet) processes will be used to extract the oil from the microalgae. To ensure the efficiency of the oil extraction process, an ultrasonication process will be used to pre-treat the microalgae. The suitability of rejuvenating bio-oil as a replacement for the bitumen rejuvenator (fossil fuel based) will be ascertained via a series of standard bituminous and accelerated tests. A rejuvenator-binder diffusion numerical model will be developed, based on the Delft Lattice concrete diffusion model, to determine the conditions required for rejuvenation to occur and to ascertain the healing rate of the asphalt binder. These parameters will facilitate the selection and optimisation of the asphalt self-healing systems (specifically the amount of bio-oil rejuvenator and time required) to achieve full rejuvenation. \n\nThis novel approach will benchmark the effectiveness of this intervention against existing asphalt design and maintenance processes and assess feasibility. The ASAP project presents an opportunity to revolutionise road design and maintenance processes and reduce its environmental and financial costs.","totalCost":"187572,48","ecMaxContribution":"187572,48","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"NEDERLANDSE ORGANISATIE VOOR TOEGEPAST NATUURWETENSCHAPPELIJK ONDERZOEK TNO","coordinatorCountry":"NL","participants":"","participantCountries":"","subjects":""}
{"rcn":"229236","id":"886776","acronym":"BIOBESTicide","status":"SIGNED","programme":"H2020-EU.2.1.4.;H2020-EU.3.2.6.","topics":"BBI-2019-SO3-D4","frameworkProgramme":"H2020","title":"BIO-Based pESTicides production for sustainable agriculture management plan","startDate":"2020-05-01","endDate":"2023-04-30","projectUrl":"","objective":"The BIOBESTicide project will validate and demonstrate the production of an effective and cost-efficient biopesticide. The demonstration will be based on an innovative bio-based value chain starting from the valorisation of sustainable biomasses, i.e. beet pulp and sugar molasses and will exploit the properties of the oomycete Pythium oligandrum strain I-5180 to increase natural plant defenses, to produce an highly effective and eco-friendly biopesticide solution for vine plants protection. \nBIOVITIS, the project coordinator, has developed, at laboratory level (TRL4), an effective method to biocontrol one of the major causes of worldwide vineyards destruction, the Grapevine Trunk Diseases (GTDs). The protection system is based on the oomycete Pythium oligandrum strain I-5180 that, at applied at optimal time and concentration, colonises the root of vines and stimulates the natural plant defences against GTDs, providing a protection that ranges between 40% and 60%. \nBIOBESTicide project will respond to the increasing demands for innovative solutions for crop protection agents, transferring the technology to a DEMO Plant able to produce more than 10 T of a high-quality oomycete-based biopesticide product per year (TRL7). \nThe BIOBESTicide project will validate the efficiency of the formulated product on vineyards of different geographical areas.\nTo assure the safety of products under both health and environmental points of view, a full and complete approval dossier for Pythium oligandrum strain I-5180 will be submitted in all the European countries. \nA Life Cycle Sustainability Assessment (LCSA) will be conducted to assess the environmental, economic and social impacts of the developed products.\nThe adoption of the effective and cost-efficient biopesticide will have significant impacts with a potential ROI of 30 % in just 5 years and a total EBITDA of more than € 6,400,000.","totalCost":"4402772,5","ecMaxContribution":"3069653","call":"H2020-BBI-JTI-2019","fundingScheme":"BBI-IA-DEMO","coordinator":"BIOVITIS","coordinatorCountry":"FR","participants":"MERCIER FRERES SARL;FUNDACION TECNALIA RESEARCH & INNOVATION;LAMBERTI SPA;EURION CONSULTING;CIAOTECH Srl;STOWARZYSZENIE ZACHODNIOPOMORSKI KLASTER CHEMICZNY ZIELONA CHEMIA;NORDZUCKER AG;INSTITUT NATIONAL DE RECHERCHE POUR L'AGRICULTURE, L'ALIMENTATION ET L'ENVIRONNEMENT;INSTITUT FRANCAIS DE LA VIGNE ET DU VIN","participantCountries":"FR;ES;IT;PL;DE","subjects":""}
{"rcn":"229276","id":"895426","acronym":"DisMoBoH","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Dissecting the molecular building principles of locally formed transcriptional hubs","startDate":"2021-09-01","endDate":"2023-08-31","projectUrl":"","objective":"Numerous DNA variants have already been identified that modulate inter-individual molecular traits – most prominently gene expression. However, since finding mechanistic interpretations relating genotype to phenotype has proven challenging, the focus has shifted to higher-order regulatory features, i.e. chromatin accessibility, transcription factor (TF) binding and 3D chromatin interactions. This revealed at least two enhancer types: “lead” enhancers in which the presence of genetic variants modulates the activity of entire chromatin domains, and “dependent” ones in which variants induce subtle changes, affecting DNA accessibility, but not transcription. Although cell type-specific TFs are likely important, it remains unclear which sequence features are required to establish such enhancer hierarchies, and under which circumstances genetic variation results in altered enhancer-promoter contacts and differential gene expression. Here, we propose to investigate the molecular mechanisms that link DNA variation to TF binding, chromatin topology, and gene expression response. We will leverage data on enhancer hierarchy and sequence-specific TF binding to identify the sequence signatures that define “lead” enhancers. The results will guide the design of a synthetic locus that serves as an in vivo platform to systematically vary the building blocks of local transcriptional units: i) DNA sequence – including variations in TF binding site affinity and syntax, ii) molecular interactions between TFs, and iii) chromatin conformation. To validate our findings, we will perform optical reconstruction of chromatin architecture for a select number of DNA variants. By simultaneously perturbing co-dependent features, this proposal will provide novel mechanistic insights into the formation of local transcriptional hubs.","totalCost":"191149,44","ecMaxContribution":"191149,44","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-RI","coordinator":"ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE","coordinatorCountry":"CH","participants":"","participantCountries":"","subjects":""}
{"rcn":"229288","id":"898218","acronym":"devUTRs","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Uncovering the roles of 5′UTRs in translational control during early zebrafish development","startDate":"2021-09-01","endDate":"2023-08-31","projectUrl":"","objective":"Following fertilisation, metazoan embryos are transcriptionally silent, and embryogenesis is controlled by maternally deposited factors. Developmental progression requires the synthesis of new mRNAs and proteins in a coordinated fashion. Many posttranscriptional mechanisms regulate the fate of maternal mRNAs, but it is less understood how translational control shapes early embryogenesis. In eukaryotes, translation starts at the mRNA 5′ end, consisting of the 5′ cap and 5′ untranslated region (UTR). Protein synthesis is primarily regulated at the translation initiation step by elements within the 5′UTR. However, the role of 5′UTRs in regulating the dynamics of mRNA translation during vertebrate embryogenesis remains unexplored. For example, all vertebrate ribosomal protein (RP) mRNAs harbor a conserved terminal oligopyrimidine tract (TOP) in their 5′UTR. RP levels must be tightly controlled to ensure proper organismal development, but if and how the TOP motif mediates RP mRNA translational regulation during embryogenesis is unclear. Overall, we lack a systematic understanding of the regulatory information contained in 5′UTRs. In this work, I aim to uncover the 5′UTR in vivo rules for mRNA translational regulation during zebrafish embryogenesis. I propose to apply imaging and biochemical approaches to characterise the role of the TOP motif in RP mRNA translational regulation during embryogenesis and identify the trans-acting factor(s) that bind(s) to it (Aim 1). To systematically assess the contribution of 5′UTRs to mRNA translational regulation during zebrafish embryogenesis, I will couple a massively parallel reporter assay of 5′UTRs to polysome profiling (Aim 2). By integrating the translational behaviour of 5′UTR reporters throughout embryogenesis with sequence-based regression models, I anticipate to uncover novel cis-regulatory elements in 5′UTRs with developmental roles.","totalCost":"191149,44","ecMaxContribution":"191149,44","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"UNIVERSITAT BASEL","coordinatorCountry":"CH","participants":"","participantCountries":"","subjects":""}
{"rcn":"229261","id":"893787","acronym":"HOLYHOST","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Welfare and Hosting buildings in the “Holy Land” between the 4th and the 7th c. AD","startDate":"2020-10-01","endDate":"2022-09-30","projectUrl":"","objective":"Between the 4th and the 7th century AD, many hospices dedicated to the poor, elderly, strangers and travelers were built in the countryside, along roads, around and inside cities. They were commissioned by the Church, rich pious men and women concerned by the redeem of their sins, as well as emperors who saw this as a guarantee of social stability. Welfare is thus an important phenomena of Late Antiquity, abundantly mentioned by ancient literary sources and inscriptions, particularly in the eastern part of the Empire. However, the buildings that provided shelter and care to the needy have not yet received sufficient attention from archaeologists. Except for buildings which were identified by their inventors as hostels dedicated to pilgrims, they are still invisible in the field. \nThe aim of the HOLYHOST research project is to bring this social history’s main topic on the field of archaeology. It will address the welfare issue through the archaeological and architectural survey and study of Ancient welfare and hosting establishments’ remains, in the Holy Land (Palestine and Jordan) and around. This work will contribute to a better understanding of the practices linked to hospitality, welfare, accommodation and care in Antiquity. Moreover, such establishments served as models for medieval and modern Islamic, Jewish and Christian waqf institutions (religious endowment), and welfare continues to be highly relevant nowadays, through issues still at the heart of contemporary challenges debated in Europe: poverty, social exclusion, migrant crisis, principle of reception and hospitality. This interdisciplinary and diachronic research project will thus offer many new research perspectives, in terms of history of architecture, evolution of care practices, social and political regulations.","totalCost":"196707,84","ecMaxContribution":"196707,84","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"UNIVERSITE PARIS I PANTHEON-SORBONNE","coordinatorCountry":"FR","participants":"","participantCountries":"","subjects":""}
{"rcn":"229282","id":"896189","acronym":"MICADO","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Microbial contribution to continental wetland carbon budget","startDate":"2021-01-04","endDate":"2023-01-03","projectUrl":"","objective":"Continental wetlands are major carbon dioxide sinks but the second largest source of methane. Monitoring of wetland methane emissions revealed large inter-site variability that is hard to explain in the framework of current biogeochemical theories. Methane production in wetlands is an anaerobic microbial driven process involving a complex set of microbial metabolisms depending on the availability of (i) energy (via the presence of specific redox couples), (ii) organic substrates and (iii) specific microbial communities. To understand the complexity of microbial drivers on wetland methane emissions and quantify their contribution, the MICADO project will set up a multidisciplinary approach linking isotope organic geochemistry and environmental microbiology to assess microbial functioning in situ. As an organic geochemist I have developed an innovative approach to trace in situ microbial activity via compound specific carbon isotope analysis of microbe macromolecules and organic metabolites. The host institution is a leader in France in environmental microbiology and biogeochemistry developing high-throughput metagenomics and microbial rate assessments, for which I will be trained during the MICADO project. These techniques are highly complementary and combined they will provide a comprehensive knowledge on microbial metabolisms involved in organic matter degradation encompassing their complexity and interactions. This will revisit the relationships between organic substrate availability and microbial communities and will contribute at estimating the impact of microbial activity on wetland methane emissions. This project will give me the opportunity to acquire fundamental knowledge and to develop original lines of research that will consolidate my position as an independent scientist in biogeochemistry.","totalCost":"196707,84","ecMaxContribution":"196707,84","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"CENTRE NATIONAL DE LA RECHERCHE SCIENTIFIQUE CNRS","coordinatorCountry":"FR","participants":"","participantCountries":"","subjects":""}
{"rcn":"229249","id":"891624","acronym":"CuTAN","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"Copper-Catalyzed Multicomponent Reactions in Tandem Processes for Target Molecule Synthesis","startDate":"2021-02-01","endDate":"2023-01-31","projectUrl":"","objective":"The invention of processes that can form several bonds, stereocentres and rings in a single process is key to a sustainable future in synthetic chemistry. Multicomponent reactions and tandem procedures are two strategies that enable the rapid build-up of molecular complexity from simple reagents. By combining these two strategies into a single procedure, the diversity, complexity and value of products can be further enhanced along with the efficiency and economy of their construction. In this project, Dr Satpathi will develop novel copper-catalyzed multicomponent couplings of unsaturated hydrocarbons (e.g. allenes, enynes) with imines and boron reagents. These procedures will provide high-value amine products with universally high regio-, diastero- and enantiocontrol. The products will bear a variety of synthetic handles, for example, amino, alkynyl/alkenyl, and boryl groups, thus the products are primed for subsequent transformation. Dr Satpathi will exploit this functionality in tandem intramolecular couplings (e.g. intramolecular Suzuki/Buchwald-Hartwig reactions) to provide core cyclic structures of drug molecules and natural products. Thus, through a tandem procedure of; 1) copper-catalyzed borofunctionalization, and; 2) subsequent transition-metal catalyzed cyclization, he will gain efficient access to highly sought-after complex molecules. Overall, the process will provide high-value, chiral, cyclic motifs from abundant, achiral, linear substrates. Finally, Dr Satpathi has identified the phthalide-isoquinoline family of alkaloids as target molecules to display the power of his tandem methodology. Dr Satpathi has devised a novel route, which begins with our tandem multifunctionalization/cyclization reaction, to provide a range of these important alkaloids. The chosen alkaloids are of particular interest as they display a range of bioactivities – for example as natural products, receptor antagonists and on-market drugs.","totalCost":"212933,76","ecMaxContribution":"212933,76","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"THE UNIVERSITY OF MANCHESTER","coordinatorCountry":"UK","participants":"","participantCountries":"","subjects":""}
{"rcn":"229239","id":"887259","acronym":"ALEHOOP","status":"SIGNED","programme":"H2020-EU.2.1.4.;H2020-EU.3.2.6.","topics":"BBI-2019-SO3-D3","frameworkProgramme":"H2020","title":"Biorefineries for the valorisation of macroalgal residual biomass and legume processing by-products to obtain new protein value chains for high-value food and feed applications","startDate":"2020-06-01","endDate":"2024-05-31","projectUrl":"","objective":"ALEHOOP provides the demonstration at pilot scale of both sustainable macroalgae and legume-based biorefineries for the recovery of low-cost dietary proteins from alga-based and plant residual biomass and their validation to meet market requirements of consumers and industry in the food and feed sectors. In these sectors, consumers are demanding affordable functional natural proteins from alternative sources and industry is demanding low-cost bio-based protein formulations with better performance and higher sustainability. \nCurrent protein demand for the 7.3 billion inhabitants of the world is approximately 202 Mt. Due to the rise in meat consumption more proteins are therefore required for animal feeding. To satisfy the current protein demand, Europe imports over 30 Mt of soy from the Americas each year mainly for animal feeding, entailing 95% dependency of EU on imported soy. Current sources of proteins are becoming unsustainable from an economic and environmental perspective for Europe resulting in concerns for sustainability and food security and leading to search for new alternative proteins. \nALEHOOP addresses the obtaining of proteins from green macroalgal blooms, brown seaweed by-products from algae processors and legume processing by-products (peas, lupines, beans and lentils) as alternative protein sources for animal feeding (case of green seaweed) and food applications (case of brown seaweed and legume by-products), since they are low cost and under-exploited biomass that do not compete with traditional food crops for space and resources. This will reduce EU´s dependency on protein imports and contribute to our raw material security. The new proteins will be validated in foods for elderly, sporty and overweight people, vegetarians and healthy consumers as well as for animal feed creating cross-sectorial interconnection between these value chains and supporting the projected business plan.","totalCost":"6718370","ecMaxContribution":"5140274,41","call":"H2020-BBI-JTI-2019","fundingScheme":"BBI-IA-DEMO","coordinator":"CONTACTICA S.L.","coordinatorCountry":"ES","participants":"CENTIV GMBH;ALGINOR ASA;FUNDACION TECNALIA RESEARCH & INNOVATION;INDUKERN,S.A.;ASOCIACION NACIONAL DE FABRICANTES DE CONSERVAS DE PESCADOS Y MARISCOS-CENTRO TECNICO NACIONAL DE CONSERVACION DE PRODUCTOS DE LA PESCA;BIOZOON GMBH;EIGEN VERMOGEN VAN HET INSTITUUT VOOR LANDBOUW- EN VISSERIJONDERZOEK;BIOSURYA SL;VYZKUMNY USTAV VETERINARNIHO LEKARSTVI;NUTRITION SCIENCES;TECHNOLOGICAL UNIVERSITY DUBLIN;GARLAN, S.COOP.;ISANATUR SPAIN SL;UNIVERSIDAD DE VIGO;UNIVERSIDAD DE CADIZ","participantCountries":"DE;NO;ES;BE;CZ;IE","subjects":""}
{"rcn":"229258","id":"892834","acronym":"DENVPOC","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"qPCR Microfluidics point-of-care platform for dengue diagnosis","startDate":"2020-05-18","endDate":"2022-05-17","projectUrl":"","objective":"As a result of Global climate change and fast urbanization, global outbreaks of Dengue (DENV)/ Zika(ZIKV)/Chikungunya(CHIKV) virus have the potential to occur. The most common pathway of these infections in humans is through the female Aedes mosquito vector. DENV is an exanthematous febrile disease with varied clinical manifestations and progressions . Due to similarities in symptoms between DENV and ZIKV and CHIKV, it is difficult to make a differential diagnosis, impeding appropriate, timely medical intervention. Furthermore, cross-reactivity with ZIKV, which was recently related to microcephaly, is a serious issue. In 2016, in Brazil alone, there were 4180 microcephaly cases reported instead of 163 cases, more in line with yearly expected projections , , Thus, the sooner an accurate diagnostic which differentiates DENV from the other manifestations is critical; most especially at the early stages of the infection, to have a reliable diagnosis in pregnant women. In 2016, the OMS emergency committee declared that the outbreaks and the potentially resultant neurological disorders in Brazil were an important international state of emergency in public health, as a result of the associated secondary effects; these diseases became a Global concern. This project allows developing a highly and fast Multiplex qPCR POC platform by using FASTGENE technology with a minimal amount of patient serotype. It would reduce the time of analysis (30 to 90’ for a standard) and costs. Additionally, the sample preprocessing and thermalization will shorten real-time PCR amplification time and will be integrated within the microfluidic systems. This platform can result in a commercialized product whereupon a main market target would be pregnant women and people living or traveling through/from outbreak risk areas.","totalCost":"196707,84","ecMaxContribution":"196707,84","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-SE","coordinator":"BFORCURE","coordinatorCountry":"FR","participants":"","participantCountries":"","subjects":""}
{"rcn":"229280","id":"895716","acronym":"DoMiCoP","status":"SIGNED","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019","frameworkProgramme":"H2020","title":"The Diffusion of Migration Control Practice. Actors, Processes and Effects.","startDate":"2021-03-01","endDate":"2023-02-28","projectUrl":"","objective":"DoMiCoP develops new understandings and perspectives to study migration control in practice in the European Union by asking one main question: how and why do communities of practice develop and diffuse the knowledge required to put migration control into action? Unlike the nexus between expert knowledge, epistemic communities and policy formulation, the nexus between everyday knowledge, communities of practice and policy implementation has not yet received systematic scholarly attention. My project bridges that gap by focusing on intermediate arenas in which communities of practice take shape most notably the meetings and trainings that gather state and non-state actors involved in putting asylum, detention and removal into practice. By building on field-based methodologies (interviews and participant observations), DoMiCoP sheds ethnographic light on the role that ‘learning from abroad’ plays in the implementation of migration control in the EU. My project’s aim is threefold: 1) Identifying arenas at intermediate levels in which communities of practice take shape; 2) Analysing the communities of practice by focusing on the configurations of actors and organizations involved, the motivations underlying their involvement, the process of knowledge development in interaction, the conflicts and negotiations; 3) Revealing the role of non-state organizations (private for profit and not-for-profit). From a theoretical point of view, this project goes beyond the classical view of the implementation as a test to assess the effectiveness of policy transfers towards an analysis of policy transfer at that level of policy-making. From an empirical point of view, the project expands knowledge about less-studied venues of policy-making and provides original thick descriptions. From a methodological point of view, the project engages with qualitative methods for the study of policy diffusion and aims at responding to their main challenges through participant observation.","totalCost":"163673,28","ecMaxContribution":"163673,28","call":"H2020-MSCA-IF-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"EUROPEAN UNIVERSITY INSTITUTE","coordinatorCountry":"IT","participants":"","participantCountries":"","subjects":""}
{"rcn":"229297","id":"954782","acronym":"MiniLLock","status":"SIGNED","programme":"H2020-EU.3.;H2020-EU.2.3.;H2020-EU.2.1.","topics":"EIC-SMEInst-2018-2020","frameworkProgramme":"H2020","title":"Mini Launch Lock devices for small satellites","startDate":"2020-05-01","endDate":"2022-04-30","projectUrl":"","objective":"Space industry is experiencing the most important paradigm shift in its history with the rise of small satellites and megaconstellations.\nSatellite miniaturization requires to reduce significantly production and orbit launching costs. To address the\nnew challenge of this manufacturing process and switch from craftsmanship to industrialization, space industry is turning\ntowards other domains looking for new solutions, disruptive technologies, and manufacturing process.\nMini Launch Lock devices for small satellites (MiniLLock) proposes innovative actuators on the cutting edge of customer\ndemand. They offer plug and play solutions that can directly be integrated into industry for satellites robotized production.\nMiniLLock is smaller, lighter, safer, with a longer lifetime and generates significantly less shocks and vibrations than\nstandard actuators such as electromagnet and pyrotechnics. MiniLLock offers performances which have never been reached\nwith any other materials.\nNimesis is the only company that can provide such cost-effective actuators suitable to small satellite with high performances\nand reliability, enabling features previously impossible.\nMiniLLock will accelerate and leverage the commercialization of Nimesis technology and ensure Europe worldwide\nleadership\nand independence in the new space emergent environment.\nNimesis ambitions to become the global leader of this domain with a turnover of € 26 million and a market share of 28% in\n2027.","totalCost":"2413543,75","ecMaxContribution":"1689480,63","call":"H2020-EIC-SMEInst-2018-2020-3","fundingScheme":"SME-2b","coordinator":"NIMESIS TECHNOLOGY SARL","coordinatorCountry":"FR","participants":"","participantCountries":"","subjects":""}
{"rcn":"229299","id":"101003374","acronym":"NOPHOS","status":"SIGNED","programme":"H2020-EU.4.","topics":"WF-02-2019","frameworkProgramme":"H2020","title":"Unravelling protein phosphorylation mechanisms and phosphoproteome changes under nitrosative stress conditions in E.coli","startDate":"2020-07-01","endDate":"2022-06-30","projectUrl":"","objective":"Currently, we face a global antibiotic resistance crisis aggravated by the slow development of more effective and anti-resistance promoting therapeutical solutions. Protein phosphorylation (PP) has recently emerged as one of the major post-translational modification in bacteria, involved in the regulation of multiple physiological processes. In this MSCA individual fellowship application we aim to bridge the current gap in the field for prokaryotes by unravelling the unknown regulatory role of PP on proteins involved in nitrosative stress (NS) detoxification in the model bacterium E.coli. We propose to examine for the first time both global protein modifications (e.g. phosphoproteomics) under nitrogen species stress, as well as characterize PP in individual proteins involved in NS response. We will construct a network model that reflect the phosphoproteomic changes upon NS in E.coli, that may pave the way for the design of new bacterial targets. Understanding how bacteria respond to the chemical weapons of the human innate system is fundamental to develop efficient therapies. We will pioneer research on the mechanism and the regulation of nitric oxide detoxification proteins already identified as phosphorylated, by analyzing how this modification influences their stability and activity in vitro and in vivo. This project opens up new research paths on bacterial detoxification systems and signalling in general, addressing for the first time the role of PP in these processes. The proposal brings together transversal and scientific skills that will enable the researcher to lead the development of this emerging field and position herself as an expert in the area, and aims at establishing the importance of PP in NO microbial response, a novelty in this field.","totalCost":"147815,04","ecMaxContribution":"147815,04","call":"H2020-WF-02-2019","fundingScheme":"MSCA-IF-EF-ST","coordinator":"UNIVERSIDADE NOVA DE LISBOA","coordinatorCountry":"PT","participants":"","participantCountries":"","subjects":""}
Binary file not shown.
@ -61,7 +61,7 @@
 		<dependency>
 			<groupId>eu.dnetlib</groupId>
 			<artifactId>dnet-openaire-broker-common</artifactId>
-			<version>[2.0.1,3.0.0)</version>
+			<version>[3.0.1,4.0.0)</version>
 		</dependency>

 	</dependencies>
@ -0,0 +1,37 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<dfxml xmloutputversion='1.0'>
+  <metadata
+  xmlns='http://afflib.org/tcpflow/'
+  xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'
+  xmlns:dc='http://purl.org/dc/elements/1.1/'>
+    <dc:type>Feature Extraction</dc:type>
+  </metadata>
+  <creator version='1.0'>
+    <program>TCPFLOW</program>
+    <version>1.5.0</version>
+    <build_environment>
+      <compiler>4.2.1 (4.2.1 Compatible Apple LLVM 11.0.0 (clang-1100.0.33.8))</compiler>
+      <CPPFLAGS>-D_THREAD_SAFE -pthread -I/usr/local/include -I/usr/local/include -DUTC_OFFSET=+0000 </CPPFLAGS>
+      <CFLAGS>-g -D_THREAD_SAFE -pthread -g -O3 -MD -Wpointer-arith -Wmissing-declarations -Wmissing-prototypes -Wshadow -Wwrite-strings -Wcast-align -Waggregate-return -Wbad-function-cast -Wcast-qual -Wundef -Wredundant-decls -Wdisabled-optimization -Wfloat-equal -Wmultichar -Wc++-compat -Wmissing-noreturn -Wall -Wstrict-prototypes -MD -D_FORTIFY_SOURCE=2 -Wpointer-arith -Wmissing-declarations -Wmissing-prototypes -Wshadow -Wwrite-strings -Wcast-align -Waggregate-return -Wbad-function-cast -Wcast-qual -Wundef -Wredundant-decls -Wdisabled-optimization -Wfloat-equal -Wmultichar -Wc++-compat -Wmissing-noreturn -Wall -Wstrict-prototypes</CFLAGS>
+      <CXXFLAGS>-g -D_THREAD_SAFE -pthread -g -O3 -Wall -MD -D_FORTIFY_SOURCE=2 -Wpointer-arith -Wshadow -Wwrite-strings -Wcast-align -Wredundant-decls -Wdisabled-optimization -Wfloat-equal -Wmultichar -Wmissing-noreturn -Woverloaded-virtual -Wsign-promo -funit-at-a-time -Weffc++ -std=c++11 -Wall -MD -D_FORTIFY_SOURCE=2 -Wpointer-arith -Wshadow -Wwrite-strings -Wcast-align -Wredundant-decls -Wdisabled-optimization -Wfloat-equal -Wmultichar -Wmissing-noreturn -Woverloaded-virtual -Wsign-promo -funit-at-a-time -Weffc++ </CXXFLAGS>
+      <LDFLAGS>-L/usr/local/lib -L/usr/local/lib </LDFLAGS>
+      <LIBS>-lpython2.7 -lpython2.7 -lpcap -lbz2 -lexpat -lsqlite3 -lcrypto -lssl -lcrypto -ldl -lz </LIBS>
+      <compilation_date>2019-10-11T01:16:58</compilation_date>
+      <library name="boost" version="107100"/>
+      <library name="sqlite" version="3.28.0" source_id="2019-04-15 14:49:49 378230ae7f4b721c8b8d83c8ceb891449685cd23b1702a57841f1be40b5daapl"/>
+    </build_environment>
+    <execution_environment>
+      <os_sysname>Darwin</os_sysname>
+      <os_release>19.5.0</os_release>
+      <os_version>Darwin Kernel Version 19.5.0: Tue May 26 20:41:44 PDT 2020; root:xnu-6153.121.2~2/RELEASE_X86_64</os_version>
+      <host>Micheles-MBP.local</host>
+      <arch>x86_64</arch>
+      <command_line>tcpflow</command_line>
+      <uid>501</uid>
+      <username>michele</username>
+      <start_time>2020-06-15T14:55:03Z</start_time>
+    </execution_environment>
+  </creator>
+  <configuration>
+  </configuration>
+  <tdelta>0</tdelta>
@ -1,9 +1,15 @@

 package eu.dnetlib.dhp.broker.model;

+import java.io.Serializable;
 import java.util.Map;

-public class Event {
+public class Event implements Serializable {
+
+	/**
+	 *
+	 */
+	private static final long serialVersionUID = -5936790326505781395L;

 	private String eventId;
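Side note, not part of the patch: beans that travel through Spark Datasets are serialized when shipped to executors, so implementing java.io.Serializable and pinning an explicit serialVersionUID, as the hunk above does, avoids NotSerializableException and keeps serialized forms stable across recompilations. A minimal sketch of the same pattern, with hypothetical field names:

	import java.io.Serializable;

	public class ExampleEvent implements Serializable {

		// explicit version id so previously serialized instances stay readable after recompilation
		private static final long serialVersionUID = 1L;

		private String eventId; // hypothetical payload field

		public String getEventId() {
			return eventId;
		}

		public void setEventId(final String eventId) {
			this.eventId = eventId;
		}
	}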
@ -6,17 +6,13 @@ import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.stream.Collectors;

 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.time.DateUtils;

+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
 import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
-import eu.dnetlib.dhp.schema.oaf.Author;
-import eu.dnetlib.dhp.schema.oaf.KeyValue;
-import eu.dnetlib.dhp.schema.oaf.Result;
-import eu.dnetlib.dhp.schema.oaf.StructuredProperty;

 public class EventFactory {

@ -37,8 +33,7 @@ public class EventFactory {
 		final Map<String, Object> map = createMapFromResult(updateInfo);

 		final String eventId = calculateEventId(
-			updateInfo.getTopicPath(), updateInfo.getTarget().getResult().getOriginalId().get(0),
-			updateInfo.getHighlightValueAsString());
+			updateInfo.getTopicPath(), updateInfo.getTarget().getOriginalId(), updateInfo.getHighlightValueAsString());

 		res.setEventId(eventId);
 		res.setProducerId(PRODUCER_ID);

@ -54,53 +49,31 @@ public class EventFactory {
 	private static Map<String, Object> createMapFromResult(final UpdateInfo<?> updateInfo) {
 		final Map<String, Object> map = new HashMap<>();

-		final Result source = updateInfo.getSource().getResult();
-		final Result target = updateInfo.getTarget().getResult();
+		final OpenaireBrokerResult source = updateInfo.getSource();
+		final OpenaireBrokerResult target = updateInfo.getTarget();

-		final List<KeyValue> collectedFrom = target.getCollectedfrom();
-		if (collectedFrom.size() == 1) {
-			map.put("target_datasource_id", collectedFrom.get(0).getKey());
-			map.put("target_datasource_name", collectedFrom.get(0).getValue());
-		}
+		map.put("target_datasource_id", target.getCollectedFromId());
+		map.put("target_datasource_name", target.getCollectedFromName());

-		final List<String> ids = target.getOriginalId();
-		if (ids.size() > 0) {
-			map.put("target_publication_id", ids.get(0));
-		}
+		map.put("target_publication_id", target.getOriginalId());

-		final List<StructuredProperty> titles = target.getTitle();
+		final List<String> titles = target.getTitles();
 		if (titles.size() > 0) {
 			map.put("target_publication_title", titles.get(0));
 		}

-		final long date = parseDateTolong(target.getDateofacceptance().getValue());
+		final long date = parseDateTolong(target.getPublicationdate());
 		if (date > 0) {
 			map.put("target_dateofacceptance", date);
 		}

-		final List<StructuredProperty> subjects = target.getSubject();
-		if (subjects.size() > 0) {
-			map
-				.put(
-					"target_publication_subject_list",
-					subjects.stream().map(StructuredProperty::getValue).collect(Collectors.toList()));
-		}
-
-		final List<Author> authors = target.getAuthor();
-		if (authors.size() > 0) {
-			map
-				.put(
-					"target_publication_author_list",
-					authors.stream().map(Author::getFullname).collect(Collectors.toList()));
-		}
+		map.put("target_publication_subject_list", target.getSubjects());
+		map.put("target_publication_author_list", target.getCreators());

 		// PROVENANCE INFO
 		map.put("trust", updateInfo.getTrust());
-		final List<KeyValue> sourceCollectedFrom = source.getCollectedfrom();
-		if (sourceCollectedFrom.size() == 1) {
-			map.put("provenance_datasource_id", sourceCollectedFrom.get(0).getKey());
-			map.put("provenance_datasource_name", sourceCollectedFrom.get(0).getValue());
-		}
+		map.put("provenance_datasource_id", source.getCollectedFromId());
+		map.put("provenance_datasource_name", source.getCollectedFromName());
 		map.put("provenance_publication_id_list", source.getOriginalId());

 		return map;
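For orientation only: the body of calculateEventId is not shown in this hunk, so the following is a hypothetical sketch of a deterministic id built from the three inputs the call site now passes (topic path, original id, highlight value), assuming the commons-codec DigestUtils class that EventFactory already imports:

	// hypothetical sketch, not the actual implementation: a stable id from the three event id inputs
	private static String exampleEventId(final String topicPath, final String originalId, final String highlight) {
		return "event-" + DigestUtils.md5Hex(topicPath + "|" + originalId + "|" + highlight);
	}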
@ -18,18 +18,20 @@ import org.slf4j.LoggerFactory;

 import com.fasterxml.jackson.databind.ObjectMapper;

+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.broker.model.Event;
 import eu.dnetlib.dhp.broker.oa.util.BrokerConstants;
+import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
 import eu.dnetlib.dhp.broker.oa.util.EventFinder;
 import eu.dnetlib.dhp.broker.oa.util.EventGroup;
 import eu.dnetlib.dhp.broker.oa.util.aggregators.simple.ResultAggregator;
 import eu.dnetlib.dhp.broker.oa.util.aggregators.simple.ResultGroup;
+import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.OpenaireBrokerResultAggregator;
 import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedEntityFactory;
 import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedProject;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelationsAggregator;
 import eu.dnetlib.dhp.common.HdfsSupport;
+import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct;
 import eu.dnetlib.dhp.schema.oaf.Project;
 import eu.dnetlib.dhp.schema.oaf.Publication;
 import eu.dnetlib.dhp.schema.oaf.Relation;

@ -73,6 +75,8 @@ public class GenerateEventsApplication {
 		log.info("dedupConfigProfileId: {}", dedupConfigProfileId);

 		final SparkConf conf = new SparkConf();
+		conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
+		conf.registerKryoClasses(BrokerConstants.getModelClasses());

 		final DedupConfig dedupConfig = loadDedupConfig(isLookupUrl, dedupConfigProfileId);

@ -80,13 +84,16 @@ public class GenerateEventsApplication {

 			removeOutputDir(spark, eventsPath);

-			final Dataset<Event> all = spark.emptyDataset(Encoders.kryo(Event.class));
-
-			for (final Class<? extends Result> r1 : BrokerConstants.RESULT_CLASSES) {
-				all.union(generateEvents(spark, graphPath, r1, dedupConfig));
-			}
-
-			all.write().mode(SaveMode.Overwrite).option("compression", "gzip").json(eventsPath);
+			spark
+				.emptyDataset(Encoders.kryo(Event.class))
+				.union(generateEvents(spark, graphPath, Publication.class, dedupConfig))
+				.union(generateEvents(spark, graphPath, eu.dnetlib.dhp.schema.oaf.Dataset.class, dedupConfig))
+				.union(generateEvents(spark, graphPath, Software.class, dedupConfig))
+				.union(generateEvents(spark, graphPath, OtherResearchProduct.class, dedupConfig))
+				.write()
+				.mode(SaveMode.Overwrite)
+				.option("compression", "gzip")
+				.json(eventsPath);
 		});

 	}

@ -101,18 +108,18 @@ public class GenerateEventsApplication {
 		final Class<SRC> sourceClass,
 		final DedupConfig dedupConfig) {

-		final Dataset<ResultWithRelations> results = expandResultsWithRelations(spark, graphPath, sourceClass);
+		final Dataset<OpenaireBrokerResult> results = expandResultsWithRelations(spark, graphPath, sourceClass);

 		final Dataset<Relation> mergedRels = readPath(spark, graphPath + "/relation", Relation.class)
 			.filter(r -> r.getRelClass().equals(BrokerConstants.IS_MERGED_IN_CLASS));

-		final TypedColumn<Tuple2<ResultWithRelations, Relation>, ResultGroup> aggr = new ResultAggregator()
+		final TypedColumn<Tuple2<OpenaireBrokerResult, Relation>, ResultGroup> aggr = new ResultAggregator()
 			.toColumn();

 		return results
 			.joinWith(mergedRels, results.col("result.id").equalTo(mergedRels.col("source")), "inner")
 			.groupByKey(
-				(MapFunction<Tuple2<ResultWithRelations, Relation>, String>) t -> t._2.getTarget(), Encoders.STRING())
+				(MapFunction<Tuple2<OpenaireBrokerResult, Relation>, String>) t -> t._2.getTarget(), Encoders.STRING())
 			.agg(aggr)
 			.map((MapFunction<Tuple2<String, ResultGroup>, ResultGroup>) t -> t._2, Encoders.kryo(ResultGroup.class))
 			.filter(ResultGroup::isValid)

@ -122,7 +129,7 @@ public class GenerateEventsApplication {
 			.flatMap(group -> group.getData().iterator(), Encoders.kryo(Event.class));
 	}

-	private static <SRC extends Result> Dataset<ResultWithRelations> expandResultsWithRelations(
+	private static <SRC extends Result> Dataset<OpenaireBrokerResult> expandResultsWithRelations(
 		final SparkSession spark,
 		final String graphPath,
 		final Class<SRC> sourceClass) {

@ -133,16 +140,18 @@ public class GenerateEventsApplication {
 		final Dataset<Publication> publications = readPath(spark, graphPath + "/publication", Publication.class);

 		final Dataset<Relation> rels = readPath(spark, graphPath + "/relation", Relation.class)
-			.filter(r -> !r.getRelClass().equals(BrokerConstants.IS_MERGED_IN_CLASS));
+			.filter(r -> !r.getRelClass().equals(BrokerConstants.IS_MERGED_IN_CLASS))
+			.cache();

-		final Dataset<ResultWithRelations> r0 = readPath(
+		final Dataset<OpenaireBrokerResult> r0 = readPath(
 			spark, graphPath + "/" + sourceClass.getSimpleName().toLowerCase(), Result.class)
 				.filter(r -> r.getDataInfo().getDeletedbyinference())
-				.map(r -> new ResultWithRelations(r), Encoders.kryo(ResultWithRelations.class));
-		final Dataset<ResultWithRelations> r1 = join(r0, rels, relatedEntities(projects, rels, RelatedProject.class));
-		final Dataset<ResultWithRelations> r2 = join(r1, rels, relatedEntities(softwares, rels, RelatedProject.class));
-		final Dataset<ResultWithRelations> r3 = join(r2, rels, relatedEntities(datasets, rels, RelatedProject.class));
-		final Dataset<ResultWithRelations> r4 = join(
+				.map(ConversionUtils::oafResultToBrokerResult, Encoders.kryo(OpenaireBrokerResult.class));
+
+		final Dataset<OpenaireBrokerResult> r1 = join(r0, rels, relatedEntities(projects, rels, RelatedProject.class));
+		final Dataset<OpenaireBrokerResult> r2 = join(r1, rels, relatedEntities(softwares, rels, RelatedProject.class));
+		final Dataset<OpenaireBrokerResult> r3 = join(r2, rels, relatedEntities(datasets, rels, RelatedProject.class));
+		final Dataset<OpenaireBrokerResult> r4 = join(
 			r3, rels, relatedEntities(publications, rels, RelatedProject.class));
 		;

@ -159,20 +168,20 @@ public class GenerateEventsApplication {
 			Encoders.kryo(clazz));
 	}

-	private static <T> Dataset<ResultWithRelations> join(final Dataset<ResultWithRelations> sources,
+	private static <T> Dataset<OpenaireBrokerResult> join(final Dataset<OpenaireBrokerResult> sources,
 		final Dataset<Relation> rels,
 		final Dataset<T> typedRels) {

-		final TypedColumn<Tuple2<ResultWithRelations, T>, ResultWithRelations> aggr = new ResultWithRelationsAggregator<T>()
+		final TypedColumn<Tuple2<OpenaireBrokerResult, T>, OpenaireBrokerResult> aggr = new OpenaireBrokerResultAggregator<T>()
 			.toColumn();
 		;

 		return sources
 			.joinWith(typedRels, sources.col("result.id").equalTo(rels.col("source")), "left_outer")
 			.groupByKey(
-				(MapFunction<Tuple2<ResultWithRelations, T>, String>) t -> t._1.getResult().getId(), Encoders.STRING())
+				(MapFunction<Tuple2<OpenaireBrokerResult, T>, String>) t -> t._1.getOpenaireId(), Encoders.STRING())
 			.agg(aggr)
-			.map(t -> t._2, Encoders.kryo(ResultWithRelations.class));
+			.map(t -> t._2, Encoders.kryo(OpenaireBrokerResult.class));
 	}

 	public static <R> Dataset<R> readPath(
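The loop in the old version discarded its own work: Spark's Dataset.union does not modify the receiver, it returns a new Dataset, so `all.union(...)` inside the loop left `all` empty and the job wrote no events. The rewritten chain keeps every returned Dataset. If a loop over BrokerConstants.RESULT_CLASSES is ever reintroduced, the accumulator has to be reassigned on each iteration; a minimal sketch under that assumption, reusing the variables of the surrounding method:

	// sketch: union in a loop only works if the returned Dataset is reassigned
	Dataset<Event> all = spark.emptyDataset(Encoders.kryo(Event.class));
	for (final Class<? extends Result> clazz : BrokerConstants.RESULT_CLASSES) {
		all = all.union(generateEvents(spark, graphPath, clazz, dedupConfig));
	}
	all.write().mode(SaveMode.Overwrite).option("compression", "gzip").json(eventsPath);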
@ -12,22 +12,20 @@ import java.util.function.Function;
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.lang3.StringUtils;

-import eu.dnetlib.broker.objects.Publication;
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.util.UpdateInfo;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
-import eu.dnetlib.dhp.schema.oaf.Field;
 import eu.dnetlib.pace.config.DedupConfig;

 public abstract class UpdateMatcher<T> {

 	private final boolean multipleUpdate;
 	private final Function<T, Topic> topicFunction;
-	private final BiConsumer<Publication, T> compileHighlightFunction;
+	private final BiConsumer<OpenaireBrokerResult, T> compileHighlightFunction;
 	private final Function<T, String> highlightToStringFunction;

 	public UpdateMatcher(final boolean multipleUpdate, final Function<T, Topic> topicFunction,
-		final BiConsumer<Publication, T> compileHighlightFunction,
+		final BiConsumer<OpenaireBrokerResult, T> compileHighlightFunction,
 		final Function<T, String> highlightToStringFunction) {
 		this.multipleUpdate = multipleUpdate;
 		this.topicFunction = topicFunction;

@ -35,19 +33,18 @@ public abstract class UpdateMatcher<T> {
 		this.highlightToStringFunction = highlightToStringFunction;
 	}

-	public Collection<UpdateInfo<T>> searchUpdatesForRecord(final ResultWithRelations res,
-		final Collection<ResultWithRelations> others,
+	public Collection<UpdateInfo<T>> searchUpdatesForRecord(final OpenaireBrokerResult res,
+		final Collection<OpenaireBrokerResult> others,
 		final DedupConfig dedupConfig) {

 		final Map<String, UpdateInfo<T>> infoMap = new HashMap<>();

-		for (final ResultWithRelations source : others) {
+		for (final OpenaireBrokerResult source : others) {
 			if (source != res) {
 				for (final T hl : findDifferences(source, res)) {
 					final Topic topic = getTopicFunction().apply(hl);
 					final UpdateInfo<T> info = new UpdateInfo<>(topic, hl, source, res, getCompileHighlightFunction(),
-						getHighlightToStringFunction(),
-						dedupConfig);
+						getHighlightToStringFunction(), dedupConfig);
 					final String s = DigestUtils.md5Hex(info.getHighlightValueAsString());
 					if (!infoMap.containsKey(s) || infoMap.get(s).getTrust() < info.getTrust()) {
 					} else {

@ -71,14 +68,14 @@ public abstract class UpdateMatcher<T> {
 		}
 	}

-	protected abstract List<T> findDifferences(ResultWithRelations source, ResultWithRelations target);
+	protected abstract List<T> findDifferences(OpenaireBrokerResult source, OpenaireBrokerResult target);

-	protected static boolean isMissing(final List<Field<String>> list) {
-		return list == null || list.isEmpty() || StringUtils.isBlank(list.get(0).getValue());
+	protected static boolean isMissing(final List<String> list) {
+		return list == null || list.isEmpty() || StringUtils.isBlank(list.get(0));
 	}

-	protected boolean isMissing(final Field<String> field) {
-		return field == null || StringUtils.isBlank(field.getValue());
+	protected boolean isMissing(final String field) {
+		return StringUtils.isBlank(field);
 	}

 	public boolean isMultipleUpdate() {

@ -89,7 +86,7 @@ public abstract class UpdateMatcher<T> {
 		return topicFunction;
 	}

-	public BiConsumer<Publication, T> getCompileHighlightFunction() {
+	public BiConsumer<OpenaireBrokerResult, T> getCompileHighlightFunction() {
 		return compileHighlightFunction;
 	}

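With the fields now plain Strings, the two isMissing overloads reduce to StringUtils.isBlank checks; since commons-lang3 treats null as blank, the explicit null guard that the old Field<String> version needed can be dropped without changing behaviour. A small illustration of the assumed semantics (not part of the patch):

	// illustrative expectations for the simplified helpers
	assert StringUtils.isBlank(null);          // null is treated as missing
	assert StringUtils.isBlank("   ");         // whitespace-only is missing
	assert !StringUtils.isBlank("10.1234/x");  // a real value is present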
@ -5,45 +5,39 @@ import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;

+import eu.dnetlib.broker.objects.Dataset;
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedDataset;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
-import eu.dnetlib.dhp.schema.oaf.Dataset;

 public abstract class AbstractEnrichMissingDataset
-	extends UpdateMatcher<eu.dnetlib.broker.objects.Dataset> {
+	extends UpdateMatcher<Dataset> {

 	public AbstractEnrichMissingDataset(final Topic topic) {
 		super(true,
 			rel -> topic,
 			(p, rel) -> p.getDatasets().add(rel),
-			rel -> rel.getInstances().get(0).getUrl());
+			rel -> rel.getOriginalId());
 	}

 	protected abstract boolean filterByType(String relType);

 	@Override
-	protected final List<eu.dnetlib.broker.objects.Dataset> findDifferences(
-		final ResultWithRelations source,
-		final ResultWithRelations target) {
+	protected final List<Dataset> findDifferences(final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {

 		final Set<String> existingDatasets = target
 			.getDatasets()
 			.stream()
 			.filter(rel -> filterByType(rel.getRelType()))
-			.map(RelatedDataset::getRelDataset)
-			.map(Dataset::getId)
+			.map(Dataset::getOriginalId)
 			.collect(Collectors.toSet());

 		return source
 			.getDatasets()
 			.stream()
 			.filter(rel -> filterByType(rel.getRelType()))
-			.map(RelatedDataset::getRelDataset)
-			.filter(d -> !existingDatasets.contains(d.getId()))
-			.map(ConversionUtils::oafDatasetToBrokerDataset)
+			.filter(d -> !existingDatasets.contains(d.getOriginalId()))
 			.collect(Collectors.toList());

 	}
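The matcher above follows the pattern shared by the refactored matchers in the hunks below: collect a Set of keys already present on the target record, then stream the source record and keep only the items whose key is absent. A generic sketch of that pattern, with illustrative names not taken from the codebase (assumes java.util.List, java.util.Set, java.util.function.Function and java.util.stream.Collectors are imported):

	// generic "propose what the target is missing" pattern used by the matchers
	static <T> List<T> missingIn(final List<T> source, final List<T> target, final Function<T, String> key) {
		final Set<String> existing = target.stream().map(key).collect(Collectors.toSet());
		return source.stream()
			.filter(item -> !existing.contains(key.apply(item)))
			.collect(Collectors.toList());
	}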
@ -1,19 +1,15 @@

 package eu.dnetlib.dhp.broker.oa.matchers.relatedProjects;

-import java.util.Arrays;
+import java.util.ArrayList;
 import java.util.List;
-import java.util.stream.Collectors;

+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
 import eu.dnetlib.broker.objects.Project;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedProject;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;

-public class EnrichMissingProject
-	extends UpdateMatcher<eu.dnetlib.broker.objects.Project> {
+public class EnrichMissingProject extends UpdateMatcher<Project> {

 	public EnrichMissingProject() {
 		super(true,

@ -23,16 +19,11 @@ public class EnrichMissingProject
 	}

 	@Override
-	protected List<Project> findDifferences(final ResultWithRelations source, final ResultWithRelations target) {
-		if (source.getProjects().isEmpty()) {
-			return Arrays.asList();
+	protected List<Project> findDifferences(final OpenaireBrokerResult source, final OpenaireBrokerResult target) {
+		if (target.getProjects().isEmpty()) {
+			return source.getProjects();
 		} else {
-			return target
-				.getProjects()
-				.stream()
-				.map(RelatedProject::getRelProject)
-				.map(ConversionUtils::oafProjectToBrokerProject)
-				.collect(Collectors.toList());
+			return new ArrayList<>();
 		}
 	}
 }
@ -5,39 +5,38 @@ import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;

+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
+import eu.dnetlib.broker.objects.Project;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedProject;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
-import eu.dnetlib.dhp.schema.oaf.Project;

-public class EnrichMoreProject extends UpdateMatcher<eu.dnetlib.broker.objects.Project> {
+public class EnrichMoreProject extends UpdateMatcher<Project> {

 	public EnrichMoreProject() {
 		super(true,
 			prj -> Topic.ENRICH_MORE_PROJECT,
 			(p, prj) -> p.getProjects().add(prj),
-			prj -> prj.getFunder() + "::" + prj.getFundingProgram() + prj.getCode());
+			prj -> projectAsString(prj));
+	}
+
+	private static String projectAsString(final Project prj) {
+		return prj.getFunder() + "::" + prj.getFundingProgram() + "::" + prj.getCode();
 	}

 	@Override
-	protected List<eu.dnetlib.broker.objects.Project> findDifferences(final ResultWithRelations source,
-		final ResultWithRelations target) {
+	protected List<eu.dnetlib.broker.objects.Project> findDifferences(final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {

-		final Set<String> existingProjects = source
+		final Set<String> existingProjects = target
 			.getProjects()
 			.stream()
-			.map(RelatedProject::getRelProject)
-			.map(Project::getId)
+			.map(EnrichMoreProject::projectAsString)
 			.collect(Collectors.toSet());

-		return target
+		return source
 			.getProjects()
 			.stream()
-			.map(RelatedProject::getRelProject)
-			.filter(p -> !existingProjects.contains(p.getId()))
-			.map(ConversionUtils::oafProjectToBrokerProject)
+			.filter(p -> !existingProjects.contains(projectAsString(p)))
 			.collect(Collectors.toList());
 	}
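The old highlight key concatenated the funding programme and the project code without a separator, so two different projects could produce the same string. Centralising the key in projectAsString, with "::" between every component, removes that ambiguity and lets the same key drive both the highlight and the duplicate check. A short illustration with made-up values (only to show why the explicit separator matters):

	// made-up values, not real programme codes
	final String oldStyleA = "EC" + "::" + "H2020-EU.3." + "1";        // "EC::H2020-EU.3.1"
	final String oldStyleB = "EC" + "::" + "H2020-EU.3" + ".1";        // also "EC::H2020-EU.3.1" — collision
	final String newStyleA = "EC" + "::" + "H2020-EU.3." + "::" + "1"; // "EC::H2020-EU.3.::1"
	final String newStyleB = "EC" + "::" + "H2020-EU.3" + "::" + ".1"; // "EC::H2020-EU.3::.1" — now distinct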
@ -5,21 +5,18 @@ import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;

+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
+import eu.dnetlib.broker.objects.Publication;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedPublication;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
-import eu.dnetlib.dhp.schema.oaf.Publication;

-public abstract class AbstractEnrichMissingPublication
-	extends UpdateMatcher<eu.dnetlib.broker.objects.Publication> {
+public abstract class AbstractEnrichMissingPublication extends UpdateMatcher<Publication> {

 	public AbstractEnrichMissingPublication(final Topic topic) {
 		super(true,
 			rel -> topic,
 			(p, rel) -> p.getPublications().add(rel),
-			rel -> rel.getInstances().get(0).getUrl());
+			rel -> rel.getOriginalId());

 	}

@ -27,24 +24,21 @@ public abstract class AbstractEnrichMissingPublication
 	@Override
 	protected final List<eu.dnetlib.broker.objects.Publication> findDifferences(
-		final ResultWithRelations source,
-		final ResultWithRelations target) {
+		final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {

 		final Set<String> existingPublications = target
 			.getPublications()
 			.stream()
 			.filter(rel -> filterByType(rel.getRelType()))
-			.map(RelatedPublication::getRelPublication)
-			.map(Publication::getId)
+			.map(Publication::getOriginalId)
 			.collect(Collectors.toSet());

 		return source
 			.getPublications()
 			.stream()
 			.filter(rel -> filterByType(rel.getRelType()))
-			.map(RelatedPublication::getRelPublication)
-			.filter(d -> !existingPublications.contains(d.getId()))
-			.map(ConversionUtils::oafResultToBrokerPublication)
+			.filter(p -> !existingPublications.contains(p.getOriginalId()))
 			.collect(Collectors.toList());
 	}

@ -1,15 +1,12 @@

 package eu.dnetlib.dhp.broker.oa.matchers.relatedSoftware;

-import java.util.Arrays;
+import java.util.ArrayList;
 import java.util.List;
-import java.util.stream.Collectors;

+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedSoftware;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;

 public class EnrichMissingSoftware
 	extends UpdateMatcher<eu.dnetlib.broker.objects.Software> {

@ -23,18 +20,13 @@ public class EnrichMissingSoftware
 	@Override
 	protected List<eu.dnetlib.broker.objects.Software> findDifferences(
-		final ResultWithRelations source,
-		final ResultWithRelations target) {
+		final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {

-		if (source.getSoftwares().isEmpty()) {
-			return Arrays.asList();
+		if (target.getSoftwares().isEmpty()) {
+			return source.getSoftwares();
 		} else {
-			return target
-				.getSoftwares()
-				.stream()
-				.map(RelatedSoftware::getRelSoftware)
-				.map(ConversionUtils::oafSoftwareToBrokerSoftware)
-				.collect(Collectors.toList());
+			return new ArrayList<>();
 		}
 	}
 }
@ -5,15 +5,12 @@ import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;

+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
+import eu.dnetlib.broker.objects.Software;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.RelatedSoftware;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
-import eu.dnetlib.dhp.schema.oaf.Software;

-public class EnrichMoreSoftware
-	extends UpdateMatcher<eu.dnetlib.broker.objects.Software> {
+public class EnrichMoreSoftware extends UpdateMatcher<Software> {

 	public EnrichMoreSoftware() {
 		super(true,

@ -24,22 +21,19 @@ public class EnrichMoreSoftware
 	@Override
 	protected List<eu.dnetlib.broker.objects.Software> findDifferences(
-		final ResultWithRelations source,
-		final ResultWithRelations target) {
+		final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {

 		final Set<String> existingSoftwares = source
 			.getSoftwares()
 			.stream()
-			.map(RelatedSoftware::getRelSoftware)
-			.map(Software::getId)
+			.map(Software::getName)
 			.collect(Collectors.toSet());

 		return target
 			.getSoftwares()
 			.stream()
-			.map(RelatedSoftware::getRelSoftware)
-			.filter(p -> !existingSoftwares.contains(p.getId()))
-			.map(ConversionUtils::oafSoftwareToBrokerSoftware)
+			.filter(p -> !existingSoftwares.contains(p.getName()))
 			.collect(Collectors.toList());
 	}

@ -5,9 +5,9 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;

+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;

 public class EnrichMissingAbstract extends UpdateMatcher<String> {

@ -19,13 +19,12 @@ public class EnrichMissingAbstract extends UpdateMatcher<String> {
 	}

 	@Override
-	protected List<String> findDifferences(final ResultWithRelations source,
-		final ResultWithRelations target) {
-		if (isMissing(target.getResult().getDescription()) && !isMissing(source.getResult().getDescription())) {
-			return Arrays
-				.asList(source.getResult().getDescription().get(0).getValue());
-		}
-		return new ArrayList<>();
+	protected List<String> findDifferences(final OpenaireBrokerResult source, final OpenaireBrokerResult target) {
+		if (isMissing(target.getAbstracts()) && !isMissing(source.getAbstracts())) {
+			return Arrays.asList(source.getAbstracts().get(0));
+		} else {
+			return new ArrayList<>();
+		}
 	}

 }
@ -1,53 +1,43 @@

 package eu.dnetlib.dhp.broker.oa.matchers.simple;

-import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;

+import org.apache.commons.lang3.StringUtils;
+
+import eu.dnetlib.broker.objects.Author;
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
-import eu.dnetlib.dhp.schema.oaf.Author;
-import eu.dnetlib.dhp.schema.oaf.StructuredProperty;

-public class EnrichMissingAuthorOrcid extends UpdateMatcher<String> {
+public class EnrichMissingAuthorOrcid extends UpdateMatcher<Author> {

 	public EnrichMissingAuthorOrcid() {
 		super(true,
 			aut -> Topic.ENRICH_MISSING_AUTHOR_ORCID,
 			(p, aut) -> p.getCreators().add(aut),
-			aut -> aut);
+			aut -> aut.getOrcid());
 	}

 	@Override
-	protected List<String> findDifferences(final ResultWithRelations source,
-		final ResultWithRelations target) {
+	protected List<Author> findDifferences(final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {

 		final Set<String> existingOrcids = target
-			.getResult()
-			.getAuthor()
+			.getCreators()
 			.stream()
-			.map(Author::getPid)
-			.flatMap(List::stream)
-			.filter(pid -> pid.getQualifier().getClassid().equalsIgnoreCase("orcid"))
-			.map(pid -> pid.getValue())
+			.map(Author::getOrcid)
+			.filter(StringUtils::isNotBlank)
 			.collect(Collectors.toSet());

-		final List<String> list = new ArrayList<>();
-
-		for (final Author author : source.getResult().getAuthor()) {
-			final String name = author.getFullname();
-
-			for (final StructuredProperty pid : author.getPid()) {
-				if (pid.getQualifier().getClassid().equalsIgnoreCase("orcid")
-					&& !existingOrcids.contains(pid.getValue())) {
-					list.add(name + " [ORCID: " + pid.getValue() + "]");
-				}
-			}
-		}
-
-		return list;
+		return source
+			.getCreators()
+			.stream()
+			.filter(a -> StringUtils.isNotBlank(a.getOrcid()))
+			.filter(a -> !existingOrcids.contains(a.getOrcid()))
+			.collect(Collectors.toList());
+
 	}
 }
@ -6,11 +6,10 @@ import java.util.List;
 import java.util.stream.Collectors;

 import eu.dnetlib.broker.objects.Instance;
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
 import eu.dnetlib.dhp.broker.oa.util.BrokerConstants;
-import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;

 public class EnrichMissingOpenAccess extends UpdateMatcher<Instance> {

@ -22,13 +21,12 @@ public class EnrichMissingOpenAccess extends UpdateMatcher<Instance> {
 	}

 	@Override
-	protected List<Instance> findDifferences(final ResultWithRelations source,
-		final ResultWithRelations target) {
+	protected List<Instance> findDifferences(final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {
 		final long count = target
-			.getResult()
-			.getInstance()
+			.getInstances()
 			.stream()
-			.map(i -> i.getAccessright().getClassid())
+			.map(Instance::getLicense)
 			.filter(right -> right.equals(BrokerConstants.OPEN_ACCESS))
 			.count();

@ -37,12 +35,9 @@ public class EnrichMissingOpenAccess extends UpdateMatcher<Instance> {
 		}

 		return source
-			.getResult()
-			.getInstance()
+			.getInstances()
 			.stream()
-			.filter(i -> i.getAccessright().getClassid().equals(BrokerConstants.OPEN_ACCESS))
-			.map(ConversionUtils::oafInstanceToBrokerInstances)
-			.flatMap(List::stream)
+			.filter(i -> i.getLicense().equals(BrokerConstants.OPEN_ACCESS))
 			.collect(Collectors.toList());
 	}

@ -5,13 +5,12 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.stream.Collectors;

-import eu.dnetlib.broker.objects.Pid;
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
+import eu.dnetlib.broker.objects.TypedValue;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;

-public class EnrichMissingPid extends UpdateMatcher<Pid> {
+public class EnrichMissingPid extends UpdateMatcher<TypedValue> {

 	public EnrichMissingPid() {
 		super(true,

@ -21,19 +20,17 @@ public class EnrichMissingPid extends UpdateMatcher<Pid> {
 	}

 	@Override
-	protected List<Pid> findDifferences(final ResultWithRelations source,
-		final ResultWithRelations target) {
-		final long count = target.getResult().getPid().size();
+	protected List<TypedValue> findDifferences(final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {
+		final long count = target.getPids().size();

 		if (count > 0) {
 			return Arrays.asList();
 		}

 		return source
-			.getResult()
-			.getPid()
+			.getPids()
 			.stream()
-			.map(ConversionUtils::oafPidToBrokerPid)
 			.collect(Collectors.toList());
 	}

@ -5,9 +5,9 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;

+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;

 public class EnrichMissingPublicationDate extends UpdateMatcher<String> {

@ -19,13 +19,14 @@ public class EnrichMissingPublicationDate extends UpdateMatcher<String> {
 	}

 	@Override
-	protected List<String> findDifferences(final ResultWithRelations source,
-		final ResultWithRelations target) {
-		if (isMissing(target.getResult().getDateofacceptance())
-			&& !isMissing(source.getResult().getDateofacceptance())) {
-			return Arrays.asList(source.getResult().getDateofacceptance().getValue());
-		}
-		return new ArrayList<>();
+	protected List<String> findDifferences(final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {
+		if (isMissing(target.getPublicationdate()) && !isMissing(source.getPublicationdate())) {
+			return Arrays.asList(source.getPublicationdate());
+		} else {
+			return new ArrayList<>();
+		}
 	}

 }
@ -5,42 +5,38 @@ import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;

-import org.apache.commons.lang3.tuple.Pair;
-
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
+import eu.dnetlib.broker.objects.TypedValue;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
-import eu.dnetlib.dhp.schema.oaf.Qualifier;
-import eu.dnetlib.dhp.schema.oaf.StructuredProperty;

-public class EnrichMissingSubject extends UpdateMatcher<Pair<String, String>> {
+public class EnrichMissingSubject extends UpdateMatcher<TypedValue> {

 	public EnrichMissingSubject() {
 		super(true,
-			pair -> Topic.fromPath("ENRICH/MISSING/SUBJECT/" + pair.getLeft()),
-			(p, pair) -> p.getSubjects().add(pair.getRight()),
-			pair -> pair.getLeft() + "::" + pair.getRight());
+			s -> Topic.fromPath("ENRICH/MISSING/SUBJECT/" + s.getType()),
+			(p, s) -> p.getSubjects().add(s),
+			s -> subjectAsString(s));
 	}

 	@Override
-	protected List<Pair<String, String>> findDifferences(final ResultWithRelations source,
-		final ResultWithRelations target) {
-		final Set<String> existingTypes = target
-			.getResult()
-			.getSubject()
+	protected List<TypedValue> findDifferences(final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {
+		final Set<String> existingSubject = target
+			.getSubjects()
 			.stream()
-			.map(StructuredProperty::getQualifier)
-			.map(Qualifier::getClassid)
+			.map(s -> subjectAsString(s))
 			.collect(Collectors.toSet());

 		return source
-			.getResult()
-			.getPid()
+			.getSubjects()
 			.stream()
-			.filter(pid -> !existingTypes.contains(pid.getQualifier().getClassid()))
-			.map(ConversionUtils::oafSubjectToPair)
+			.filter(s -> !existingSubject.contains(subjectAsString(s)))
 			.collect(Collectors.toList());
 	}

+	private static String subjectAsString(final TypedValue s) {
+		return s.getType() + "::" + s.getValue();
+	}
+
 }
@ -6,11 +6,10 @@ import java.util.Set;
 import java.util.stream.Collectors;

 import eu.dnetlib.broker.objects.Instance;
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
 import eu.dnetlib.dhp.broker.oa.util.BrokerConstants;
-import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;

 public class EnrichMoreOpenAccess extends UpdateMatcher<Instance> {

@ -22,24 +21,19 @@ public class EnrichMoreOpenAccess extends UpdateMatcher<Instance> {
 	}

 	@Override
-	protected List<Instance> findDifferences(final ResultWithRelations source,
-		final ResultWithRelations target) {
+	protected List<Instance> findDifferences(final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {
 		final Set<String> urls = target
-			.getResult()
-			.getInstance()
+			.getInstances()
 			.stream()
-			.filter(i -> i.getAccessright().getClassid().equals(BrokerConstants.OPEN_ACCESS))
+			.filter(i -> i.getLicense().equals(BrokerConstants.OPEN_ACCESS))
 			.map(i -> i.getUrl())
-			.flatMap(List::stream)
 			.collect(Collectors.toSet());

 		return source
-			.getResult()
-			.getInstance()
+			.getInstances()
 			.stream()
-			.filter(i -> i.getAccessright().getClassid().equals(BrokerConstants.OPEN_ACCESS))
-			.map(ConversionUtils::oafInstanceToBrokerInstances)
-			.flatMap(List::stream)
+			.filter(i -> i.getLicense().equals(BrokerConstants.OPEN_ACCESS))
 			.filter(i -> !urls.contains(i.getUrl()))
 			.collect(Collectors.toList());
 	}

@ -5,38 +5,37 @@ import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;

-import eu.dnetlib.broker.objects.Pid;
+import eu.dnetlib.broker.objects.OpenaireBrokerResult;
+import eu.dnetlib.broker.objects.TypedValue;
 import eu.dnetlib.dhp.broker.model.Topic;
 import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
-import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
-import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;

-public class EnrichMorePid extends UpdateMatcher<Pid> {
+public class EnrichMorePid extends UpdateMatcher<TypedValue> {

 	public EnrichMorePid() {
 		super(true,
 			pid -> Topic.ENRICH_MORE_PID,
 			(p, pid) -> p.getPids().add(pid),
-			pid -> pid.getType() + "::" + pid.getValue());
+			pid -> pidAsString(pid));
 	}

 	@Override
-	protected List<Pid> findDifferences(final ResultWithRelations source,
-		final ResultWithRelations target) {
+	protected List<TypedValue> findDifferences(final OpenaireBrokerResult source,
+		final OpenaireBrokerResult target) {
 		final Set<String> existingPids = target
-			.getResult()
-			.getPid()
+			.getPids()
 			.stream()
-			.map(pid -> pid.getQualifier().getClassid() + "::" + pid.getValue())
+			.map(pid -> pidAsString(pid))
 			.collect(Collectors.toSet());

 		return source
-			.getResult()
-			.getPid()
+			.getPids()
 			.stream()
-			.filter(pid -> !existingPids.contains(pid.getQualifier().getClassid() + "::" + pid.getValue()))
-			.map(ConversionUtils::oafPidToBrokerPid)
+			.filter(pid -> !existingPids.contains(pidAsString(pid)))
 			.collect(Collectors.toList());
 	}

+	private static String pidAsString(final TypedValue pid) {
+		return pid.getType() + "::" + pid.getValue();
+	}
 }
@ -5,39 +5,37 @@ import java.util.List;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
import java.util.stream.Collectors;
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
import org.apache.commons.lang3.tuple.Pair;
|
import eu.dnetlib.broker.objects.OpenaireBrokerResult;
|
||||||
|
import eu.dnetlib.broker.objects.TypedValue;
|
||||||
import eu.dnetlib.dhp.broker.model.Topic;
|
import eu.dnetlib.dhp.broker.model.Topic;
|
||||||
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
|
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
|
||||||
import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
|
|
||||||
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
|
|
||||||
|
|
||||||
public class EnrichMoreSubject extends UpdateMatcher<Pair<String, String>> {
|
public class EnrichMoreSubject extends UpdateMatcher<TypedValue> {
|
||||||
|
|
||||||
public EnrichMoreSubject() {
|
public EnrichMoreSubject() {
|
||||||
super(true,
|
super(true,
|
||||||
pair -> Topic.fromPath("ENRICH/MORE/SUBJECT/" + pair.getLeft()),
|
s -> Topic.fromPath("ENRICH/MORE/SUBJECT/" + s.getType()),
|
||||||
(p, pair) -> p.getSubjects().add(pair.getRight()),
|
(p, s) -> p.getSubjects().add(s),
|
||||||
pair -> pair.getLeft() + "::" + pair.getRight());
|
s -> subjectAsString(s));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected List<Pair<String, String>> findDifferences(final ResultWithRelations source,
|
protected List<TypedValue> findDifferences(final OpenaireBrokerResult source,
|
||||||
final ResultWithRelations target) {
|
final OpenaireBrokerResult target) {
|
||||||
final Set<String> existingSubjects = target
|
final Set<String> existingSubjects = target
|
||||||
.getResult()
|
.getSubjects()
|
||||||
.getSubject()
|
|
||||||
.stream()
|
.stream()
|
||||||
.map(pid -> pid.getQualifier().getClassid() + "::" + pid.getValue())
|
.map(pid -> subjectAsString(pid))
|
||||||
.collect(Collectors.toSet());
|
.collect(Collectors.toSet());
|
||||||
|
|
||||||
return source
|
return source
|
||||||
.getResult()
|
.getPids()
|
||||||
.getPid()
|
|
||||||
.stream()
|
.stream()
|
||||||
.filter(pid -> !existingSubjects.contains(pid.getQualifier().getClassid() + "::" + pid.getValue()))
|
.filter(s -> !existingSubjects.contains(subjectAsString(s)))
|
||||||
.map(ConversionUtils::oafSubjectToPair)
|
|
||||||
.collect(Collectors.toList());
|
.collect(Collectors.toList());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private static String subjectAsString(final TypedValue s) {
|
||||||
|
return s.getType() + "::" + s.getValue();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -2,13 +2,12 @@
|
||||||
package eu.dnetlib.dhp.broker.oa.util;
|
package eu.dnetlib.dhp.broker.oa.util;
|
||||||
|
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.List;
|
import java.util.HashSet;
|
||||||
|
import java.util.Set;
|
||||||
|
|
||||||
import eu.dnetlib.dhp.schema.oaf.Dataset;
|
import eu.dnetlib.dhp.broker.model.Event;
|
||||||
import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct;
|
import eu.dnetlib.dhp.broker.oa.util.aggregators.simple.ResultGroup;
|
||||||
import eu.dnetlib.dhp.schema.oaf.Publication;
|
import eu.dnetlib.dhp.schema.common.ModelSupport;
|
||||||
import eu.dnetlib.dhp.schema.oaf.Result;
|
|
||||||
import eu.dnetlib.dhp.schema.oaf.Software;
|
|
||||||
|
|
||||||
public class BrokerConstants {
|
public class BrokerConstants {
|
||||||
|
|
||||||
|
@ -18,7 +17,11 @@ public class BrokerConstants {
|
||||||
public static final float MIN_TRUST = 0.25f;
|
public static final float MIN_TRUST = 0.25f;
|
||||||
public static final float MAX_TRUST = 1.00f;
|
public static final float MAX_TRUST = 1.00f;
|
||||||
|
|
||||||
public static final List<Class<? extends Result>> RESULT_CLASSES = Arrays
|
public static Class<?>[] getModelClasses() {
|
||||||
.asList(Publication.class, Dataset.class, Software.class, OtherResearchProduct.class);
|
final Set<Class<?>> list = new HashSet<>();
|
||||||
|
list.addAll(Arrays.asList(ModelSupport.getOafModelClasses()));
|
||||||
|
list.addAll(Arrays.asList(ResultGroup.class, Event.class));
|
||||||
|
return list.toArray(new Class[] {});
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -13,7 +13,8 @@ import org.dom4j.DocumentHelper;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
import eu.dnetlib.broker.objects.Pid;
|
import eu.dnetlib.broker.objects.OpenaireBrokerResult;
|
||||||
|
import eu.dnetlib.broker.objects.TypedValue;
|
||||||
import eu.dnetlib.dhp.schema.oaf.Author;
|
import eu.dnetlib.dhp.schema.oaf.Author;
|
||||||
import eu.dnetlib.dhp.schema.oaf.Dataset;
|
import eu.dnetlib.dhp.schema.oaf.Dataset;
|
||||||
import eu.dnetlib.dhp.schema.oaf.ExternalReference;
|
import eu.dnetlib.dhp.schema.oaf.ExternalReference;
|
||||||
|
@ -41,8 +42,8 @@ public class ConversionUtils {
|
||||||
}).collect(Collectors.toList());
|
}).collect(Collectors.toList());
|
||||||
}
|
}
|
||||||
|
|
||||||
public static Pid oafPidToBrokerPid(final StructuredProperty sp) {
|
public static TypedValue oafPidToBrokerPid(final StructuredProperty sp) {
|
||||||
return sp != null ? new Pid()
|
return sp != null ? new TypedValue()
|
||||||
.setValue(sp.getValue())
|
.setValue(sp.getValue())
|
||||||
.setType(sp.getQualifier().getClassid()) : null;
|
.setType(sp.getQualifier().getClassid()) : null;
|
||||||
}
|
}
|
||||||
|
@ -54,7 +55,7 @@ public class ConversionUtils {
|
||||||
public static final eu.dnetlib.broker.objects.Dataset oafDatasetToBrokerDataset(final Dataset d) {
|
public static final eu.dnetlib.broker.objects.Dataset oafDatasetToBrokerDataset(final Dataset d) {
|
||||||
return d != null ? new eu.dnetlib.broker.objects.Dataset()
|
return d != null ? new eu.dnetlib.broker.objects.Dataset()
|
||||||
.setOriginalId(d.getOriginalId().get(0))
|
.setOriginalId(d.getOriginalId().get(0))
|
||||||
.setTitles(structPropList(d.getTitle()))
|
.setTitle(structPropValue(d.getTitle()))
|
||||||
.setPids(d.getPid().stream().map(ConversionUtils::oafPidToBrokerPid).collect(Collectors.toList()))
|
.setPids(d.getPid().stream().map(ConversionUtils::oafPidToBrokerPid).collect(Collectors.toList()))
|
||||||
.setInstances(
|
.setInstances(
|
||||||
d
|
d
|
||||||
|
@ -63,26 +64,46 @@ public class ConversionUtils {
|
||||||
.map(ConversionUtils::oafInstanceToBrokerInstances)
|
.map(ConversionUtils::oafInstanceToBrokerInstances)
|
||||||
.flatMap(List::stream)
|
.flatMap(List::stream)
|
||||||
.collect(Collectors.toList()))
|
.collect(Collectors.toList()))
|
||||||
.setCollectedFrom(d.getCollectedfrom().stream().map(KeyValue::getValue).collect(Collectors.toList()))
|
.setCollectedFrom(d.getCollectedfrom().stream().map(KeyValue::getValue).findFirst().orElse(null))
|
||||||
: null;
|
: null;
|
||||||
}
|
}
|
||||||
|
|
||||||
public static final eu.dnetlib.broker.objects.Publication oafResultToBrokerPublication(final Result result) {
|
public static eu.dnetlib.broker.objects.Publication oafPublicationToBrokerPublication(final Publication p) {
|
||||||
|
return p != null ? new eu.dnetlib.broker.objects.Publication()
|
||||||
|
.setOriginalId(p.getOriginalId().get(0))
|
||||||
|
.setTitle(structPropValue(p.getTitle()))
|
||||||
|
.setPids(p.getPid().stream().map(ConversionUtils::oafPidToBrokerPid).collect(Collectors.toList()))
|
||||||
|
.setInstances(
|
||||||
|
p
|
||||||
|
.getInstance()
|
||||||
|
.stream()
|
||||||
|
.map(ConversionUtils::oafInstanceToBrokerInstances)
|
||||||
|
.flatMap(List::stream)
|
||||||
|
.collect(Collectors.toList()))
|
||||||
|
.setCollectedFrom(p.getCollectedfrom().stream().map(KeyValue::getValue).findFirst().orElse(null))
|
||||||
|
: null;
|
||||||
|
}
|
||||||
|
|
||||||
return result != null ? new eu.dnetlib.broker.objects.Publication()
|
public static final OpenaireBrokerResult oafResultToBrokerResult(final Result result) {
|
||||||
|
|
||||||
|
return result != null ? new OpenaireBrokerResult()
|
||||||
|
.setOpenaireId(result.getId())
|
||||||
.setOriginalId(result.getOriginalId().get(0))
|
.setOriginalId(result.getOriginalId().get(0))
|
||||||
|
.setTypology(result.getResulttype().getClassid())
|
||||||
.setTitles(structPropList(result.getTitle()))
|
.setTitles(structPropList(result.getTitle()))
|
||||||
.setAbstracts(fieldList(result.getDescription()))
|
.setAbstracts(fieldList(result.getDescription()))
|
||||||
.setLanguage(result.getLanguage().getClassid())
|
.setLanguage(result.getLanguage().getClassid())
|
||||||
.setSubjects(structPropList(result.getSubject()))
|
.setSubjects(structPropTypedList(result.getSubject()))
|
||||||
.setCreators(result.getAuthor().stream().map(Author::getFullname).collect(Collectors.toList()))
|
.setCreators(
|
||||||
.setPublicationdate(result.getDateofcollection())
|
result.getAuthor().stream().map(ConversionUtils::oafAuthorToBrokerAuthor).collect(Collectors.toList()))
|
||||||
|
.setPublicationdate(result.getDateofacceptance().getValue())
|
||||||
.setPublisher(fieldValue(result.getPublisher()))
|
.setPublisher(fieldValue(result.getPublisher()))
|
||||||
.setEmbargoenddate(fieldValue(result.getEmbargoenddate()))
|
.setEmbargoenddate(fieldValue(result.getEmbargoenddate()))
|
||||||
.setContributor(fieldList(result.getContributor()))
|
.setContributor(fieldList(result.getContributor()))
|
||||||
.setJournal(
|
.setJournal(
|
||||||
result instanceof Publication ? oafJournalToBrokerJournal(((Publication) result).getJournal()) : null)
|
result instanceof Publication ? oafJournalToBrokerJournal(((Publication) result).getJournal()) : null)
|
||||||
.setCollectedFrom(result.getCollectedfrom().stream().map(KeyValue::getValue).collect(Collectors.toList()))
|
.setCollectedFromId(result.getCollectedfrom().stream().map(KeyValue::getKey).findFirst().orElse(null))
|
||||||
|
.setCollectedFromName(result.getCollectedfrom().stream().map(KeyValue::getValue).findFirst().orElse(null))
|
||||||
.setPids(result.getPid().stream().map(ConversionUtils::oafPidToBrokerPid).collect(Collectors.toList()))
|
.setPids(result.getPid().stream().map(ConversionUtils::oafPidToBrokerPid).collect(Collectors.toList()))
|
||||||
.setInstances(
|
.setInstances(
|
||||||
result
|
result
|
||||||
|
@ -100,6 +121,30 @@ public class ConversionUtils {
|
||||||
: null;
|
: null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private static List<TypedValue> structPropTypedList(final List<StructuredProperty> list) {
|
||||||
|
return list
|
||||||
|
.stream()
|
||||||
|
.map(
|
||||||
|
p -> new TypedValue()
|
||||||
|
.setValue(p.getValue())
|
||||||
|
.setType(p.getQualifier().getClassid()))
|
||||||
|
.collect(Collectors.toList());
|
||||||
|
}
|
||||||
|
|
||||||
|
private static eu.dnetlib.broker.objects.Author oafAuthorToBrokerAuthor(final Author author) {
|
||||||
|
return author != null ? new eu.dnetlib.broker.objects.Author()
|
||||||
|
.setFullname(author.getFullname())
|
||||||
|
.setOrcid(
|
||||||
|
author
|
||||||
|
.getPid()
|
||||||
|
.stream()
|
||||||
|
.filter(pid -> pid.getQualifier().getClassid().equalsIgnoreCase("orcid"))
|
||||||
|
.map(pid -> pid.getValue())
|
||||||
|
.findFirst()
|
||||||
|
.orElse(null))
|
||||||
|
: null;
|
||||||
|
}
|
||||||
|
|
||||||
private static eu.dnetlib.broker.objects.Journal oafJournalToBrokerJournal(final Journal journal) {
|
private static eu.dnetlib.broker.objects.Journal oafJournalToBrokerJournal(final Journal journal) {
|
||||||
return journal != null ? new eu.dnetlib.broker.objects.Journal()
|
return journal != null ? new eu.dnetlib.broker.objects.Journal()
|
||||||
.setName(journal.getName())
|
.setName(journal.getName())
|
||||||
|
|
|
@ -4,6 +4,7 @@ package eu.dnetlib.dhp.broker.oa.util;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
|
||||||
|
import eu.dnetlib.broker.objects.OpenaireBrokerResult;
|
||||||
import eu.dnetlib.dhp.broker.model.EventFactory;
|
import eu.dnetlib.dhp.broker.model.EventFactory;
|
||||||
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
|
import eu.dnetlib.dhp.broker.oa.matchers.UpdateMatcher;
|
||||||
import eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets.EnrichMissingDatasetIsReferencedBy;
|
import eu.dnetlib.dhp.broker.oa.matchers.relatedDatasets.EnrichMissingDatasetIsReferencedBy;
|
||||||
|
@ -30,7 +31,6 @@ import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMoreOpenAccess;
|
||||||
import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMorePid;
|
import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMorePid;
|
||||||
import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMoreSubject;
|
import eu.dnetlib.dhp.broker.oa.matchers.simple.EnrichMoreSubject;
|
||||||
import eu.dnetlib.dhp.broker.oa.util.aggregators.simple.ResultGroup;
|
import eu.dnetlib.dhp.broker.oa.util.aggregators.simple.ResultGroup;
|
||||||
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
|
|
||||||
import eu.dnetlib.pace.config.DedupConfig;
|
import eu.dnetlib.pace.config.DedupConfig;
|
||||||
|
|
||||||
public class EventFinder {
|
public class EventFinder {
|
||||||
|
@ -68,7 +68,7 @@ public class EventFinder {
|
||||||
public static EventGroup generateEvents(final ResultGroup results, final DedupConfig dedupConfig) {
|
public static EventGroup generateEvents(final ResultGroup results, final DedupConfig dedupConfig) {
|
||||||
final List<UpdateInfo<?>> list = new ArrayList<>();
|
final List<UpdateInfo<?>> list = new ArrayList<>();
|
||||||
|
|
||||||
for (final ResultWithRelations target : results.getData()) {
|
for (final OpenaireBrokerResult target : results.getData()) {
|
||||||
for (final UpdateMatcher<?> matcher : matchers) {
|
for (final UpdateMatcher<?> matcher : matchers) {
|
||||||
list.addAll(matcher.searchUpdatesForRecord(target, results.getData(), dedupConfig));
|
list.addAll(matcher.searchUpdatesForRecord(target, results.getData(), dedupConfig));
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,7 +1,6 @@
|
||||||
|
|
||||||
package eu.dnetlib.dhp.broker.oa.util;
|
package eu.dnetlib.dhp.broker.oa.util;
|
||||||
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.function.BiConsumer;
|
import java.util.function.BiConsumer;
|
||||||
import java.util.function.Function;
|
import java.util.function.Function;
|
||||||
|
|
||||||
|
@ -10,14 +9,11 @@ import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
|
||||||
|
import eu.dnetlib.broker.objects.Instance;
|
||||||
import eu.dnetlib.broker.objects.OpenAireEventPayload;
|
import eu.dnetlib.broker.objects.OpenAireEventPayload;
|
||||||
|
import eu.dnetlib.broker.objects.OpenaireBrokerResult;
|
||||||
import eu.dnetlib.broker.objects.Provenance;
|
import eu.dnetlib.broker.objects.Provenance;
|
||||||
import eu.dnetlib.broker.objects.Publication;
|
|
||||||
import eu.dnetlib.dhp.broker.model.Topic;
|
import eu.dnetlib.dhp.broker.model.Topic;
|
||||||
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
|
|
||||||
import eu.dnetlib.dhp.schema.oaf.Instance;
|
|
||||||
import eu.dnetlib.dhp.schema.oaf.KeyValue;
|
|
||||||
import eu.dnetlib.dhp.schema.oaf.Result;
|
|
||||||
import eu.dnetlib.pace.config.DedupConfig;
|
import eu.dnetlib.pace.config.DedupConfig;
|
||||||
import eu.dnetlib.pace.model.MapDocument;
|
import eu.dnetlib.pace.model.MapDocument;
|
||||||
import eu.dnetlib.pace.tree.support.TreeProcessor;
|
import eu.dnetlib.pace.tree.support.TreeProcessor;
|
||||||
|
@ -29,11 +25,11 @@ public final class UpdateInfo<T> {
|
||||||
|
|
||||||
private final T highlightValue;
|
private final T highlightValue;
|
||||||
|
|
||||||
private final ResultWithRelations source;
|
private final OpenaireBrokerResult source;
|
||||||
|
|
||||||
private final ResultWithRelations target;
|
private final OpenaireBrokerResult target;
|
||||||
|
|
||||||
private final BiConsumer<Publication, T> compileHighlight;
|
private final BiConsumer<OpenaireBrokerResult, T> compileHighlight;
|
||||||
|
|
||||||
private final Function<T, String> highlightToString;
|
private final Function<T, String> highlightToString;
|
||||||
|
|
||||||
|
@ -41,9 +37,9 @@ public final class UpdateInfo<T> {
|
||||||
|
|
||||||
private static final Logger log = LoggerFactory.getLogger(UpdateInfo.class);
|
private static final Logger log = LoggerFactory.getLogger(UpdateInfo.class);
|
||||||
|
|
||||||
public UpdateInfo(final Topic topic, final T highlightValue, final ResultWithRelations source,
|
public UpdateInfo(final Topic topic, final T highlightValue, final OpenaireBrokerResult source,
|
||||||
final ResultWithRelations target,
|
final OpenaireBrokerResult target,
|
||||||
final BiConsumer<Publication, T> compileHighlight,
|
final BiConsumer<OpenaireBrokerResult, T> compileHighlight,
|
||||||
final Function<T, String> highlightToString,
|
final Function<T, String> highlightToString,
|
||||||
final DedupConfig dedupConfig) {
|
final DedupConfig dedupConfig) {
|
||||||
this.topic = topic;
|
this.topic = topic;
|
||||||
|
@ -52,22 +48,23 @@ public final class UpdateInfo<T> {
|
||||||
this.target = target;
|
this.target = target;
|
||||||
this.compileHighlight = compileHighlight;
|
this.compileHighlight = compileHighlight;
|
||||||
this.highlightToString = highlightToString;
|
this.highlightToString = highlightToString;
|
||||||
this.trust = calculateTrust(dedupConfig, source.getResult(), target.getResult());
|
this.trust = calculateTrust(dedupConfig, source, target);
|
||||||
}
|
}
|
||||||
|
|
||||||
public T getHighlightValue() {
|
public T getHighlightValue() {
|
||||||
return highlightValue;
|
return highlightValue;
|
||||||
}
|
}
|
||||||
|
|
||||||
public ResultWithRelations getSource() {
|
public OpenaireBrokerResult getSource() {
|
||||||
return source;
|
return source;
|
||||||
}
|
}
|
||||||
|
|
||||||
public ResultWithRelations getTarget() {
|
public OpenaireBrokerResult getTarget() {
|
||||||
return target;
|
return target;
|
||||||
}
|
}
|
||||||
|
|
||||||
private float calculateTrust(final DedupConfig dedupConfig, final Result r1, final Result r2) {
|
private float calculateTrust(final DedupConfig dedupConfig, final OpenaireBrokerResult r1,
|
||||||
|
final OpenaireBrokerResult r2) {
|
||||||
try {
|
try {
|
||||||
final ObjectMapper objectMapper = new ObjectMapper();
|
final ObjectMapper objectMapper = new ObjectMapper();
|
||||||
final MapDocument doc1 = MapDocumentUtil
|
final MapDocument doc1 = MapDocumentUtil
|
||||||
|
@ -103,26 +100,18 @@ public final class UpdateInfo<T> {
|
||||||
|
|
||||||
public OpenAireEventPayload asBrokerPayload() {
|
public OpenAireEventPayload asBrokerPayload() {
|
||||||
|
|
||||||
final Publication p = ConversionUtils.oafResultToBrokerPublication(getSource().getResult());
|
compileHighlight.accept(target, getHighlightValue());
|
||||||
compileHighlight.accept(p, getHighlightValue());
|
|
||||||
|
|
||||||
final Publication hl = new Publication();
|
final OpenaireBrokerResult hl = new OpenaireBrokerResult();
|
||||||
compileHighlight.accept(hl, getHighlightValue());
|
compileHighlight.accept(hl, getHighlightValue());
|
||||||
|
|
||||||
final String provId = getSource().getResult().getOriginalId().stream().findFirst().orElse(null);
|
final String provId = getSource().getOriginalId();
|
||||||
final String provRepo = getSource()
|
final String provRepo = getSource().getCollectedFromName();
|
||||||
.getResult()
|
|
||||||
.getCollectedfrom()
|
|
||||||
.stream()
|
|
||||||
.map(KeyValue::getValue)
|
|
||||||
.findFirst()
|
|
||||||
.orElse(null);
|
|
||||||
final String provUrl = getSource()
|
final String provUrl = getSource()
|
||||||
.getResult()
|
.getInstances()
|
||||||
.getInstance()
|
|
||||||
.stream()
|
.stream()
|
||||||
.map(Instance::getUrl)
|
.map(Instance::getUrl)
|
||||||
.flatMap(List::stream)
|
|
||||||
.findFirst()
|
.findFirst()
|
||||||
.orElse(null);
|
.orElse(null);
|
||||||
;
|
;
|
||||||
|
@ -130,7 +119,7 @@ public final class UpdateInfo<T> {
|
||||||
final Provenance provenance = new Provenance().setId(provId).setRepositoryName(provRepo).setUrl(provUrl);
|
final Provenance provenance = new Provenance().setId(provId).setRepositoryName(provRepo).setUrl(provUrl);
|
||||||
|
|
||||||
return new OpenAireEventPayload()
|
return new OpenAireEventPayload()
|
||||||
.setPublication(p)
|
.setPublication(target)
|
||||||
.setHighlight(hl)
|
.setHighlight(hl)
|
||||||
.setTrust(trust)
|
.setTrust(trust)
|
||||||
.setProvenance(provenance);
|
.setProvenance(provenance);
|
||||||
|
|
|
@ -5,11 +5,11 @@ import org.apache.spark.sql.Encoder;
|
||||||
import org.apache.spark.sql.Encoders;
|
import org.apache.spark.sql.Encoders;
|
||||||
import org.apache.spark.sql.expressions.Aggregator;
|
import org.apache.spark.sql.expressions.Aggregator;
|
||||||
|
|
||||||
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
|
import eu.dnetlib.broker.objects.OpenaireBrokerResult;
|
||||||
import eu.dnetlib.dhp.schema.oaf.Relation;
|
import eu.dnetlib.dhp.schema.oaf.Relation;
|
||||||
import scala.Tuple2;
|
import scala.Tuple2;
|
||||||
|
|
||||||
public class ResultAggregator extends Aggregator<Tuple2<ResultWithRelations, Relation>, ResultGroup, ResultGroup> {
|
public class ResultAggregator extends Aggregator<Tuple2<OpenaireBrokerResult, Relation>, ResultGroup, ResultGroup> {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
*
|
*
|
||||||
|
@ -22,7 +22,7 @@ public class ResultAggregator extends Aggregator<Tuple2<ResultWithRelations, Rel
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public ResultGroup reduce(final ResultGroup group, final Tuple2<ResultWithRelations, Relation> t) {
|
public ResultGroup reduce(final ResultGroup group, final Tuple2<OpenaireBrokerResult, Relation> t) {
|
||||||
return group.addElement(t._1);
|
return group.addElement(t._1);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -5,7 +5,7 @@ import java.io.Serializable;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
|
||||||
import eu.dnetlib.dhp.broker.oa.util.aggregators.withRels.ResultWithRelations;
|
import eu.dnetlib.broker.objects.OpenaireBrokerResult;
|
||||||
|
|
||||||
public class ResultGroup implements Serializable {
|
public class ResultGroup implements Serializable {
|
||||||
|
|
||||||
|
@ -14,13 +14,13 @@ public class ResultGroup implements Serializable {
|
||||||
*/
|
*/
|
||||||
private static final long serialVersionUID = -3360828477088669296L;
|
private static final long serialVersionUID = -3360828477088669296L;
|
||||||
|
|
||||||
private final List<ResultWithRelations> data = new ArrayList<>();
|
private final List<OpenaireBrokerResult> data = new ArrayList<>();
|
||||||
|
|
||||||
public List<ResultWithRelations> getData() {
|
public List<OpenaireBrokerResult> getData() {
|
||||||
return data;
|
return data;
|
||||||
}
|
}
|
||||||
|
|
||||||
public ResultGroup addElement(final ResultWithRelations elem) {
|
public ResultGroup addElement(final OpenaireBrokerResult elem) {
|
||||||
data.add(elem);
|
data.add(elem);
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,69 @@
|
||||||
|
|
||||||
|
package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;
|
||||||
|
|
||||||
|
import org.apache.spark.sql.Encoder;
|
||||||
|
import org.apache.spark.sql.Encoders;
|
||||||
|
import org.apache.spark.sql.expressions.Aggregator;
|
||||||
|
|
||||||
|
import eu.dnetlib.broker.objects.OpenaireBrokerResult;
|
||||||
|
import scala.Tuple2;
|
||||||
|
|
||||||
|
public class OpenaireBrokerResultAggregator<T>
|
||||||
|
extends Aggregator<Tuple2<OpenaireBrokerResult, T>, OpenaireBrokerResult, OpenaireBrokerResult> {
|
||||||
|
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
private static final long serialVersionUID = -3687878788861013488L;
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public OpenaireBrokerResult zero() {
|
||||||
|
return new OpenaireBrokerResult();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public OpenaireBrokerResult finish(final OpenaireBrokerResult g) {
|
||||||
|
return g;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public OpenaireBrokerResult reduce(final OpenaireBrokerResult g, final Tuple2<OpenaireBrokerResult, T> t) {
|
||||||
|
if (g.getOriginalId() == null) {
|
||||||
|
return t._1;
|
||||||
|
} else if (t._2 instanceof RelatedSoftware) {
|
||||||
|
g.getSoftwares().add(((RelatedSoftware) t._2).getRelSoftware());
|
||||||
|
} else if (t._2 instanceof RelatedDataset) {
|
||||||
|
g.getDatasets().add(((RelatedDataset) t._2).getRelDataset());
|
||||||
|
} else if (t._2 instanceof RelatedPublication) {
|
||||||
|
g.getPublications().add(((RelatedPublication) t._2).getRelPublication());
|
||||||
|
} else if (t._2 instanceof RelatedProject) {
|
||||||
|
g.getProjects().add(((RelatedProject) t._2).getRelProject());
|
||||||
|
}
|
||||||
|
return g;
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public OpenaireBrokerResult merge(final OpenaireBrokerResult g1, final OpenaireBrokerResult g2) {
|
||||||
|
if (g1.getOriginalId() != null) {
|
||||||
|
g1.getSoftwares().addAll(g2.getSoftwares());
|
||||||
|
g1.getDatasets().addAll(g2.getDatasets());
|
||||||
|
g1.getPublications().addAll(g2.getPublications());
|
||||||
|
g1.getProjects().addAll(g2.getProjects());
|
||||||
|
return g1;
|
||||||
|
} else {
|
||||||
|
return g2;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Encoder<OpenaireBrokerResult> bufferEncoder() {
|
||||||
|
return Encoders.kryo(OpenaireBrokerResult.class);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Encoder<OpenaireBrokerResult> outputEncoder() {
|
||||||
|
return Encoders.kryo(OpenaireBrokerResult.class);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -1,10 +1,16 @@
|
||||||
|
|
||||||
package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;
|
package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;
|
||||||
|
|
||||||
import eu.dnetlib.dhp.schema.oaf.Dataset;
|
import java.io.Serializable;
|
||||||
|
|
||||||
public class RelatedDataset {
|
import eu.dnetlib.broker.objects.Dataset;
|
||||||
|
|
||||||
|
public class RelatedDataset implements Serializable {
|
||||||
|
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
private static final long serialVersionUID = 774487705184038324L;
|
||||||
private final String source;
|
private final String source;
|
||||||
private final String relType;
|
private final String relType;
|
||||||
private final Dataset relDataset;
|
private final Dataset relDataset;
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
|
|
||||||
package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;
|
package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;
|
||||||
|
|
||||||
|
import eu.dnetlib.dhp.broker.oa.util.ConversionUtils;
|
||||||
import eu.dnetlib.dhp.schema.oaf.Dataset;
|
import eu.dnetlib.dhp.schema.oaf.Dataset;
|
||||||
import eu.dnetlib.dhp.schema.oaf.Project;
|
import eu.dnetlib.dhp.schema.oaf.Project;
|
||||||
import eu.dnetlib.dhp.schema.oaf.Publication;
|
import eu.dnetlib.dhp.schema.oaf.Publication;
|
||||||
|
@ -9,20 +10,25 @@ import eu.dnetlib.dhp.schema.oaf.Software;
|
||||||
public class RelatedEntityFactory {
|
public class RelatedEntityFactory {
|
||||||
|
|
||||||
@SuppressWarnings("unchecked")
|
@SuppressWarnings("unchecked")
|
||||||
public static <RT, T> RT newRelatedEntity(final String sourceId, final String relType, final T target,
|
public static <RT, T> RT newRelatedEntity(final String sourceId,
|
||||||
|
final String relType,
|
||||||
|
final T target,
|
||||||
final Class<RT> clazz) {
|
final Class<RT> clazz) {
|
||||||
|
|
||||||
if (clazz == RelatedProject.class) {
|
if (clazz == RelatedProject.class) {
|
||||||
return (RT) new RelatedProject(sourceId, relType, (Project) target);
|
return (RT) new RelatedProject(sourceId, relType,
|
||||||
}
|
ConversionUtils.oafProjectToBrokerProject((Project) target));
|
||||||
if (clazz == RelatedSoftware.class) {
|
} else if (clazz == RelatedSoftware.class) {
|
||||||
return (RT) new RelatedSoftware(sourceId, relType, (Software) target);
|
return (RT) new RelatedSoftware(sourceId, relType,
|
||||||
}
|
ConversionUtils.oafSoftwareToBrokerSoftware((Software) target));
|
||||||
if (clazz == RelatedDataset.class) {
|
} else if (clazz == RelatedDataset.class) {
|
||||||
return (RT) new RelatedDataset(sourceId, relType, (Dataset) target);
|
return (RT) new RelatedDataset(sourceId, relType,
|
||||||
}
|
ConversionUtils.oafDatasetToBrokerDataset((Dataset) target));
|
||||||
if (clazz == RelatedPublication.class) {
|
} else if (clazz == RelatedPublication.class) {
|
||||||
return (RT) new RelatedPublication(sourceId, relType, (Publication) target);
|
return (RT) new RelatedPublication(sourceId, relType,
|
||||||
}
|
ConversionUtils.oafPublicationToBrokerPublication((Publication) target));
|
||||||
|
} else {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,9 +1,16 @@
|
||||||
|
|
||||||
package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;
|
package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;
|
||||||
|
|
||||||
import eu.dnetlib.dhp.schema.oaf.Project;
|
import java.io.Serializable;
|
||||||
|
|
||||||
public class RelatedProject {
|
import eu.dnetlib.broker.objects.Project;
|
||||||
|
|
||||||
|
public class RelatedProject implements Serializable {
|
||||||
|
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
private static final long serialVersionUID = 4941437626549329870L;
|
||||||
|
|
||||||
private final String source;
|
private final String source;
|
||||||
private final String relType;
|
private final String relType;
|
||||||
|
|
|
@ -1,9 +1,16 @@
|
||||||
|
|
||||||
package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;
|
package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;
|
||||||
|
|
||||||
import eu.dnetlib.dhp.schema.oaf.Publication;
|
import java.io.Serializable;
|
||||||
|
|
||||||
public class RelatedPublication {
|
import eu.dnetlib.broker.objects.Publication;
|
||||||
|
|
||||||
|
public class RelatedPublication implements Serializable {
|
||||||
|
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
private static final long serialVersionUID = 9021609640411395128L;
|
||||||
|
|
||||||
private final String source;
|
private final String source;
|
||||||
private final String relType;
|
private final String relType;
|
||||||
|
|
|
@ -1,10 +1,16 @@
|
||||||
|
|
||||||
package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;
|
package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;
|
||||||
|
|
||||||
import eu.dnetlib.dhp.schema.oaf.Software;
|
import java.io.Serializable;
|
||||||
|
|
||||||
public class RelatedSoftware {
|
import eu.dnetlib.broker.objects.Software;
|
||||||
|
|
||||||
|
public class RelatedSoftware implements Serializable {
|
||||||
|
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
private static final long serialVersionUID = 7573383356943300157L;
|
||||||
private final String source;
|
private final String source;
|
||||||
private final String relType;
|
private final String relType;
|
||||||
private final Software relSoftware;
|
private final Software relSoftware;
|
||||||
|
|
|
@ -1,55 +0,0 @@
|
||||||
|
|
||||||
package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;
|
|
||||||
|
|
||||||
import java.io.Serializable;
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
import eu.dnetlib.dhp.schema.oaf.Result;
|
|
||||||
|
|
||||||
public class ResultWithRelations implements Serializable {
|
|
||||||
|
|
||||||
/**
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
private static final long serialVersionUID = -1368401915974311571L;
|
|
||||||
|
|
||||||
private Result result;
|
|
||||||
|
|
||||||
private final List<RelatedDataset> datasets = new ArrayList<>();
|
|
||||||
private final List<RelatedPublication> publications = new ArrayList<>();
|
|
||||||
private final List<RelatedSoftware> softwares = new ArrayList<>();
|
|
||||||
private final List<RelatedProject> projects = new ArrayList<>();
|
|
||||||
|
|
||||||
public ResultWithRelations() {
|
|
||||||
}
|
|
||||||
|
|
||||||
public ResultWithRelations(final Result result) {
|
|
||||||
this.result = result;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Result getResult() {
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
public List<RelatedDataset> getDatasets() {
|
|
||||||
return datasets;
|
|
||||||
}
|
|
||||||
|
|
||||||
public List<RelatedPublication> getPublications() {
|
|
||||||
return publications;
|
|
||||||
}
|
|
||||||
|
|
||||||
public List<RelatedSoftware> getSoftwares() {
|
|
||||||
return softwares;
|
|
||||||
}
|
|
||||||
|
|
||||||
public List<RelatedProject> getProjects() {
|
|
||||||
return projects;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setResult(final Result result) {
|
|
||||||
this.result = result;
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,68 +0,0 @@
|
||||||
|
|
||||||
package eu.dnetlib.dhp.broker.oa.util.aggregators.withRels;
|
|
||||||
|
|
||||||
import org.apache.spark.sql.Encoder;
|
|
||||||
import org.apache.spark.sql.Encoders;
|
|
||||||
import org.apache.spark.sql.expressions.Aggregator;
|
|
||||||
|
|
||||||
import scala.Tuple2;
|
|
||||||
|
|
||||||
public class ResultWithRelationsAggregator<T>
|
|
||||||
extends Aggregator<Tuple2<ResultWithRelations, T>, ResultWithRelations, ResultWithRelations> {
|
|
||||||
|
|
||||||
/**
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
private static final long serialVersionUID = -3687878788861013488L;
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public ResultWithRelations zero() {
|
|
||||||
return new ResultWithRelations();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public ResultWithRelations finish(final ResultWithRelations g) {
|
|
||||||
return g;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public ResultWithRelations reduce(final ResultWithRelations g, final Tuple2<ResultWithRelations, T> t) {
|
|
||||||
if (g.getResult() == null) {
|
|
||||||
return t._1;
|
|
||||||
} else if (t._2 instanceof RelatedSoftware) {
|
|
||||||
g.getSoftwares().add((RelatedSoftware) t._2);
|
|
||||||
} else if (t._2 instanceof RelatedDataset) {
|
|
||||||
g.getDatasets().add((RelatedDataset) t._2);
|
|
||||||
} else if (t._2 instanceof RelatedPublication) {
|
|
||||||
g.getPublications().add((RelatedPublication) t._2);
|
|
||||||
} else if (t._2 instanceof RelatedProject) {
|
|
||||||
g.getProjects().add((RelatedProject) t._2);
|
|
||||||
}
|
|
||||||
return g;
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public ResultWithRelations merge(final ResultWithRelations g1, final ResultWithRelations g2) {
|
|
||||||
if (g1.getResult() != null) {
|
|
||||||
g1.getSoftwares().addAll(g2.getSoftwares());
|
|
||||||
g1.getDatasets().addAll(g2.getDatasets());
|
|
||||||
g1.getPublications().addAll(g2.getPublications());
|
|
||||||
g1.getProjects().addAll(g2.getProjects());
|
|
||||||
return g1;
|
|
||||||
} else {
|
|
||||||
return g2;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Encoder<ResultWithRelations> bufferEncoder() {
|
|
||||||
return Encoders.kryo(ResultWithRelations.class);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Encoder<ResultWithRelations> outputEncoder() {
|
|
||||||
return Encoders.kryo(ResultWithRelations.class);
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
File diff suppressed because one or more lines are too long
|
@ -1,14 +1,18 @@
|
||||||
<configuration>
|
<configuration>
|
||||||
<property>
|
<property>
|
||||||
<name>jobTracker</name>
|
<name>jobTracker</name>
|
||||||
<value>yarnRM</value>
|
<value>hadoop-rm3.garr-pa1.d4science.org:8032</value>
|
||||||
</property>
|
</property>
|
||||||
<property>
|
<property>
|
||||||
<name>nameNode</name>
|
<name>nameNode</name>
|
||||||
<value>hdfs://nameservice1</value>
|
<value>hdfs://hadoop-rm1.garr-pa1.d4science.org:8020</value>
|
||||||
</property>
|
</property>
|
||||||
<property>
|
<property>
|
||||||
<name>oozie.action.sharelib.for.java</name>
|
<name>oozie.use.system.libpath</name>
|
||||||
|
<value>true</value>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>oozie.action.sharelib.for.spark</name>
|
||||||
<value>spark2</value>
|
<value>spark2</value>
|
||||||
</property>
|
</property>
|
||||||
<property>
|
<property>
|
||||||
|
@ -16,7 +20,23 @@
|
||||||
<value>true</value>
|
<value>true</value>
|
||||||
</property>
|
</property>
|
||||||
<property>
|
<property>
|
||||||
<name>oozie.launcher.mapreduce.map.java.opts</name>
|
<name>hive_metastore_uris</name>
|
||||||
<value>-Xmx4g</value>
|
<value>thrift://hadoop-edge2.garr-pa1.d4science.org:9083</value>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>spark2YarnHistoryServerAddress</name>
|
||||||
|
<value>http://hadoop-edge1.garr-pa1.d4science.org:18089/</value>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>spark2EventLogDir</name>
|
||||||
|
<value>/user/spark/spark2ApplicationHistory</value>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>spark2ExtraListeners</name>
|
||||||
|
<value>"com.cloudera.spark.lineage.NavigatorAppListener"</value>
|
||||||
|
</property>
|
||||||
|
<property>
|
||||||
|
<name>spark2SqlQueryExecutionListeners</name>
|
||||||
|
<value>"com.cloudera.spark.lineage.NavigatorQueryListener"</value>
|
||||||
</property>
|
</property>
|
||||||
</configuration>
|
</configuration>
|
|
@ -50,6 +50,8 @@ import org.apache.commons.lang3.StringUtils;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
|
import com.google.common.collect.Lists;
|
||||||
|
|
||||||
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
|
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
|
||||||
import eu.dnetlib.dhp.common.DbClient;
|
import eu.dnetlib.dhp.common.DbClient;
|
||||||
import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication;
|
import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication;
|
||||||
|
@ -171,7 +173,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication i
|
||||||
final Datasource ds = new Datasource();
|
final Datasource ds = new Datasource();
|
||||||
|
|
||||||
ds.setId(createOpenaireId(10, rs.getString("datasourceid"), true));
|
ds.setId(createOpenaireId(10, rs.getString("datasourceid"), true));
|
||||||
ds.setOriginalId(Arrays.asList(rs.getString("datasourceid")));
|
ds.setOriginalId(Arrays.asList((String[]) rs.getArray("identities").getArray()));
|
||||||
ds
|
ds
|
||||||
.setCollectedfrom(
|
.setCollectedfrom(
|
||||||
listKeyValues(
|
listKeyValues(
|
||||||
|
|
|
@ -126,9 +126,16 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
|
||||||
for (final Object o : doc.selectNodes("//datacite:alternateIdentifier[@alternateIdentifierType='URL']")) {
|
for (final Object o : doc.selectNodes("//datacite:alternateIdentifier[@alternateIdentifierType='URL']")) {
|
||||||
url.add(((Node) o).getText().trim());
|
url.add(((Node) o).getText().trim());
|
||||||
}
|
}
|
||||||
|
for (final Object o : doc
|
||||||
|
.selectNodes("//datacite:alternateIdentifier[@alternateIdentifierType='landingPage']")) {
|
||||||
|
url.add(((Node) o).getText().trim());
|
||||||
|
}
|
||||||
for (final Object o : doc.selectNodes("//datacite:identifier[@identifierType='URL']")) {
|
for (final Object o : doc.selectNodes("//datacite:identifier[@identifierType='URL']")) {
|
||||||
url.add(((Node) o).getText().trim());
|
url.add(((Node) o).getText().trim());
|
||||||
}
|
}
|
||||||
|
for (final Object o : doc.selectNodes("//datacite:identifier[@identifierType='landingPage']")) {
|
||||||
|
url.add(((Node) o).getText().trim());
|
||||||
|
}
|
||||||
for (final Object o : doc.selectNodes("//datacite:alternateIdentifier[@alternateIdentifierType='DOI']")) {
|
for (final Object o : doc.selectNodes("//datacite:alternateIdentifier[@alternateIdentifierType='DOI']")) {
|
||||||
url.add(HTTP_DX_DOI_PREIFX + ((Node) o).getText().trim());
|
url.add(HTTP_DX_DOI_PREIFX + ((Node) o).getText().trim());
|
||||||
}
|
}
|
||||||
|
@ -367,11 +374,13 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
|
||||||
res
|
res
|
||||||
.addAll(
|
.addAll(
|
||||||
prepareListStructPropsWithValidQualifier(
|
prepareListStructPropsWithValidQualifier(
|
||||||
doc, "//datacite:identifier[@identifierType != 'URL']", "@identifierType", DNET_PID_TYPES, info));
|
doc, "//datacite:identifier[@identifierType != 'URL' and @identifierType != 'landingPage']",
|
||||||
|
"@identifierType", DNET_PID_TYPES, info));
|
||||||
res
|
res
|
||||||
.addAll(
|
.addAll(
|
||||||
prepareListStructPropsWithValidQualifier(
|
prepareListStructPropsWithValidQualifier(
|
||||||
doc, "//datacite:alternateIdentifier[@alternateIdentifierType != 'URL']",
|
doc,
|
||||||
|
"//datacite:alternateIdentifier[@alternateIdentifierType != 'URL' and @alternateIdentifierType != 'landingPage']",
|
||||||
"@alternateIdentifierType", DNET_PID_TYPES, info));
|
"@alternateIdentifierType", DNET_PID_TYPES, info));
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
|
|
|
@ -61,7 +61,7 @@ public class Vocabulary implements Serializable {
|
||||||
}
|
}
|
||||||
|
|
||||||
public VocabularyTerm getTermBySynonym(final String syn) {
|
public VocabularyTerm getTermBySynonym(final String syn) {
|
||||||
return getTerm(synonyms.get(syn));
|
return getTerm(synonyms.get(syn.toLowerCase()));
|
||||||
}
|
}
|
||||||
|
|
||||||
public Qualifier getTermAsQualifier(final String termId) {
|
public Qualifier getTermAsQualifier(final String termId) {
|
||||||
|
|
|
@ -3,7 +3,6 @@ package eu.dnetlib.dhp.oa.graph.raw.common;
|
||||||
|
|
||||||
import java.io.Serializable;
|
import java.io.Serializable;
|
||||||
import java.util.*;
|
import java.util.*;
|
||||||
import java.util.function.Supplier;
|
|
||||||
import java.util.stream.Collectors;
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
import org.apache.commons.lang3.StringUtils;
|
import org.apache.commons.lang3.StringUtils;
|
||||||
|
@ -14,7 +13,7 @@ import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
|
||||||
|
|
||||||
public class VocabularyGroup implements Serializable {
|
public class VocabularyGroup implements Serializable {
|
||||||
|
|
||||||
public static final String VOCABULARIES_XQUERY = "for $x in collection(' /db/DRIVER/VocabularyDSResources/VocabularyDSResourceType') \n"
|
public static final String VOCABULARIES_XQUERY = "for $x in collection('/db/DRIVER/VocabularyDSResources/VocabularyDSResourceType') \n"
|
||||||
+
|
+
|
||||||
"let $vocid := $x//VOCABULARY_NAME/@code\n" +
|
"let $vocid := $x//VOCABULARY_NAME/@code\n" +
|
||||||
"let $vocname := $x//VOCABULARY_NAME/text()\n" +
|
"let $vocname := $x//VOCABULARY_NAME/text()\n" +
|
||||||
|
@ -46,7 +45,7 @@ public class VocabularyGroup implements Serializable {
|
||||||
}
|
}
|
||||||
|
|
||||||
vocs.addTerm(vocId, termId, termName);
|
vocs.addTerm(vocId, termId, termName);
|
||||||
vocs.addSynonyms(vocId, termId, termId);
|
// vocs.addSynonyms(vocId, termId, termId);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -58,7 +57,7 @@ public class VocabularyGroup implements Serializable {
|
||||||
final String syn = arr[2].trim();
|
final String syn = arr[2].trim();
|
||||||
|
|
||||||
vocs.addSynonyms(vocId, termId, syn);
|
vocs.addSynonyms(vocId, termId, syn);
|
||||||
vocs.addSynonyms(vocId, termId, termId);
|
// vocs.addSynonyms(vocId, termId, termId);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -135,7 +134,7 @@ public class VocabularyGroup implements Serializable {
|
||||||
Optional
|
Optional
|
||||||
.ofNullable(vocs.get(id))
|
.ofNullable(vocs.get(id))
|
||||||
.orElseThrow(() -> new IllegalArgumentException("missing vocabulary id: " + vocId))
|
.orElseThrow(() -> new IllegalArgumentException("missing vocabulary id: " + vocId))
|
||||||
.addSynonym(syn, termId);
|
.addSynonym(syn.toLowerCase(), termId);
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -159,7 +159,7 @@ public class DatasetScholexplorerParser extends AbstractScholexplorerParser {
|
||||||
.setDescription(
|
.setDescription(
|
||||||
descs
|
descs
|
||||||
.stream()
|
.stream()
|
||||||
.map(it -> it.length() < 10000 ? it : it.substring(0, 10000))
|
// .map(it -> it.length() < 10000 ? it : it.substring(0, 10000))
|
||||||
.map(
|
.map(
|
||||||
it -> {
|
it -> {
|
||||||
final Field<String> d = new Field<>();
|
final Field<String> d = new Field<>();
|
||||||
|
|
|
@ -213,10 +213,10 @@ public class PublicationScholexplorerParser extends AbstractScholexplorerParser
|
||||||
.setValue(
|
.setValue(
|
||||||
VtdUtilityParser.getSingleValue(ap, vn, "//*[local-name()='description']"));
|
VtdUtilityParser.getSingleValue(ap, vn, "//*[local-name()='description']"));
|
||||||
|
|
||||||
if (StringUtils.isNotBlank(description.getValue())
|
// if (StringUtils.isNotBlank(description.getValue())
|
||||||
&& description.getValue().length() > 10000) {
|
// && description.getValue().length() > 10000) {
|
||||||
description.setValue(description.getValue().substring(0, 10000));
|
// description.setValue(description.getValue().substring(0, 10000));
|
||||||
}
|
// }
|
||||||
|
|
||||||
parsedObject.setDescription(Collections.singletonList(description));
|
parsedObject.setDescription(Collections.singletonList(description));
|
||||||
|
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
DROP VIEW IF EXISTS ${hiveDbName}.result;
|
DROP VIEW IF EXISTS ${hiveDbName}.result;
|
||||||
|
|
||||||
CREATE VIEW IF NOT EXISTS result as
|
CREATE VIEW IF NOT EXISTS result as
|
||||||
select id, dateofcollection, title, publisher, bestaccessright, datainfo, collectedfrom, pid, author, resulttype, language, country, subject, description, dateofacceptance, embargoenddate, resourcetype, context, instance from ${hiveDbName}.publication p
|
select id, dateofcollection, title, publisher, bestaccessright, datainfo, collectedfrom, pid, author, resulttype, language, country, subject, description, dateofacceptance, embargoenddate, resourcetype, context, externalreference, instance from ${hiveDbName}.publication p
|
||||||
union all
|
union all
|
||||||
select id, dateofcollection, title, publisher, bestaccessright, datainfo, collectedfrom, pid, author, resulttype, language, country, subject, description, dateofacceptance, embargoenddate, resourcetype, context, instance from ${hiveDbName}.dataset d
|
select id, dateofcollection, title, publisher, bestaccessright, datainfo, collectedfrom, pid, author, resulttype, language, country, subject, description, dateofacceptance, embargoenddate, resourcetype, context, externalreference, instance from ${hiveDbName}.dataset d
|
||||||
union all
|
union all
|
||||||
select id, dateofcollection, title, publisher, bestaccessright, datainfo, collectedfrom, pid, author, resulttype, language, country, subject, description, dateofacceptance, embargoenddate, resourcetype, context, instance from ${hiveDbName}.software s
|
select id, dateofcollection, title, publisher, bestaccessright, datainfo, collectedfrom, pid, author, resulttype, language, country, subject, description, dateofacceptance, embargoenddate, resourcetype, context, externalreference, instance from ${hiveDbName}.software s
|
||||||
union all
|
union all
|
||||||
select id, dateofcollection, title, publisher, bestaccessright, datainfo, collectedfrom, pid, author, resulttype, language, country, subject, description, dateofacceptance, embargoenddate, resourcetype, context, instance from ${hiveDbName}.otherresearchproduct o;
|
select id, dateofcollection, title, publisher, bestaccessright, datainfo, collectedfrom, pid, author, resulttype, language, country, subject, description, dateofacceptance, embargoenddate, resourcetype, context, externalreference, instance from ${hiveDbName}.otherresearchproduct o;
|
||||||
|
|
|
@ -47,8 +47,8 @@
|
||||||
|
|
||||||
<decision name="ReuseContent">
|
<decision name="ReuseContent">
|
||||||
<switch>
|
<switch>
|
||||||
<case to="ResetWorkingPath">${wf:conf('reuseContent') eq false}</case>
|
<case to="ImportEntitiesFromMongo">${wf:conf('reuseContent') eq false}</case>
|
||||||
<case to="ResetTargetPath">${wf:conf('reuseContent') eq true}</case>
|
<case to="ConvertXML2Entity">${wf:conf('reuseContent') eq true}</case>
|
||||||
<default to="ResetWorkingPath"/>
|
<default to="ResetWorkingPath"/>
|
||||||
</switch>
|
</switch>
|
||||||
</decision>
|
</decision>
|
||||||
|
|
|
@ -60,8 +60,8 @@ public class CleaningFunctionTest {
|
||||||
|
|
||||||
assertNotNull(p_out);
|
assertNotNull(p_out);
|
||||||
|
|
||||||
assertEquals("eng", p_out.getLanguage().getClassid());
|
assertEquals("und", p_out.getLanguage().getClassid());
|
||||||
assertEquals("English", p_out.getLanguage().getClassname());
|
assertEquals("Undetermined", p_out.getLanguage().getClassname());
|
||||||
|
|
||||||
assertEquals("0018", p_out.getInstance().get(0).getInstancetype().getClassid());
|
assertEquals("0018", p_out.getInstance().get(0).getInstancetype().getClassid());
|
||||||
assertEquals("Annotation", p_out.getInstance().get(0).getInstancetype().getClassname());
|
assertEquals("Annotation", p_out.getInstance().get(0).getInstancetype().getClassname());
|
||||||
|
|
|
@ -0,0 +1,200 @@
|
||||||
|
|
||||||
|
package eu.dnetlib.dhp.oa.graph.reflections;
|
||||||
|
|
||||||
|
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||||
|
|
||||||
|
import java.lang.reflect.Field;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
import org.junit.jupiter.api.Test;
|
||||||
|
|
||||||
|
public class ReflectionTest {
|
||||||
|
|
||||||
|
private final Cleaner cleaner = new Cleaner();
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void testObject() throws Exception {
|
||||||
|
final Publication pub = new Publication();
|
||||||
|
pub.setTitle("openaire guidelines");
|
||||||
|
pub.getAuthors().add(new Author("Michele Artini", new Prop("aa-001", "orcid")));
|
||||||
|
pub.getAuthors().add(new Author("Claudio Atzori", new Prop("aa-002", "orcid")));
|
||||||
|
pub.getAuthors().add(new Author("Alessia Bardi", new Prop("aa-003", "orcid")));
|
||||||
|
pub.getSubjects().add(new Prop("infrastructures", "keyword"));
|
||||||
|
pub.getSubjects().add(new Prop("digital libraries", "keyword"));
|
||||||
|
|
||||||
|
cleaner.clean(pub);
|
||||||
|
|
||||||
|
System.out.println(pub);
|
||||||
|
|
||||||
|
assertEquals("OPENAIRE GUIDELINES", pub.getTitle());
|
||||||
|
|
||||||
|
assertEquals("MICHELE ARTINI", pub.getAuthors().get(0).getName());
|
||||||
|
assertEquals("CLAUDIO ATZORI", pub.getAuthors().get(1).getName());
|
||||||
|
assertEquals("ALESSIA BARDI", pub.getAuthors().get(2).getName());
|
||||||
|
|
||||||
|
assertEquals("dnet:aa-001", pub.getAuthors().get(0).getId().getId());
|
||||||
|
assertEquals("dnet:aa-002", pub.getAuthors().get(1).getId().getId());
|
||||||
|
assertEquals("dnet:aa-003", pub.getAuthors().get(2).getId().getId());
|
||||||
|
assertEquals("dnet:orcid", pub.getAuthors().get(0).getId().getName());
|
||||||
|
assertEquals("dnet:orcid", pub.getAuthors().get(1).getId().getName());
|
||||||
|
assertEquals("dnet:orcid", pub.getAuthors().get(2).getId().getName());
|
||||||
|
|
||||||
|
assertEquals("dnet:infrastructures", pub.getSubjects().get(0).getId());
|
||||||
|
assertEquals("dnet:keyword", pub.getSubjects().get(0).getName());
|
||||||
|
assertEquals("dnet:digital libraries", pub.getSubjects().get(1).getId());
|
||||||
|
assertEquals("dnet:keyword", pub.getSubjects().get(1).getName());
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
class Cleaner {
|
||||||
|
|
||||||
|
public void clean(final Object o) throws IllegalArgumentException, IllegalAccessException {
|
||||||
|
if (isPrimitive(o)) {
|
||||||
|
return;
|
||||||
|
} else if (isIterable(o.getClass())) {
|
||||||
|
for (final Object elem : (Iterable<?>) o) {
|
||||||
|
clean(elem);
|
||||||
|
}
|
||||||
|
} else if (hasMapping(o)) {
|
||||||
|
mapObject(o);
|
||||||
|
} else {
|
||||||
|
for (final Field f : o.getClass().getDeclaredFields()) {
|
||||||
|
f.setAccessible(true);
|
||||||
|
final Object val = f.get(o);
|
||||||
|
if (isPrimitive(val)) {
|
||||||
|
f.set(o, cleanValue(f.get(o)));
|
||||||
|
} else if (hasMapping(val)) {
|
||||||
|
mapObject(val);
|
||||||
|
} else {
|
||||||
|
clean(f.get(o));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean hasMapping(final Object o) {
|
||||||
|
return o.getClass() == Prop.class;
|
||||||
|
}
|
||||||
|
|
||||||
|
private void mapObject(final Object o) {
|
||||||
|
if (o.getClass() == Prop.class) {
|
||||||
|
((Prop) o).setId("dnet:" + ((Prop) o).getId());
|
||||||
|
((Prop) o).setName("dnet:" + ((Prop) o).getName());
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
private Object cleanValue(final Object o) {
|
||||||
|
if (o.getClass() == String.class) {
|
||||||
|
return ((String) o).toUpperCase();
|
||||||
|
} else {
|
||||||
|
return o;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean isIterable(final Class<?> cl) {
|
||||||
|
return Iterable.class.isAssignableFrom(cl);
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean isPrimitive(final Object o) {
|
||||||
|
return o.getClass() == String.class;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class Publication {
|
||||||
|
|
||||||
|
private String title;
|
||||||
|
private final List<Author> authors = new ArrayList<>();
|
||||||
|
private final List<Prop> subjects = new ArrayList<>();
|
||||||
|
|
||||||
|
public String getTitle() {
|
||||||
|
return title;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setTitle(final String title) {
|
||||||
|
this.title = title;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<Author> getAuthors() {
|
||||||
|
return authors;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<Prop> getSubjects() {
|
||||||
|
return subjects;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String toString() {
|
||||||
|
return String.format("Publication [title=%s, authors=%s, subjects=%s]", title, authors, subjects);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
class Prop {
|
||||||
|
|
||||||
|
private String id;
|
||||||
|
private String name;
|
||||||
|
|
||||||
|
public Prop(final String id, final String name) {
|
||||||
|
this.id = id;
|
||||||
|
this.name = name;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getId() {
|
||||||
|
return id;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setId(final String id) {
|
||||||
|
this.id = id;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getName() {
|
||||||
|
return name;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setName(final String name) {
|
||||||
|
this.name = name;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String toString() {
|
||||||
|
return String.format("Prop [id=%s, name=%s]", id, name);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
class Author {
|
||||||
|
|
||||||
|
private String name;
|
||||||
|
private Prop id;
|
||||||
|
|
||||||
|
public Author(final String name, final Prop id) {
|
||||||
|
this.name = name;
|
||||||
|
this.id = id;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getName() {
|
||||||
|
return name;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setName(final String name) {
|
||||||
|
this.name = name;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Prop getId() {
|
||||||
|
return id;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setId(final Prop id) {
|
||||||
|
this.id = id;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String toString() {
|
||||||
|
return String.format("Author [name=%s, id=%s]", name, id);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@@ -281,8 +281,8 @@
       "value": "VIRTA"
     },
     "instancetype": {
-      "classid": "Comentario",
-      "classname": "Comentario",
+      "classid": "Comment/debate",
+      "classname": "Comment/debate",
       "schemeid": "dnet:publication_resource",
       "schemename": "dnet:publication_resource"
     },
@@ -317,8 +317,8 @@
       "vol": ""
     },
     "language": {
-      "classid": "en",
-      "classname": "en",
+      "classid": "UNKNOWN",
+      "classname": "UNKNOWN",
       "schemeid": "dnet:languages",
       "schemename": "dnet:languages"
     },
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -6,9 +6,10 @@
   },
   {
     "field": "identities",
-    "type": "not_used",
+    "type": "array",
     "value": [
       "274269ac6f3b::2579-5449",
+      "piwik:13",
       null
     ]
   },
@@ -0,0 +1,376 @@

package eu.dnetlib.dhp.export

import java.time.LocalDateTime
import java.time.format.DateTimeFormatter

import eu.dnetlib.dhp.common.PacePerson
import eu.dnetlib.dhp.schema.oaf.{Author, DataInfo, Dataset, Field, Instance, KeyValue, Publication, Qualifier, Relation, StructuredProperty}
import eu.dnetlib.dhp.schema.scholexplorer.{DLIDataset, DLIPublication, DLIRelation}
import org.apache.commons.lang3.StringUtils

import scala.collection.JavaConverters._


case class DLIExternalReference(id: String, url: String, sitename: String, label: String, pid: String, classId: String) {}

object DLIToOAF {

    val collectedFromMap: Map[String, KeyValue] = Map(
        "dli_________::r3d100010527" -> generateKeyValue("10|re3data_____::c2a591f440598b63d854556beaf01591", "European Nucleotide Archive"),
        "dli_________::r3d100010255" -> generateKeyValue("10|re3data_____::480d275ed6f9666ee76d6a1215eabf26", "Inter-university Consortium for Political and Social Research"),
        "dli_________::r3d100011868" -> generateKeyValue("10|re3data_____::db814dc656a911b556dba42a331cebe9", "Mendeley Data"),
        "dli_________::elsevier" -> generateKeyValue("10|openaire____::8f87e10869299a5fe80b315695296b88", "Elsevier"),
        "dli_________::openaire" -> generateKeyValue("10|infrastruct_::f66f1bd369679b5b077dcdf006089556", "OpenAIRE"),
        "dli_________::thomsonreuters" -> generateKeyValue("10|openaire____::081b82f96300b6a6e3d282bad31cb6e2", "Crossref"),
        "dli_________::r3d100010216" -> generateKeyValue("10|re3data_____::0fd79429de04343dbbec705d9b5f429f", "4TU.Centre for Research Data"),
        "dli_________::r3d100010134" -> generateKeyValue("10|re3data_____::9633d1e8c4309c833c2c442abeb0cfeb", "PANGAEA"),
        "dli_________::ieee" -> generateKeyValue("10|openaire____::081b82f96300b6a6e3d282bad31cb6e2", "Crossref"),
        "dli_________::r3d100010197" -> generateKeyValue("10|re3data_____::9fd1d79973f7fda60cbe1d82e3819a68", "The Cambridge Structural Database"),
        "dli_________::nature" -> generateKeyValue("10|openaire____::6e380d9cf51138baec8480f5a0ce3a2e", "Springer Nature"),
        "dli_________::datacite" -> generateKeyValue("10|openaire____::9e3be59865b2c1c335d32dae2fe7b254", "Datacite"),
        "dli_________::r3d100010578" -> generateKeyValue("10|re3data_____::c4d751f29a7568011a4c80136b30b444", "IEDA"),
        "dli_________::r3d100010464" -> generateKeyValue("10|re3data_____::23e2a81591099828f6b83a1c83150666", "Research Data Australia"),
        "dli_________::r3d100010327" -> generateKeyValue("10|re3data_____::a644620b81135243dc9acc15d2362246", "Worldwide Protein Data Bank"),
        "dli_________::pubmed" -> generateKeyValue("10|opendoar____::eda80a3d5b344bc40f3bc04f65b7a357", "PubMed Central"),
        "dli_________::europe_pmc__" -> generateKeyValue("10|opendoar____::8b6dd7db9af49e67306feb59a8bdc52c", "Europe PubMed Central"),
        "dli_________::crossref" -> generateKeyValue("10|openaire____::081b82f96300b6a6e3d282bad31cb6e2", "Crossref")
    )


    val relationTypeMapping: Map[String, (String, String)] = Map(
        "IsReferencedBy" -> ("isRelatedTo", "relationship"),
        "References" -> ("isRelatedTo", "relationship"),
        "IsRelatedTo" -> ("isRelatedTo", "relationship"),
        "IsSupplementedBy" -> ("IsSupplementedBy", "supplement"),
        "Cites" -> ("cites", "citation"),
        "Unknown" -> ("isRelatedTo", "relationship"),
        "IsSourceOf" -> ("isRelatedTo", "relationship"),
        "IsCitedBy" -> ("IsCitedBy", "citation"),
        "Reviews" -> ("reviews", "review"),
        "Describes" -> ("isRelatedTo", "relationship"),
        "HasAssociationWith" -> ("isRelatedTo", "relationship")
    )

    val expectecdPidType = List("uniprot", "ena", "chembl", "ncbi-n", "ncbi-p", "genbank", "pdb", "url")

    val filteredURL = List(
        "www.ebi.ac.uk",
        "www.uniprot.org",
        "f1000.com",
        "en.wikipedia.org",
        "flybase.org",
        "www.yeastgenome.org",
        "research.bioinformatics.udel.edu",
        "cancer.sanger.ac.uk",
        "www.iedb.org",
        "www.crd.york.ac.uk",
        "www.wormbase.org",
        "web.expasy.org",
        "www.hal.inserm.fr",
        "sabiork.h-its.org",
        "zfin.org",
        "www.pombase.org",
        "www.guidetopharmacology.org",
        "reactome.org"
    )


    def filterPid(p: StructuredProperty): Boolean = {
        if (expectecdPidType.contains(p.getQualifier.getClassname) && p.getQualifier.getClassname.equalsIgnoreCase("url"))
            if (filteredURL.exists(u => p.getValue.contains(u)))
                return true
            else
                return false
        expectecdPidType.contains(p.getQualifier.getClassname)
    }


    def extractTitle(titles: java.util.List[StructuredProperty]): String = {
        if (titles == null)
            return null
        val label = titles.asScala.map(p => p.getValue).find(p => p.nonEmpty)
        label.orNull
    }

    def convertDLIDatasetToExternalReference(dataset: DLIDataset): DLIExternalReference = {
        val currentId = generateId(dataset.getId)
        val pids = dataset.getPid.asScala.filter(filterPid)

        if (pids == null || pids.isEmpty)
            return null

        val pid: StructuredProperty = pids.head

        pid.getQualifier.getClassname match {
            case "uniprot" => DLIExternalReference(generateId(dataset.getId), s"https://www.uniprot.org/uniprot/${pid.getValue}", "UniProt", extractTitle(dataset.getTitle), pid.getValue, "accessionNumber")
            case "ena" =>
                if (pid.getValue != null && pid.getValue.nonEmpty && pid.getValue.length > 7)
                    DLIExternalReference(generateId(dataset.getId), s"https://www.ebi.ac.uk/ena/data/view/${pid.getValue.substring(0, 8)}", "European Nucleotide Archive", extractTitle(dataset.getTitle), pid.getValue, "accessionNumber")
                else
                    null
            case "chembl" => DLIExternalReference(generateId(dataset.getId), s"https://www.ebi.ac.uk/chembl/compound_report_card/${pid.getValue}", "ChEMBL", extractTitle(dataset.getTitle), pid.getValue, "accessionNumber")
            case "ncbi-n" => DLIExternalReference(generateId(dataset.getId), s"https://www.ncbi.nlm.nih.gov/nuccore/${pid.getValue}", "Nucleotide Database", extractTitle(dataset.getTitle), pid.getValue, "accessionNumber")
            case "ncbi-p" => DLIExternalReference(generateId(dataset.getId), s"https://www.ncbi.nlm.nih.gov/nuccore/${pid.getValue}", "Nucleotide Database", extractTitle(dataset.getTitle), pid.getValue, "accessionNumber")
            case "genbank" => DLIExternalReference(generateId(dataset.getId), s"https://www.ncbi.nlm.nih.gov/nuccore/${pid.getValue}", "GenBank", extractTitle(dataset.getTitle), pid.getValue, "accessionNumber")
            case "pdb" => DLIExternalReference(generateId(dataset.getId), s"https://www.ncbi.nlm.nih.gov/nuccore/${pid.getValue}", "Protein Data Bank", extractTitle(dataset.getTitle), pid.getValue, "accessionNumber")
            case "url" => DLIExternalReference(generateId(dataset.getId), pid.getValue, "", extractTitle(dataset.getTitle), pid.getValue, "url")
        }
    }


    def convertDLIPublicationToOAF(p: DLIPublication): Publication = {
        val result = new Publication
        result.setId(generateId(p.getId))
        result.setDataInfo(generateDataInfo(invisibile = true))
        if (p.getCollectedfrom == null || p.getCollectedfrom.size() == 0 || (p.getCollectedfrom.size() == 1 && p.getCollectedfrom.get(0) == null))
            return null

        result.setCollectedfrom(p.getCollectedfrom.asScala.map(c => collectedFromMap.getOrElse(c.getKey, null)).asJava)
        result.setPid(p.getPid)
        result.setDateofcollection(p.getDateofcollection)
        result.setOriginalId(p.getPid.asScala.map(p => p.getValue).asJava)
        result.setDateoftransformation(LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'")))
        if (p.getAuthor == null || p.getAuthor.isEmpty)
            return null
        result.setAuthor(p.getAuthor.asScala.map(convertAuthor).asJava)
        result.setResulttype(createQualifier(p.getResulttype.getClassid, p.getResulttype.getClassname, "dnet:result_typologies", "dnet:result_typologies"))

        if (p.getSubject != null)
            result.setSubject(p.getSubject.asScala.map(convertSubject).asJava)

        if (p.getTitle == null || p.getTitle.isEmpty)
            return null

        result.setTitle(List(patchTitle(p.getTitle.get(0))).asJava)

        if (p.getRelevantdate == null || p.getRelevantdate.size() == 0)
            return null

        result.setRelevantdate(p.getRelevantdate.asScala.map(patchRelevantDate).asJava)

        result.setDescription(p.getDescription)

        result.setDateofacceptance(asField(p.getRelevantdate.get(0).getValue))
        result.setPublisher(p.getPublisher)
        result.setSource(p.getSource)
        result.setBestaccessright(createQualifier("UNKNOWN", "not available", "dnet:access_modes", "dnet:access_modes"))

        val dois = result.getPid.asScala.filter(p => "doi".equalsIgnoreCase(p.getQualifier.getClassname)).map(p => p.getValue)
        if (dois.isEmpty)
            return null

        val i: Instance = createInstance(s"https://dx.doi.org/${dois.head}", firstInstanceOrNull(p.getInstance()), result.getDateofacceptance)

        if (i != null)
            result.setInstance(List(i).asJava)

        result
    }


    def convertDLIRelation(r: DLIRelation): Relation = {
        val result = new Relation
        if (!relationTypeMapping.contains(r.getRelType))
            return null

        if (r.getCollectedFrom == null || r.getCollectedFrom.size() == 0 || (r.getCollectedFrom.size() == 1 && r.getCollectedFrom.get(0) == null))
            return null
        val t = relationTypeMapping.get(r.getRelType)

        result.setRelType("resultResult")
        result.setRelClass(t.get._1)
        result.setSubRelType(t.get._2)
        result.setCollectedfrom(r.getCollectedFrom.asScala.map(c => collectedFromMap.getOrElse(c.getKey, null)).filter(p => p != null).asJava)
        result.setSource(generateId(r.getSource))
        result.setTarget(generateId(r.getTarget))

        if (result.getSource.equals(result.getTarget))
            return null
        result.setDataInfo(generateDataInfo())

        result
    }


    def convertDLIDatasetTOOAF(d: DLIDataset): Dataset = {
        if (d.getCollectedfrom == null || d.getCollectedfrom.size() == 0 || (d.getCollectedfrom.size() == 1 && d.getCollectedfrom.get(0) == null))
            return null
        val result: Dataset = new Dataset
        result.setId(generateId(d.getId))
        result.setDataInfo(generateDataInfo())
        result.setCollectedfrom(d.getCollectedfrom.asScala.map(c => collectedFromMap.getOrElse(c.getKey, null)).asJava)

        result.setPid(d.getPid)

        val fpids = result.getPid.asScala.filter(p => "doi".equalsIgnoreCase(p.getQualifier.getClassname) ||
            "pdb".equalsIgnoreCase(p.getQualifier.getClassname)
        ).map(p => p.getValue)

        if (fpids == null || fpids.isEmpty)
            return null

        result.setDateofcollection(d.getDateofcollection)
        result.setOriginalId(d.getPid.asScala.map(d => d.getValue).asJava)
        result.setDateoftransformation(LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'")))
        if (d.getAuthor == null || d.getAuthor.isEmpty)
            return null
        result.setAuthor(d.getAuthor.asScala.map(convertAuthor).asJava)
        result.setResulttype(createQualifier(d.getResulttype.getClassid, d.getResulttype.getClassname, "dnet:result_typologies", "dnet:result_typologies"))

        if (d.getSubject != null)
            result.setSubject(d.getSubject.asScala.map(convertSubject).asJava)

        if (d.getTitle == null || d.getTitle.isEmpty)
            return null

        result.setTitle(List(patchTitle(d.getTitle.get(0))).asJava)

        if (d.getRelevantdate == null || d.getRelevantdate.size() == 0)
            return null

        result.setRelevantdate(d.getRelevantdate.asScala.map(patchRelevantDate).asJava)

        result.setDescription(d.getDescription)

        result.setDateofacceptance(asField(d.getRelevantdate.get(0).getValue))
        result.setPublisher(d.getPublisher)
        result.setSource(d.getSource)
        result.setBestaccessright(createQualifier("UNKNOWN", "not available", "dnet:access_modes", "dnet:access_modes"))

        val instance_urls = if (fpids.head.length < 5) s"https://www.rcsb.org/structure/${fpids.head}" else s"https://dx.doi.org/${fpids.head}"

        val i: Instance = createInstance(instance_urls, firstInstanceOrNull(d.getInstance()), result.getDateofacceptance, true)
        if (i != null)
            result.setInstance(List(i).asJava)

        result
    }


    def firstInstanceOrNull(instances: java.util.List[Instance]): Instance = {
        if (instances == null || instances.size() == 0)
            return null
        instances.get(0)
    }


    def createInstance(url: String, originalInstance: Instance, doa: Field[String], dataset: Boolean = false): Instance = {
        val i = new Instance
        i.setUrl(List(url).asJava)
        if (dataset)
            i.setInstancetype(createQualifier("0021", "Dataset", "dnet:publication_resource", "dnet:publication_resource"))
        else
            i.setInstancetype(createQualifier("0000", "UNKNOWN", "dnet:publication_resource", "dnet:publication_resource"))
        if (originalInstance != null && originalInstance.getHostedby != null)
            i.setHostedby(originalInstance.getHostedby)

        i.setAccessright(createQualifier("UNKNOWN", "not available", "dnet:access_modes", "dnet:access_modes"))
        i.setDateofacceptance(doa)

        i
    }


    def patchRelevantDate(d: StructuredProperty): StructuredProperty = {
        d.setQualifier(createQualifier("UNKNOWN", "dnet:dataCite_date"))
        d
    }

    def patchTitle(t: StructuredProperty): StructuredProperty = {
        t.setQualifier(createQualifier("main title", "dnet:dataCite_title"))
        t
    }


    def convertSubject(s: StructuredProperty): StructuredProperty = {
        s.setQualifier(createQualifier("keyword", "dnet:subject_classification_typologies"))
        s
    }


    def convertAuthor(a: Author): Author = {
        if (a == null)
            return a
        val p = new PacePerson(a.getFullname, false)
        if (p.isAccurate) {
            a.setName(p.getNameString)
            a.setSurname(p.getSurnameString)
        }
        a
    }


    def generateId(id: String): String = {
        val md5 = if (id.contains("::")) StringUtils.substringAfter(id, "::") else StringUtils.substringAfter(id, "|")
        s"50|scholix_____::$md5"
    }


    def generateKeyValue(key: String, value: String): KeyValue = {
        val kv: KeyValue = new KeyValue()
        kv.setKey(key)
        kv.setValue(value)
        kv.setDataInfo(generateDataInfo("0.9"))
        kv
    }

    def generateDataInfo(trust: String = "0.9", invisibile: Boolean = false): DataInfo = {
        val di = new DataInfo
        di.setDeletedbyinference(false)
        di.setInferred(false)
        // honour the flag passed by the callers (publications are created with invisibile = true)
        di.setInvisible(invisibile)
        di.setTrust(trust)
        di.setProvenanceaction(createQualifier("sysimport:actionset", "dnet:provenanceActions"))
        di
    }

    def createQualifier(cls: String, sch: String): Qualifier = {
        createQualifier(cls, cls, sch, sch)
    }

    def createQualifier(classId: String, className: String, schemeId: String, schemeName: String): Qualifier = {
        val q: Qualifier = new Qualifier
        q.setClassid(classId)
        q.setClassname(className)
        q.setSchemeid(schemeId)
        q.setSchemename(schemeName)
        q
    }


    def asField[T](value: T): Field[T] = {
        val tmp = new Field[T]
        tmp.setValue(value)
        tmp
    }

}

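For a quick sanity check of the helpers above, a minimal sketch follows (not part of the commit; the `DLIToOAFExample` object name and the sample call site are made up) exercising `generateId` and the two-argument `createQualifier` overload:

import eu.dnetlib.dhp.export.DLIToOAF

object DLIToOAFExample {
    def main(args: Array[String]): Unit = {
        // generateId keeps only the part after "::" (or after "|") and re-prefixes it with the scholix namespace
        val oafId = DLIToOAF.generateId("60|dli_resolver::9e117414be07bf03cbce8889d22d661a")
        println(oafId) // expected: 50|scholix_____::9e117414be07bf03cbce8889d22d661a

        // the short createQualifier overload reuses the same string for classid/classname and schemeid/schemename
        val q = DLIToOAF.createQualifier("keyword", "dnet:subject_classification_typologies")
        println(s"${q.getClassid} / ${q.getSchemename}") // keyword / dnet:subject_classification_typologies
    }
}
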
@@ -0,0 +1,118 @@

package eu.dnetlib.dhp.`export`

import eu.dnetlib.dhp.application.ArgumentApplicationParser
import eu.dnetlib.dhp.schema.oaf.{Publication, Relation, Dataset => OafDataset}
import eu.dnetlib.dhp.schema.scholexplorer.{DLIDataset, DLIPublication, DLIRelation}
import org.apache.commons.io.IOUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Dataset, Encoder, Encoders, SaveMode, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}
import org.codehaus.jackson.map.ObjectMapper
import scala.collection.mutable.ArrayBuffer


object SparkExportContentForOpenAire {

    def main(args: Array[String]): Unit = {
        val conf: SparkConf = new SparkConf()
        val parser = new ArgumentApplicationParser(IOUtils.toString(SparkExportContentForOpenAire.getClass.getResourceAsStream("input_export_content_parameters.json")))
        parser.parseArgument(args)
        val spark: SparkSession =
            SparkSession
                .builder()
                .config(conf)
                .appName(SparkExportContentForOpenAire.getClass.getSimpleName)
                .master(parser.get("master")).getOrCreate()

        val sc: SparkContext = spark.sparkContext

        val workingPath = parser.get("workingDirPath")

        implicit val pubEncoder: Encoder[Publication] = Encoders.bean(classOf[Publication])
        implicit val datEncoder: Encoder[OafDataset] = Encoders.bean(classOf[OafDataset])
        implicit val relEncoder: Encoder[Relation] = Encoders.bean(classOf[Relation])
        implicit val dliRelEncoder: Encoder[DLIRelation] = Encoders.bean(classOf[DLIRelation])
        import spark.implicits._

//
//        val relRDD:RDD[Relation] = sc.textFile(s"$workingPath/relation_j")
//            .map(s => new ObjectMapper().readValue(s, classOf[DLIRelation]))
//            .filter(p => p.getDataInfo.getDeletedbyinference == false)
//            .map(DLIToOAF.convertDLIRelation).filter(p => p != null)
//        spark.createDataset(relRDD).write.mode(SaveMode.Overwrite).save(s"$workingPath/relationDS")
//
//        val datRDD:RDD[OafDataset] = sc.textFile(s"$workingPath/dataset")
//            .map(s => new ObjectMapper().readValue(s, classOf[DLIDataset]))
//            .filter(p => p.getDataInfo.getDeletedbyinference == false)
//            .map(DLIToOAF.convertDLIDatasetTOOAF).filter(p => p != null)
//        spark.createDataset(datRDD).write.mode(SaveMode.Overwrite).save(s"$workingPath/datasetDS")
//
//
//        val pubRDD:RDD[Publication] = sc.textFile(s"$workingPath/publication")
//            .map(s => new ObjectMapper().readValue(s, classOf[DLIPublication]))
//            .filter(p => p.getDataInfo.getDeletedbyinference == false)
//            .map(DLIToOAF.convertDLIPublicationToOAF).filter(p => p != null)
//        spark.createDataset(pubRDD).write.mode(SaveMode.Overwrite).save(s"$workingPath/publicationDS")
//
//
//
//        val pubs:Dataset[Publication] = spark.read.load(s"$workingPath/publicationDS").as[Publication]
//        val dats :Dataset[OafDataset] = spark.read.load(s"$workingPath/datasetDS").as[OafDataset]
        var relDS: Dataset[Relation] = spark.read.load(s"$workingPath/relationDS").as[Relation]
//
//
//        pubs.joinWith(relDS, pubs("id").equalTo(relDS("source"))).map(k => k._2).write.mode(SaveMode.Overwrite).save(s"$workingPath/relationDS_f1")
//
//        relDS = spark.read.load(s"$workingPath/relationDS_f1").as[Relation]
//
//        relDS.joinWith(dats, relDS("target").equalTo(dats("id"))).map(k => k._1).write.mode(SaveMode.Overwrite).save(s"$workingPath/relationDS_filtered")
//
//
//        val r_source = relDS.select(relDS("source")).distinct()
//        val r_target = relDS.select(relDS("source")).distinct()
//
//
//        pubs.joinWith(r_source, pubs("id").equalTo(r_source("source")), "inner").map(k => k._1).write.mode(SaveMode.Overwrite).save(s"$workingPath/publicationDS_filtered")
//
//        dats.joinWith(r_target, dats("id").equalTo(r_target("target")), "inner").map(k => k._1).write.mode(SaveMode.Overwrite).save(s"$workingPath/datasetDS_filtered")
//
//        spark.createDataset(sc.textFile(s"$workingPath/dataset")
//            .map(s => new ObjectMapper().readValue(s, classOf[DLIDataset]))
//            .map(DLIToOAF.convertDLIDatasetToExternalReference)
//            .filter(p => p != null)).as[DLIExternalReference].write.mode(SaveMode.Overwrite).save(s"$workingPath/externalReference")
//

        val pf = spark.read.load(s"$workingPath/publicationDS_filtered").select("id")
        relDS = spark.read.load(s"$workingPath/relationDS").as[Relation]
        val relationTo = pf.joinWith(relDS, pf("id").equalTo(relDS("source")), "inner").map(t => t._2)

        val extRef = spark.read.load(s"$workingPath/externalReference").as[DLIExternalReference]

        spark.createDataset(relationTo.joinWith(extRef, relationTo("target").equalTo(extRef("id")), "inner").map(d => {
            val r = d._1
            val ext = d._2
            (r.getSource, ext)
        }).rdd.groupByKey.map(f => {
            var dli_ext = ArrayBuffer[DLIExternalReference]()
            f._2.foreach(d => if (dli_ext.size < 100) dli_ext += d)
            (f._1, dli_ext)
        })).write.mode(SaveMode.Overwrite).save(s"$workingPath/externalReference_grouped")

    }

}

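Most of the pipeline stages above are left commented out. As a reference, here is a self-contained sketch of the first of them (relation conversion), written as a helper that could be invoked once `$workingPath/relation_j` exists; the `ConvertRelationsSketch` name is made up, everything else mirrors the commented code:

import eu.dnetlib.dhp.export.DLIToOAF
import eu.dnetlib.dhp.schema.oaf.Relation
import eu.dnetlib.dhp.schema.scholexplorer.DLIRelation
import org.apache.spark.sql.{Encoder, Encoders, SaveMode, SparkSession}
import org.codehaus.jackson.map.ObjectMapper

object ConvertRelationsSketch {
    def run(spark: SparkSession, workingPath: String): Unit = {
        implicit val relEncoder: Encoder[Relation] = Encoders.bean(classOf[Relation])
        val relRDD = spark.sparkContext.textFile(s"$workingPath/relation_j")
            .map(s => new ObjectMapper().readValue(s, classOf[DLIRelation])) // one DLI relation serialized per line
            .filter(p => p.getDataInfo.getDeletedbyinference == false)       // keep only records not deleted by inference
            .map(DLIToOAF.convertDLIRelation)                                // map to the OAF Relation model
            .filter(p => p != null)                                          // convertDLIRelation returns null for unmappable relations
        spark.createDataset(relRDD).write.mode(SaveMode.Overwrite).save(s"$workingPath/relationDS")
    }
}
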
@@ -0,0 +1,14 @@
[
  {
    "paramName": "mt",
    "paramLongName": "master",
    "paramDescription": "should be local or yarn",
    "paramRequired": true
  },
  {
    "paramName": "w",
    "paramLongName": "workingDirPath",
    "paramDescription": "the working path where the generated files are stored",
    "paramRequired": true
  }
]
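A hedged sketch of launching the job locally with the two parameters defined above (the `RunExportLocally` wrapper and the scratch directory are made up; it assumes ArgumentApplicationParser accepts the short option names from this file):

import eu.dnetlib.dhp.`export`.SparkExportContentForOpenAire

object RunExportLocally {
    def main(args: Array[String]): Unit = {
        SparkExportContentForOpenAire.main(Array(
            "-mt", "local[*]",          // --master
            "-w", "/tmp/scholix_export" // --workingDirPath (made-up path)
        ))
    }
}
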
@@ -0,0 +1,42 @@
<configuration>
    <property>
        <name>jobTracker</name>
        <value>yarnRM</value>
    </property>
    <property>
        <name>nameNode</name>
        <value>hdfs://nameservice1</value>
    </property>
    <property>
        <name>oozie.use.system.libpath</name>
        <value>true</value>
    </property>
    <property>
        <name>oozie.action.sharelib.for.spark</name>
        <value>spark2</value>
    </property>
    <property>
        <name>oozie.wf.rerun.failnodes</name>
        <value>false</value>
    </property>
    <property>
        <name>hive_metastore_uris</name>
        <value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
    </property>
    <property>
        <name>spark2YarnHistoryServerAddress</name>
        <value>http://iis-cdh5-test-gw.ocean.icm.edu.pl:18089</value>
    </property>
    <property>
        <name>spark2EventLogDir</name>
        <value>/user/spark/spark2ApplicationHistory</value>
    </property>
    <property>
        <name>spark2ExtraListeners</name>
        <value>"com.cloudera.spark.lineage.NavigatorAppListener"</value>
    </property>
    <property>
        <name>spark2SqlQueryExecutionListeners</name>
        <value>"com.cloudera.spark.lineage.NavigatorQueryListener"</value>
    </property>
</configuration>

@@ -0,0 +1,49 @@
<workflow-app name="Export Scholexplorer Graph to OpenAIRE" xmlns="uri:oozie:workflow:0.5">
    <parameters>
        <property>
            <name>workingDirPath</name>
            <description>the working dir path</description>
        </property>
        <property>
            <name>sparkDriverMemory</name>
            <description>memory for driver process</description>
        </property>
        <property>
            <name>sparkExecutorMemory</name>
            <description>memory for individual executor</description>
        </property>
        <property>
            <name>sparkExecutorCores</name>
            <description>number of cores for individual executor</description>
        </property>
    </parameters>

    <start to="ExtractOAF"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <action name="ExtractOAF">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn-cluster</master>
            <mode>cluster</mode>
            <name>ExtractOAF</name>
            <class>eu.dnetlib.dhp.export.SparkExportContentForOpenAire</class>
            <jar>dhp-graph-provision-scholexplorer-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.sql.shuffle.partitions=3840
                ${sparkExtraOPT}
            </spark-opts>
            <arg>--workingDirPath</arg><arg>${workingDirPath}</arg>
            <arg>--master</arg><arg>yarn-cluster</arg>
        </spark>
        <ok to="End"/>
        <error to="Kill"/>
    </action>

    <end name="End"/>
</workflow-app>

@@ -0,0 +1,75 @@
package eu.dnetlib.dhp.export

import java.time.LocalDateTime
import java.time.format.DateTimeFormatter

import eu.dnetlib.dhp.schema.oaf.Relation
import eu.dnetlib.dhp.schema.scholexplorer.{DLIDataset, DLIPublication, DLIRelation}
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.codehaus.jackson.map.{ObjectMapper, SerializationConfig}
import org.junit.jupiter.api.Test

import scala.io.Source

class ExportDLITOOAFTest {

    val mapper = new ObjectMapper()

    @Test
    def testDate(): Unit = {
        println(LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'")))
    }

    @Test
    def testPublicationMapping(): Unit = {
        mapper.getSerializationConfig.enable(SerializationConfig.Feature.INDENT_OUTPUT)
        val json = Source.fromInputStream(getClass.getResourceAsStream("publication.json")).mkString

        val oaf = DLIToOAF.convertDLIPublicationToOAF(mapper.readValue(json, classOf[DLIPublication]))

        println(mapper.writeValueAsString(oaf))
    }


    @Test
    def testExternalReferenceMapping(): Unit = {
        mapper.getSerializationConfig.enable(SerializationConfig.Feature.INDENT_OUTPUT)
        val json = Source.fromInputStream(getClass.getResourceAsStream("dataset.json")).mkString

        val oaf = DLIToOAF.convertDLIDatasetToExternalReference(mapper.readValue(json, classOf[DLIDataset]))

        println(oaf)
    }


    @Test
    def testRelationMapping(): Unit = {
        mapper.getSerializationConfig.enable(SerializationConfig.Feature.INDENT_OUTPUT)
        val json = Source.fromInputStream(getClass.getResourceAsStream("relation.json")).mkString

        val oaf = DLIToOAF.convertDLIRelation(mapper.readValue(json, classOf[DLIRelation]))

        println(mapper.writeValueAsString(oaf))
    }

}

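The tests above only print the mapped objects. A minimal sketch of an assertion-based variant for the relation mapping follows, as a method that could be added to ExportDLITOOAFTest (the method name is made up; the expected values follow relationTypeMapping and generateId as defined in DLIToOAF, applied to the relation.json fixture below):

    @Test
    def testRelationMappingAssertions(): Unit = {
        val json = Source.fromInputStream(getClass.getResourceAsStream("relation.json")).mkString
        val rel = DLIToOAF.convertDLIRelation(mapper.readValue(json, classOf[DLIRelation]))
        assert(rel != null)
        assert(rel.getRelType == "resultResult")   // every mapped relation is typed resultResult
        assert(rel.getRelClass == "isRelatedTo")   // "IsReferencedBy" maps to isRelatedTo / relationship
        assert(rel.getSource == "50|scholix_____::4ee78ab329b49416b45c3774c132f244")
    }
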
@@ -0,0 +1,101 @@
{
  "dataInfo": {
    "invisible": false,
    "inferred": null,
    "deletedbyinference": false,
    "trust": "0.9",
    "inferenceprovenance": null,
    "provenanceaction": null
  },
  "lastupdatetimestamp": null,
  "id": "60|719f19e5a996de1b87cddf93871bf2d4",
  "originalId": [
    "a0a3p2gws9::uniprot"
  ],
  "collectedfrom": [
    {
      "key": "dli_________::europe_pmc__",
      "value": "Europe PMC",
      "dataInfo": null
    }
  ],
  "pid": [
    {
      "value": "acc63471",
      "qualifier": {
        "classid": "ena",
        "classname": "ena",
        "schemeid": "dnet:pid_types",
        "schemename": "dnet:pid_types"
      },
      "dataInfo": null
    }
  ],
  "dateofcollection": "2019-07-05T12:47:11.545+02:00",
  "dateoftransformation": null,
  "extraInfo": null,
  "oaiprovenance": null,
  "author": null,
  "resulttype": {
    "classid": "dataset",
    "classname": "dataset",
    "schemeid": "dataset",
    "schemename": "dataset"
  },
  "language": null,
  "country": null,
  "subject": [],
  "title": [
    {
      "value": "CMD domain-containing protein",
      "qualifier": null,
      "dataInfo": null
    }
  ],
  "relevantdate": [
    {
      "value": "2019-07-15T16:14:28.636",
      "qualifier": {
        "classid": "resolvedDate",
        "classname": "resolvedDate",
        "schemeid": "dnet::date",
        "schemename": "dnet::date"
      },
      "dataInfo": null
    }
  ],
  "description": null,
  "dateofacceptance": null,
  "publisher": {
    "value": "UniProt",
    "dataInfo": null
  },
  "embargoenddate": null,
  "source": null,
  "fulltext": null,
  "format": null,
  "contributor": null,
  "resourcetype": null,
  "coverage": null,
  "bestaccessright": null,
  "context": null,
  "externalReference": null,
  "instance": [],
  "storagedate": null,
  "device": null,
  "size": null,
  "version": null,
  "lastmetadataupdate": null,
  "metadataversionnumber": null,
  "geolocation": null,
  "originalObjIdentifier": "europe_pmc__::719f19e5a996de1b87cddf93871bf2d4",
  "dlicollectedfrom": [
    {
      "id": "dli_________::europe_pmc__",
      "name": "Europe PMC",
      "completionStatus": "complete",
      "collectionMode": null
    }
  ],
  "completionStatus": "complete"
}

@@ -0,0 +1,128 @@
{
  "dataInfo": {
    "invisible": false,
    "inferred": null,
    "deletedbyinference": false,
    "trust": "0.9",
    "inferenceprovenance": null,
    "provenanceaction": null
  },
  "lastupdatetimestamp": null,
  "id": "50|9e117414be07bf03cbce8889d22d661a",
  "originalId": [
    "9e117414be07bf03cbce8889d22d661a"
  ],
  "collectedfrom": [
    {
      "key": "dli_________::crossref",
      "value": "Crossref",
      "dataInfo": null
    }
  ],
  "pid": [
    {
      "value": "10.1007/978-94-017-3490-5_15",
      "qualifier": {
        "classid": "doi",
        "classname": "doi",
        "schemeid": "dnet:pid_types",
        "schemename": "dnet:pid_types"
      },
      "dataInfo": null
    }
  ],
  "dateofcollection": "2020-06-08T07:28:55.731Z",
  "dateoftransformation": null,
  "extraInfo": null,
  "oaiprovenance": null,
  "author": [
    {
      "fullname": "Calcaterra Domenico",
      "name": null,
      "surname": null,
      "rank": null,
      "pid": null,
      "affiliation": null
    },
    {
      "fullname": "Parise Mario",
      "name": null,
      "surname": null,
      "rank": null,
      "pid": null,
      "affiliation": null
    }
  ],
  "resulttype": {
    "classid": "publication",
    "classname": "publication",
    "schemeid": "publication",
    "schemename": "publication"
  },
  "language": null,
  "country": null,
  "subject": [
    {
      "value": "Strain-linked information about bacterial and archaeal biodiversity",
      "qualifier": {
        "classid": "dnet:subject",
        "classname": "dnet:subject",
        "schemeid": "",
        "schemename": ""
      },
      "dataInfo": null
    }
  ],
  "title": [
    {
      "value": "The Contribution of Historical Information in the Assessment of Landslide Hazard",
      "qualifier": null,
      "dataInfo": null
    }
  ],
  "relevantdate": [
    {
      "value": "2013-01-29T16:50:44Z",
      "qualifier": {
        "classid": "date",
        "classname": "date",
        "schemeid": "dnet::date",
        "schemename": "dnet::date"
      },
      "dataInfo": null
    }
  ],
  "description": [
    {
      "value": null,
      "dataInfo": null
    }
  ],
  "dateofacceptance": null,
  "publisher": {
    "value": "Springer Netherlands",
    "dataInfo": null
  },
  "embargoenddate": null,
  "source": null,
  "fulltext": null,
  "format": null,
  "contributor": null,
  "resourcetype": null,
  "coverage": null,
  "bestaccessright": null,
  "context": null,
  "externalReference": null,
  "instance": [],
  "journal": null,
  "originalObjIdentifier": "dli_resolver::9e117414be07bf03cbce8889d22d661a",
  "dlicollectedfrom": [
    {
      "id": "dli_________::crossref",
      "name": "Crossref",
      "completionStatus": "complete",
      "collectionMode": "resolved"
    }
  ],
  "completionStatus": "complete"
}

@@ -0,0 +1,23 @@
{
  "subRelType": null,
  "relClass": "datacite",
  "dataInfo": {
    "deletedbyinference": false,
    "provenanceaction": null,
    "inferred": null,
    "inferenceprovenance": null,
    "invisible": false,
    "trust": "0.9"
  },
  "target": "50|00062410e2a15322480277d063c181bb",
  "lastupdatetimestamp": null,
  "relType": "IsReferencedBy",
  "source": "60|4ee78ab329b49416b45c3774c132f244",
  "collectedFrom": [
    {
      "dataInfo": null,
      "value": "Europe PMC",
      "key": "dli_________::europe_pmc__"
    }
  ]
}

File diff suppressed because one or more lines are too long