merged from beta. It compiles

This commit is contained in:
Claudio Atzori 2023-03-10 16:00:48 +01:00
commit 2a914e4463
124 changed files with 5196 additions and 3731 deletions

View File

@ -0,0 +1,100 @@
package eu.dnetlib.dhp.common;
/**
* This utility class represents the Metadata Store information
* needed during the migration from MongoDB to HDFS.
*/
public class MDStoreInfo {
private String mdstore;
private String currentId;
private Long latestTimestamp;
/**
* Instantiates a new Md store info.
*/
public MDStoreInfo() {
}
/**
* Instantiates a new Md store info.
*
* @param mdstore the mdstore
* @param currentId the current id
* @param latestTimestamp the latest timestamp
*/
public MDStoreInfo(String mdstore, String currentId, Long latestTimestamp) {
this.mdstore = mdstore;
this.currentId = currentId;
this.latestTimestamp = latestTimestamp;
}
/**
* Gets mdstore.
*
* @return the mdstore
*/
public String getMdstore() {
return mdstore;
}
/**
* Sets mdstore.
*
* @param mdstore the mdstore
* @return this MDStoreInfo instance
*/
public MDStoreInfo setMdstore(String mdstore) {
this.mdstore = mdstore;
return this;
}
/**
* Gets current id.
*
* @return the current id
*/
public String getCurrentId() {
return currentId;
}
/**
* Sets current id.
*
* @param currentId the current id
* @return this MDStoreInfo instance
*/
public MDStoreInfo setCurrentId(String currentId) {
this.currentId = currentId;
return this;
}
/**
* Gets latest timestamp.
*
* @return the latest timestamp
*/
public Long getLatestTimestamp() {
return latestTimestamp;
}
/**
* Sets latest timestamp.
*
* @param latestTimestamp the latest timestamp
* @return this MDStoreInfo instance
*/
public MDStoreInfo setLatestTimestamp(Long latestTimestamp) {
this.latestTimestamp = latestTimestamp;
return this;
}
@Override
public String toString() {
return "MDStoreInfo{" +
"mdstore='" + mdstore + '\'' +
", currentId='" + currentId + '\'' +
", latestTimestamp=" + latestTimestamp +
'}';
}
}
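A minimal usage sketch for the new bean (identifier and timestamp values are invented; Jackson is used here only because the MdStoreClientTest further below already serializes MDStoreInfo the same way):

import com.fasterxml.jackson.databind.ObjectMapper;

public class MDStoreInfoExample {
	public static void main(String[] args) throws Exception {
		// Invented values: real ids and timestamps come from the mdstore MongoDB.
		MDStoreInfo info = new MDStoreInfo()
			.setMdstore("md-1234")
			.setCurrentId("md-1234_VERSION")
			.setLatestTimestamp(1678464048000L);
		System.out.println(new ObjectMapper().writeValueAsString(info));
	}
}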

View File

@ -1,12 +1,12 @@
package eu.dnetlib.dhp.common;
import static com.mongodb.client.model.Sorts.descending;
import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import org.apache.commons.lang3.StringUtils;
@ -38,6 +38,26 @@ public class MdstoreClient implements Closeable {
this.db = getDb(client, dbName);
}
private Long parseTimestamp(Document f) {
if (f == null || !f.containsKey("timestamp"))
return null;
Object ts = f.get("timestamp");
return Long.parseLong(ts.toString());
}
public Long getLatestTimestamp(final String collectionId) {
MongoCollection<Document> collection = db.getCollection(collectionId);
FindIterable<Document> result = collection.find().sort(descending("timestamp")).limit(1);
if (result == null) {
return null;
}
Document f = result.first();
return parseTimestamp(f);
}
public MongoCollection<Document> mdStore(final String mdId) {
BasicDBObject query = (BasicDBObject) QueryBuilder.start("mdId").is(mdId).get();
@ -54,6 +74,16 @@ public class MdstoreClient implements Closeable {
return getColl(db, currentId, true);
}
public List<MDStoreInfo> mdStoreWithTimestamp(final String mdFormat, final String mdLayout,
final String mdInterpretation) {
Map<String, String> res = validCollections(mdFormat, mdLayout, mdInterpretation);
return res
.entrySet()
.stream()
.map(e -> new MDStoreInfo(e.getKey(), e.getValue(), getLatestTimestamp(e.getValue())))
.collect(Collectors.toList());
}
public Map<String, String> validCollections(
final String mdFormat, final String mdLayout, final String mdInterpretation) {

View File

@ -13,6 +13,8 @@ import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Encoders;
import com.github.sisyphsu.dateparser.DateParserUtils;
import com.google.common.collect.Lists;

View File

@ -129,7 +129,7 @@ public class MergeUtils {
return (T) mergedEntity;
}
private static <T extends Oaf> T mergeRelation(T left, T right) {
public static <T extends Oaf> T mergeRelation(T left, T right) {
Relation original = (Relation) left;
Relation enrich = (Relation) right;

View File

@ -165,6 +165,21 @@ public class OafMapperUtils {
return ap;
}
public static AuthorPid authorPid(
final String value,
final String classid,
final String schemeid,
final DataInfo dataInfo) {
if (value == null) {
return null;
}
final AuthorPid ap = new AuthorPid();
ap.setValue(value);
ap.setQualifier(qualifier(classid, classid, schemeid));
ap.setDataInfo(dataInfo);
return ap;
}
public static ExtraInfo extraInfo(
final String name,
final String value,

View File

@ -0,0 +1,36 @@
package eu.dnetlib.dhp.common;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import org.junit.jupiter.api.Test;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
public class MdStoreClientTest {
@Test
public void testMongoCollection() throws IOException {
final MdstoreClient client = new MdstoreClient("mongodb://localhost:27017", "mdstore");
final ObjectMapper mapper = new ObjectMapper();
final List<MDStoreInfo> infos = client.mdStoreWithTimestamp("ODF", "store", "cleaned");
infos.forEach(System.out::println);
final String s = mapper.writeValueAsString(infos);
Path fileName = Paths.get("/Users/sandro/mdstore_info.json");
// Writing into the file
Files.write(fileName, s.getBytes(StandardCharsets.UTF_8));
}
}

View File

@ -107,7 +107,7 @@
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.shuffle.partitions=2560
--conf spark.sql.shuffle.partitions=7000
</spark-opts>
<arg>--inputGraphTablePath</arg><arg>${inputGraphRootPath}/dataset</arg>
<arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
@ -159,7 +159,7 @@
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.shuffle.partitions=2560
--conf spark.sql.shuffle.partitions=7000
</spark-opts>
<arg>--inputGraphTablePath</arg><arg>${workingDir}/dataset</arg>
<arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>

View File

@ -107,7 +107,7 @@
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.shuffle.partitions=5000
--conf spark.sql.shuffle.partitions=7000
</spark-opts>
<arg>--inputGraphTablePath</arg><arg>${inputGraphRootPath}/publication</arg>
<arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
@ -159,7 +159,7 @@
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.shuffle.partitions=5000
--conf spark.sql.shuffle.partitions=7000
</spark-opts>
<arg>--inputGraphTablePath</arg><arg>${workingDir}/publication</arg>
<arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>

View File

@ -99,7 +99,7 @@
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.shuffle.partitions=5000
--conf spark.sql.shuffle.partitions=10000
</spark-opts>
<arg>--inputGraphTablePath</arg><arg>${inputGraphRootPath}/relation</arg>
<arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Relation</arg>

View File

@ -23,6 +23,7 @@ public class Constants {
public static final String DOI_CLASSNAME = "Digital Object Identifier";
public static final String DEFAULT_DELIMITER = ",";
public static final String DEFAULT_FOS_DELIMITER = "\t";
public static final String UPDATE_DATA_INFO_TYPE = "update";
public static final String UPDATE_SUBJECT_FOS_CLASS_ID = "subject:fos";
@ -86,7 +87,7 @@ public class Constants {
public static Subject getSubject(String sbj, String classid, String classname,
String diqualifierclassid) {
if (sbj.equals(NULL))
if (sbj == null || sbj.equals(NULL))
return null;
Subject s = new Subject();
s.setValue(sbj);

View File

@ -1,7 +1,7 @@
package eu.dnetlib.dhp.actionmanager.createunresolvedentities;
import static eu.dnetlib.dhp.actionmanager.Constants.DEFAULT_DELIMITER;
import static eu.dnetlib.dhp.actionmanager.Constants.DEFAULT_FOS_DELIMITER;
import static eu.dnetlib.dhp.actionmanager.Constants.isSparkSessionManaged;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
@ -9,8 +9,6 @@ import java.io.Serializable;
import java.util.Optional;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.*;
@ -49,7 +47,7 @@ public class GetFOSSparkJob implements Serializable {
final String delimiter = Optional
.ofNullable(parser.get("delimiter"))
.orElse(DEFAULT_DELIMITER);
.orElse(DEFAULT_FOS_DELIMITER);
SparkConf sconf = new SparkConf();
runWithSparkSession(

View File

@ -266,11 +266,15 @@ public class PrepareProgramme {
String code = csvProgramme.getCode();
if (!code.endsWith(".") && !code.contains("Euratom")
&& !code.equals("H2020-EC"))
&& !code.equals("H2020-EC") && !code.equals("H2020") &&
!code.equals("H2020-Topics"))
code += ".";
csvProgramme.setClassification(map.get(code)._1());
csvProgramme.setClassification_short(map.get(code)._2());
if (map.containsKey(code)) {
csvProgramme.setClassification(map.get(code)._1());
csvProgramme.setClassification_short(map.get(code)._2());
} else
log.info("WARNING: No entry in map for code " + code);
return csvProgramme;
}).collect();
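Stripped of the Spark plumbing, the code normalization above amounts to this small standalone sketch (the method name is illustrative, not part of the commit):

static String normalizeProgrammeCode(String code) {
	// A trailing dot is appended before the classification lookup, except for
	// the special cases listed above; codes missing from the map are only logged.
	boolean keepAsIs = code.endsWith(".") || code.contains("Euratom")
		|| code.equals("H2020-EC") || code.equals("H2020") || code.equals("H2020-Topics");
	return keepAsIs ? code : code + ".";
}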

View File

@ -3,12 +3,23 @@ package eu.dnetlib.dhp.actionmanager.project;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.util.*;
import java.util.zip.GZIPOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.rdd.RDD;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
@ -19,6 +30,7 @@ import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.actionmanager.project.utils.model.CSVProject;
import eu.dnetlib.dhp.actionmanager.project.utils.model.Project;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.HdfsSupport;
import scala.Tuple2;
@ -54,6 +66,9 @@ public class PrepareProjects {
final String projectPath = parser.get("projectPath");
log.info("projectPath {}: ", projectPath);
final String workingPath = parser.get("workingPath");
log.info("workingPath {}: ", workingPath);
final String outputPath = parser.get("outputPath");
log.info("outputPath {}: ", outputPath);
@ -76,7 +91,7 @@ public class PrepareProjects {
}
private static void exec(SparkSession spark, String projectPath, String dbProjectPath, String outputPath) {
Dataset<CSVProject> project = readPath(spark, projectPath, CSVProject.class);
Dataset<Project> project = readPath(spark, projectPath, Project.class);
Dataset<ProjectSubset> dbProjects = readPath(spark, dbProjectPath, ProjectSubset.class);
dbProjects
@ -90,14 +105,14 @@ public class PrepareProjects {
}
private static FlatMapFunction<Tuple2<ProjectSubset, CSVProject>, CSVProject> getTuple2CSVProjectFlatMapFunction() {
private static FlatMapFunction<Tuple2<ProjectSubset, Project>, CSVProject> getTuple2CSVProjectFlatMapFunction() {
return value -> {
Optional<CSVProject> csvProject = Optional.ofNullable(value._2());
List<CSVProject> csvProjectList = new ArrayList<>();
if (csvProject.isPresent()) {
if (Optional.ofNullable(value._2()).isPresent()) {
Project project = value._2();
String[] programme = csvProject.get().getProgramme().split(";");
String topic = csvProject.get().getTopics();
String[] programme = project.getLegalBasis().split(";");
String topic = project.getTopics();
Arrays
.stream(programme)
@ -106,7 +121,7 @@ public class PrepareProjects {
proj.setTopics(topic);
proj.setProgramme(p);
proj.setId(csvProject.get().getId());
proj.setId(project.getId());
csvProjectList.add(proj);
});
}
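The fan-out above can be read as the following standalone sketch (the helper name is illustrative; Project and CSVProject are the models touched by this commit):

import java.util.ArrayList;
import java.util.List;

static List<CSVProject> explode(Project project) {
	// One CSVProject per programme code in the semicolon-separated legalBasis.
	List<CSVProject> csvProjects = new ArrayList<>();
	for (String p : project.getLegalBasis().split(";")) {
		CSVProject proj = new CSVProject();
		proj.setId(project.getId());
		proj.setTopics(project.getTopics());
		proj.setProgramme(p);
		csvProjects.add(proj);
	}
	return csvProjects;
}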

View File

@ -24,6 +24,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.actionmanager.project.utils.model.CSVProgramme;
import eu.dnetlib.dhp.actionmanager.project.utils.model.CSVProject;
import eu.dnetlib.dhp.actionmanager.project.utils.model.EXCELTopic;
import eu.dnetlib.dhp.actionmanager.project.utils.model.JsonTopic;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.schema.action.AtomicAction;
@ -111,7 +112,7 @@ public class SparkAtomicActionJob {
Dataset<CSVProject> project = readPath(spark, projectPatH, CSVProject.class);
Dataset<CSVProgramme> programme = readPath(spark, programmePath, CSVProgramme.class);
Dataset<EXCELTopic> topic = readPath(spark, topicPath, EXCELTopic.class);
Dataset<JsonTopic> topic = readPath(spark, topicPath, JsonTopic.class);
Dataset<Project> aaproject = project
.joinWith(programme, project.col("programme").equalTo(programme.col("code")), "left")
@ -125,9 +126,7 @@ public class SparkAtomicActionJob {
Project pp = new Project();
pp
.setId(
createOpenaireId(
ModelSupport.entityIdPrefix.get("project"),
"corda__h2020", csvProject.getId()));
csvProject.getId());
pp.setH2020topiccode(csvProject.getTopics());
H2020Programme pm = new H2020Programme();
H2020Classification h2020classification = new H2020Classification();
@ -145,10 +144,15 @@ public class SparkAtomicActionJob {
.filter(Objects::nonNull);
aaproject
.joinWith(topic, aaproject.col("h2020topiccode").equalTo(topic.col("code")), "left")
.map((MapFunction<Tuple2<Project, EXCELTopic>, Project>) p -> {
Optional<EXCELTopic> op = Optional.ofNullable(p._2());
.joinWith(topic, aaproject.col("id").equalTo(topic.col("projectID")), "left")
.map((MapFunction<Tuple2<Project, JsonTopic>, Project>) p -> {
Optional<JsonTopic> op = Optional.ofNullable(p._2());
Project rp = p._1();
rp
.setId(
createOpenaireId(
ModelSupport.entityIdPrefix.get("project"),
"corda__h2020", rp.getId()));
op.ifPresent(excelTopic -> rp.setH2020topicdescription(excelTopic.getTitle()));
return rp;
}, Encoders.bean(Project.class))

View File

@ -22,6 +22,7 @@ import eu.dnetlib.dhp.actionmanager.project.utils.model.EXCELTopic;
/**
* Reads a generic excel file and maps it into classes that mirror its schema
*/
@Deprecated
public class EXCELParser {
public <R> List<R> parse(InputStream file, String classForName, String sheetName)

View File

@ -0,0 +1,101 @@
package eu.dnetlib.dhp.actionmanager.project.utils;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.actionmanager.project.PrepareProjects;
import eu.dnetlib.dhp.actionmanager.project.utils.model.Project;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
/**
* @author miriam.baglioni
* @Date 28/02/23
*/
public class ExtractFromZip implements Serializable {
private static final Logger log = LoggerFactory.getLogger(ExtractFromZip.class);
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
public static void main(String[] args) throws Exception {
String jsonConfiguration = IOUtils
.toString(
PrepareProjects.class
.getResourceAsStream(
"/eu/dnetlib/dhp/actionmanager/project/extract_fromzip_parameters.json"));
final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
parser.parseArgument(args);
final String inputPath = parser.get("inputPath");
log.info("inputPath {}: ", inputPath);
final String outputPath = parser.get("outputPath");
log.info("outputPath {}: ", outputPath);
final String hdfsNameNode = parser.get("hdfsNameNode");
log.info("hdfsNameNode {}", hdfsNameNode);
Configuration conf = new Configuration();
conf.set("fs.defaultFS", hdfsNameNode);
FileSystem fs = FileSystem.get(conf);
doExtract(inputPath, outputPath, fs);
}
private static void doExtract(String inputFile, String workingPath, FileSystem fileSystem)
throws IOException {
final Path path = new Path(inputFile);
FSDataInputStream project_zip = fileSystem.open(path);
try (ZipInputStream zis = new ZipInputStream(project_zip)) {
ZipEntry entry = null;
while ((entry = zis.getNextEntry()) != null) {
if (!entry.isDirectory()) {
String fileName = entry.getName();
byte buffer[] = new byte[1024];
int count;
try (
FSDataOutputStream out = fileSystem
.create(new Path(workingPath + fileName))) {
while ((count = zis.read(buffer, 0, buffer.length)) != -1)
out.write(buffer, 0, count);
}
}
}
}
}
}

View File

@ -6,7 +6,9 @@ import java.util.Optional;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.collection.GetCSV;
@ -40,8 +42,11 @@ public class ReadCSV {
conf.set("fs.defaultFS", hdfsNameNode);
FileSystem fileSystem = FileSystem.get(conf);
FSDataInputStream inputStream = fileSystem.open(new Path(fileURL));
BufferedReader reader = new BufferedReader(
new InputStreamReader(new HttpConnector2().getInputSourceAsStream(fileURL)));
new InputStreamReader(inputStream));
GetCSV.getCsv(fileSystem, reader, hdfsPath, classForName, del);

View File

@ -0,0 +1,90 @@
package eu.dnetlib.dhp.actionmanager.project.utils;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.actionmanager.project.PrepareProjects;
import eu.dnetlib.dhp.actionmanager.project.utils.model.Project;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
/**
* @author miriam.baglioni
* @Date 28/02/23
*/
public class ReadProjects implements Serializable {
private static final Logger log = LoggerFactory.getLogger(ReadProjects.class);
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
public static void main(String[] args) throws Exception {
String jsonConfiguration = IOUtils
.toString(
PrepareProjects.class
.getResourceAsStream(
"/eu/dnetlib/dhp/actionmanager/project/read_parameters.json"));
final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
parser.parseArgument(args);
final String inputPath = parser.get("inputPath");
log.info("inputPath {}: ", inputPath);
final String outputPath = parser.get("outputPath");
log.info("outputPath {}: ", outputPath);
final String hdfsNameNode = parser.get("hdfsNameNode");
log.info("hdfsNameNode {}", hdfsNameNode);
Configuration conf = new Configuration();
conf.set("fs.defaultFS", hdfsNameNode);
FileSystem fs = FileSystem.get(conf);
readProjects(inputPath, outputPath, fs);
}
public static void readProjects(String inputFile, String workingPath, FileSystem fs) throws IOException {
Path hdfsreadpath = new Path(inputFile);
FSDataInputStream inputStream = fs.open(hdfsreadpath);
ArrayList<Project> projects = OBJECT_MAPPER
.readValue(
IOUtils.toString(inputStream, "UTF-8"),
new TypeReference<List<Project>>() {
});
Path hdfsWritePath = new Path(workingPath);
if (fs.exists(hdfsWritePath)) {
fs.delete(hdfsWritePath, false);
}
FSDataOutputStream fos = fs.create(hdfsWritePath);
try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(fos, StandardCharsets.UTF_8))) {
for (Project p : projects) {
writer.write(OBJECT_MAPPER.writeValueAsString(p));
writer.newLine();
}
}
}
}

View File

@ -0,0 +1,92 @@
package eu.dnetlib.dhp.actionmanager.project.utils;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.actionmanager.project.PrepareProjects;
import eu.dnetlib.dhp.actionmanager.project.utils.model.JsonTopic;
import eu.dnetlib.dhp.actionmanager.project.utils.model.Project;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
/**
* @author miriam.baglioni
* @Date 28/02/23
*/
public class ReadTopics implements Serializable {
private static final Logger log = LoggerFactory.getLogger(ReadTopics.class);
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
public static void main(String[] args) throws Exception {
String jsonConfiguration = IOUtils
.toString(
PrepareProjects.class
.getResourceAsStream(
"/eu/dnetlib/dhp/actionmanager/project/read_parameters.json"));
final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
parser.parseArgument(args);
final String inputPath = parser.get("inputPath");
log.info("inputPath {}: ", inputPath);
final String outputPath = parser.get("outputPath");
log.info("outputPath {}: ", outputPath);
final String hdfsNameNode = parser.get("hdfsNameNode");
log.info("hdfsNameNode {}", hdfsNameNode);
Configuration conf = new Configuration();
conf.set("fs.defaultFS", hdfsNameNode);
FileSystem fs = FileSystem.get(conf);
readTopics(inputPath, outputPath, fs);
}
public static void readTopics(String inputFile, String workingPath, FileSystem fs) throws IOException {
Path hdfsreadpath = new Path(inputFile);
FSDataInputStream inputStream = fs.open(hdfsreadpath);
ArrayList<JsonTopic> topics = OBJECT_MAPPER
.readValue(
IOUtils.toString(inputStream, "UTF-8"),
new TypeReference<List<JsonTopic>>() {
});
Path hdfsWritePath = new Path(workingPath);
if (fs.exists(hdfsWritePath)) {
fs.delete(hdfsWritePath, false);
}
FSDataOutputStream fos = fs.create(hdfsWritePath);
try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(fos, StandardCharsets.UTF_8))) {
for (JsonTopic p : topics) {
writer.write(OBJECT_MAPPER.writeValueAsString(p));
writer.newLine();
}
}
}
}

View File

@ -13,7 +13,7 @@ public class CSVProject implements Serializable {
@CsvBindByName(column = "id")
private String id;
@CsvBindByName(column = "programme")
@CsvBindByName(column = "legalBasis")
private String programme;
@CsvBindByName(column = "topics")

View File

@ -6,6 +6,7 @@ import java.io.Serializable;
/**
* the model class for the topic excel file
*/
@Deprecated
public class EXCELTopic implements Serializable {
private String rcn;
private String language;
@ -17,9 +18,27 @@ public class EXCELTopic implements Serializable {
private String title;
private String shortTitle;
private String objective;
private String subjects;
private String keywords;
private String legalBasis;
private String call;
private String id;
private String contentUpdateDate;
public String getContentUpdateDate() {
return contentUpdateDate;
}
public void setContentUpdateDate(String contentUpdateDate) {
this.contentUpdateDate = contentUpdateDate;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getRcn() {
return rcn;
@ -101,12 +120,12 @@ public class EXCELTopic implements Serializable {
this.objective = objective;
}
public String getSubjects() {
return subjects;
public String getKeywords() {
return keywords;
}
public void setSubjects(String subjects) {
this.subjects = subjects;
public void setKeywords(String keywords) {
this.keywords = keywords;
}
public String getLegalBasis() {

View File

@ -0,0 +1,38 @@
package eu.dnetlib.dhp.actionmanager.project.utils.model;
import java.io.Serializable;
/**
* @author miriam.baglioni
* @Date 28/02/23
*/
public class JsonTopic implements Serializable {
private String projectID;
private String title;
private String topic;
public String getProjectID() {
return projectID;
}
public void setProjectID(String projectID) {
this.projectID = projectID;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getTopic() {
return topic;
}
public void setTopic(String topic) {
this.topic = topic;
}
}
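A sketch of how one record of the extracted topics.json is expected to bind to this bean (field values are invented for illustration):

import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonTopicExample {
	public static void main(String[] args) throws Exception {
		// Invented record: the real file is produced by ExtractFromZip.
		String json = "{\"projectID\":\"894593\",\"title\":\"Example topic title\",\"topic\":\"MSCA-IF-2019\"}";
		JsonTopic topic = new ObjectMapper().readValue(json, JsonTopic.class);
		System.out.println(topic.getProjectID() + " -> " + topic.getTopic());
	}
}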

View File

@ -0,0 +1,191 @@
package eu.dnetlib.dhp.actionmanager.project.utils.model;
import java.io.Serializable;
/**
* @author miriam.baglioni
* @Date 24/02/23
*/
public class Project implements Serializable {
private String acronym;
private String contentUpdateDate;
private String ecMaxContribution;
private String ecSignatureDate;
private String endDate;
private String frameworkProgramme;
private String fundingScheme;
private String grantDoi;
private String id;
private String legalBasis;
private String masterCall;
private String nature;
private String objective;
private String rcn;
private String startDate;
private String status;
private String subCall;
private String title;
private String topics;
private String totalCost;
public String getAcronym() {
return acronym;
}
public void setAcronym(String acronym) {
this.acronym = acronym;
}
public String getContentUpdateDate() {
return contentUpdateDate;
}
public void setContentUpdateDate(String contentUpdateDate) {
this.contentUpdateDate = contentUpdateDate;
}
public String getEcMaxContribution() {
return ecMaxContribution;
}
public void setEcMaxContribution(String ecMaxContribution) {
this.ecMaxContribution = ecMaxContribution;
}
public String getEcSignatureDate() {
return ecSignatureDate;
}
public void setEcSignatureDate(String ecSignatureDate) {
this.ecSignatureDate = ecSignatureDate;
}
public String getEndDate() {
return endDate;
}
public void setEndDate(String endDate) {
this.endDate = endDate;
}
public String getFrameworkProgramme() {
return frameworkProgramme;
}
public void setFrameworkProgramme(String frameworkProgramme) {
this.frameworkProgramme = frameworkProgramme;
}
public String getFundingScheme() {
return fundingScheme;
}
public void setFundingScheme(String fundingScheme) {
this.fundingScheme = fundingScheme;
}
public String getGrantDoi() {
return grantDoi;
}
public void setGrantDoi(String grantDoi) {
this.grantDoi = grantDoi;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getLegalBasis() {
return legalBasis;
}
public void setLegalBasis(String legalBasis) {
this.legalBasis = legalBasis;
}
public String getMasterCall() {
return masterCall;
}
public void setMasterCall(String masterCall) {
this.masterCall = masterCall;
}
public String getNature() {
return nature;
}
public void setNature(String nature) {
this.nature = nature;
}
public String getObjective() {
return objective;
}
public void setObjective(String objective) {
this.objective = objective;
}
public String getRcn() {
return rcn;
}
public void setRcn(String rcn) {
this.rcn = rcn;
}
public String getStartDate() {
return startDate;
}
public void setStartDate(String startDate) {
this.startDate = startDate;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
public String getSubCall() {
return subCall;
}
public void setSubCall(String subCall) {
this.subCall = subCall;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getTopics() {
return topics;
}
public void setTopics(String topics) {
this.topics = topics;
}
public String getTotalCost() {
return totalCost;
}
public void setTotalCost(String totalCost) {
this.totalCost = totalCost;
}
}

View File

@ -14,7 +14,6 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.api.java.function.MapGroupsFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
@ -28,9 +27,7 @@ import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.schema.action.AtomicAction;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.DataInfo;
import eu.dnetlib.dhp.schema.oaf.Measure;
import eu.dnetlib.dhp.schema.oaf.Result;
import eu.dnetlib.dhp.schema.oaf.*;
import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
import scala.Tuple2;
@ -76,16 +73,22 @@ public class SparkAtomicActionUsageJob implements Serializable {
isSparkSessionManaged,
spark -> {
removeOutputDir(spark, outputPath);
prepareResults(dbname, spark, workingPath);
prepareData(dbname, spark, workingPath + "/usageDb", "usage_stats", "result_id");
prepareData(dbname, spark, workingPath + "/projectDb", "project_stats", "id");
prepareData(dbname, spark, workingPath + "/datasourceDb", "datasource_stats", "repositor_id");
writeActionSet(spark, workingPath, outputPath);
});
}
public static void prepareResults(String db, SparkSession spark, String workingPath) {
private static void prepareData(String dbname, SparkSession spark, String workingPath, String tableName,
String attribute_name) {
spark
.sql(
"Select result_id, downloads, views " +
"from " + db + ".usage_stats")
String
.format(
"select %s as id, sum(downloads) as downloads, sum(views) as views " +
"from %s.%s group by %s",
attribute_name, dbname, tableName, attribute_name))
.as(Encoders.bean(UsageStatsModel.class))
.write()
.mode(SaveMode.Overwrite)
@ -94,23 +97,17 @@ public class SparkAtomicActionUsageJob implements Serializable {
}
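For reference, this is the query that the String.format above produces for the usage step (the database name is an example; the real one comes from the usagestatsdb parameter):

// Illustration only, not part of the job.
String query = String.format(
	"select %s as id, sum(downloads) as downloads, sum(views) as views from %s.%s group by %s",
	"result_id", "usagestats", "usage_stats", "result_id");
// -> select result_id as id, sum(downloads) as downloads, sum(views) as views
//    from usagestats.usage_stats group by result_id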
public static void writeActionSet(SparkSession spark, String inputPath, String outputPath) {
readPath(spark, inputPath, UsageStatsModel.class)
.groupByKey((MapFunction<UsageStatsModel, String>) us -> us.getResult_id(), Encoders.STRING())
.mapGroups((MapGroupsFunction<String, UsageStatsModel, Result>) (k, it) -> {
UsageStatsModel first = it.next();
it.forEachRemaining(us -> {
first.setDownloads(first.getDownloads() + us.getDownloads());
first.setViews(first.getViews() + us.getViews());
});
Result res = new Result();
res.setId("50|" + k);
res.setMeasures(getMeasure(first.getDownloads(), first.getViews()));
return res;
}, Encoders.bean(Result.class))
getFinalIndicatorsResult(spark, inputPath + "/usageDb")
.toJavaRDD()
.map(p -> new AtomicAction(p.getClass(), p))
.union(
getFinalIndicatorsProject(spark, inputPath + "/projectDb")
.toJavaRDD()
.map(p -> new AtomicAction(p.getClass(), p)))
.union(
getFinalIndicatorsDatasource(spark, inputPath + "/datasourceDb")
.toJavaRDD()
.map(p -> new AtomicAction(p.getClass(), p)))
.mapToPair(
aa -> new Tuple2<>(new Text(aa.getClazz().getCanonicalName()),
new Text(OBJECT_MAPPER.writeValueAsString(aa))))
@ -118,6 +115,39 @@ public class SparkAtomicActionUsageJob implements Serializable {
}
private static Dataset<Result> getFinalIndicatorsResult(SparkSession spark, String inputPath) {
return readPath(spark, inputPath, UsageStatsModel.class)
.map((MapFunction<UsageStatsModel, Result>) usm -> {
Result r = new Result();
r.setId("50|" + usm.getId());
r.setMeasures(getMeasure(usm.getDownloads(), usm.getViews()));
return r;
}, Encoders.bean(Result.class));
}
private static Dataset<Project> getFinalIndicatorsProject(SparkSession spark, String inputPath) {
return readPath(spark, inputPath, UsageStatsModel.class)
.map((MapFunction<UsageStatsModel, Project>) usm -> {
Project p = new Project();
p.setId("40|" + usm.getId());
p.setMeasures(getMeasure(usm.getDownloads(), usm.getViews()));
return p;
}, Encoders.bean(Project.class));
}
private static Dataset<Datasource> getFinalIndicatorsDatasource(SparkSession spark, String inputPath) {
return readPath(spark, inputPath, UsageStatsModel.class)
.map((MapFunction<UsageStatsModel, Datasource>) usm -> {
Datasource d = new Datasource();
d.setId("10|" + usm.getId());
d.setMeasures(getMeasure(usm.getDownloads(), usm.getViews()));
return d;
}, Encoders.bean(Datasource.class));
}
private static List<Measure> getMeasure(Long downloads, Long views) {
DataInfo dataInfo = OafMapperUtils
.dataInfo(

View File

@ -4,16 +4,16 @@ package eu.dnetlib.dhp.actionmanager.usagestats;
import java.io.Serializable;
public class UsageStatsModel implements Serializable {
private String result_id;
private String id;
private Long downloads;
private Long views;
public String getResult_id() {
return result_id;
public String getId() {
return id;
}
public void setResult_id(String result_id) {
this.result_id = result_id;
public void setId(String id) {
this.id = id;
}
public Long getDownloads() {

View File

@ -86,7 +86,7 @@
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>Produces the unresolved from bip finder!</name>
<name>Produces the unresolved from BIP! Finder</name>
<class>eu.dnetlib.dhp.actionmanager.createunresolvedentities.PrepareBipFinder</class>
<jar>dhp-aggregation-${projectVersion}.jar</jar>
<spark-opts>
@ -135,7 +135,7 @@
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>Produces the unresolved from FOS!</name>
<name>Produces the unresolved from FOS</name>
<class>eu.dnetlib.dhp.actionmanager.createunresolvedentities.PrepareFOSSparkJob</class>
<jar>dhp-aggregation-${projectVersion}.jar</jar>
<spark-opts>
@ -185,7 +185,7 @@
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>Produces the unresolved from FOS!</name>
<name>Produces the unresolved from FOS</name>
<class>eu.dnetlib.dhp.actionmanager.createunresolvedentities.PrepareSDGSparkJob</class>
<jar>dhp-aggregation-${projectVersion}.jar</jar>
<spark-opts>

View File

@ -0,0 +1,23 @@
[
{
"paramName": "ip",
"paramLongName": "inputPath",
"paramDescription": "the path where the projects are stored ",
"paramRequired": true
},
{
"paramName": "op",
"paramLongName": "outputPath",
"paramDescription": "the path for the extracted folder",
"paramRequired": true
},
{
"paramName": "hnn",
"paramLongName": "hdfsNameNode",
"paramDescription": "the hdfs namenode",
"paramRequired": true
}
]

View File

@ -0,0 +1,3 @@
#!/bin/bash
# Usage: download.sh <sourceURL> <hdfsDestinationPath>
# Remove any previous copy, then stream the remote file straight into HDFS.
hdfs dfs -rm $2
curl -LSs $1 | hdfs dfs -put - $2

View File

@ -1,27 +1,9 @@
<workflow-app name="H2020Classification" xmlns="uri:oozie:workflow:0.5">
<parameters>
<property>
<name>projectFileURL</name>
<description>the url where to get the projects file</description>
</property>
<property>
<name>programmeFileURL</name>
<description>the url where to get the programme file</description>
</property>
<property>
<name>topicFileURL</name>
<description>the url where to get the topic file</description>
</property>
<property>
<name>outputPath</name>
<description>path where to store the action set</description>
</property>
<property>
<name>sheetName</name>
<description>the name of the sheet to read</description>
</property>
</parameters>
<start to="deleteoutputpath"/>
@ -35,40 +17,103 @@
<delete path='${workingDir}'/>
<mkdir path='${workingDir}'/>
</fs>
<ok to="fork_get_info"/>
<ok to="fork_download_info"/>
<error to="Kill"/>
</action>
<fork name="fork_get_info">
<fork name="fork_download_info">
<path start="fork_get_projects"/>
<path start="get_programme_file"/>
<path start="get_topic_file"/>
<path start="download_programme_file"/>
</fork>
<fork name="fork_get_projects">
<path start="get_project_file"/>
<path start="read_projects"/>
<path start="download_projects"/>
<path start="read_projects_from_db"/>
</fork>
<action name="get_project_file">
<java>
<main-class>eu.dnetlib.dhp.actionmanager.project.utils.ReadCSV</main-class>
<arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
<arg>--fileURL</arg><arg>${projectFileURL}</arg>
<arg>--hdfsPath</arg><arg>${workingDir}/projects</arg>
<arg>--classForName</arg><arg>eu.dnetlib.dhp.actionmanager.project.utils.model.CSVProject</arg>
</java>
<ok to="wait_projects"/>
<action name="download_projects">
<shell xmlns="uri:oozie:shell-action:0.2">
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>mapred.job.queue.name</name>
<value>${queueName}</value>
</property>
</configuration>
<exec>download.sh</exec>
<argument>${downloadH2020Projects}</argument>
<argument>${projectPath}</argument>
<env-var>HADOOP_USER_NAME=${wf:user()}</env-var>
<file>download.sh</file>
<capture-output/>
</shell>
<ok to="extract_projects"/>
<error to="Kill"/>
</action>
<action name="get_programme_file">
<action name="extract_projects">
<java>
<main-class>eu.dnetlib.dhp.actionmanager.project.utils.ExtractFromZip</main-class>
<arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
<arg>--inputPath</arg><arg>${projectPath}</arg>
<arg>--outputPath</arg><arg>${workingDir}/</arg>
</java>
<ok to="read_from_folder"/>
<error to="Kill"/>
</action>
<fork name="read_from_folder">
<path start="read_projects"/>
<path start="read_topic_file"/>
</fork>
<action name="read_projects">
<java>
<main-class>eu.dnetlib.dhp.actionmanager.project.utils.ReadProjects</main-class>
<arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
<arg>--inputPath</arg><arg>${workingDir}/json/project.json</arg>
<arg>--outputPath</arg><arg>${workingDir}/projects</arg>
</java>
<ok to="wait_read_from_folder"/>
<error to="Kill"/>
</action>
<action name="download_programme_file">
<shell xmlns="uri:oozie:shell-action:0.2">
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>mapred.job.queue.name</name>
<value>${queueName}</value>
</property>
</configuration>
<exec>download.sh</exec>
<argument>${downloadH2020Programme}</argument>
<argument>${programmePath}</argument>
<env-var>HADOOP_USER_NAME=${wf:user()}</env-var>
<file>download.sh</file>
<capture-output/>
</shell>
<ok to="extract_programme"/>
<error to="Kill"/>
</action>
<action name="extract_programme">
<java>
<main-class>eu.dnetlib.dhp.actionmanager.project.utils.ExtractFromZip</main-class>
<arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
<arg>--inputPath</arg><arg>${programmePath}</arg>
<arg>--outputPath</arg><arg>${workingDir}/downloadedProgramme/</arg>
</java>
<ok to="read_programme"/>
<error to="Kill"/>
</action>
<action name="read_programme">
<java>
<main-class>eu.dnetlib.dhp.actionmanager.project.utils.ReadCSV</main-class>
<arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
<arg>--fileURL</arg><arg>${programmeFileURL}</arg>
<arg>--fileURL</arg><arg>${workingDir}/downloadedProgramme/csv/programme.csv</arg>
<arg>--hdfsPath</arg><arg>${workingDir}/programme</arg>
<arg>--classForName</arg><arg>eu.dnetlib.dhp.actionmanager.project.utils.model.CSVProgramme</arg>
</java>
@ -76,20 +121,18 @@
<error to="Kill"/>
</action>
<action name="get_topic_file">
<action name="read_topic_file">
<java>
<main-class>eu.dnetlib.dhp.actionmanager.project.utils.ReadExcel</main-class>
<main-class>eu.dnetlib.dhp.actionmanager.project.utils.ReadTopics</main-class>
<arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
<arg>--fileURL</arg><arg>${topicFileURL}</arg>
<arg>--hdfsPath</arg><arg>${workingDir}/topic</arg>
<arg>--sheetName</arg><arg>${sheetName}</arg>
<arg>--classForName</arg><arg>eu.dnetlib.dhp.actionmanager.project.utils.model.EXCELTopic</arg>
<arg>--inputPath</arg><arg>${workingDir}/json/topics.json</arg>
<arg>--outputPath</arg><arg>${workingDir}/topic</arg>
</java>
<ok to="wait"/>
<ok to="wait_read_from_folder"/>
<error to="Kill"/>
</action>
<action name="read_projects">
<action name="read_projects_from_db">
<java>
<main-class>eu.dnetlib.dhp.actionmanager.project.ReadProjectsFromDB</main-class>
<arg>--hdfsPath</arg><arg>${workingDir}/dbProjects</arg>
@ -123,9 +166,11 @@
<arg>--outputPath</arg><arg>${workingDir}/preparedProgramme</arg>
</spark>
<ok to="wait"/>
<!-- <ok to="End"/>-->
<error to="Kill"/>
</action>
<join name="wait_read_from_folder" to="wait_projects"/>
<join name="wait" to="create_updates"/>
<join name="wait_projects" to="prepare_project"/>
@ -153,6 +198,7 @@
<arg>--dbProjectPath</arg><arg>${workingDir}/dbProjects</arg>
</spark>
<ok to="wait"/>
<!-- <ok to="End"/>-->
<error to="Kill"/>
</action>

View File

@ -0,0 +1,23 @@
[
{
"paramName": "ip",
"paramLongName": "inputPath",
"paramDescription": "the path where the projects are stored ",
"paramRequired": true
},
{
"paramName": "op",
"paramLongName": "outputPath",
"paramDescription": "the path for the extracted folder",
"paramRequired": true
},
{
"paramName": "hnn",
"paramLongName": "hdfsNameNode",
"paramDescription": "the hdfs namenode",
"paramRequired": true
}
]

View File

@ -89,7 +89,7 @@
<arg>--hive_metastore_uris</arg><arg>${hiveMetastoreUris}</arg>
<arg>--outputPath</arg><arg>${outputPath}</arg>
<arg>--usagestatsdb</arg><arg>${usagestatsdb}</arg>
<arg>--workingPath</arg><arg>${workingDir}/usageDb</arg>
<arg>--workingPath</arg><arg>${workingDir}</arg>
</spark>
<ok to="End"/>
<error to="Kill"/>

View File

@ -79,16 +79,6 @@ object DataciteModelConstants {
OafMapperUtils.keyValue(ModelConstants.DATACITE_ID, DATACITE_NAME)
val subRelTypeMapping: Map[String, OAFRelations] = Map(
ModelConstants.REFERENCES -> OAFRelations(
ModelConstants.REFERENCES,
ModelConstants.IS_REFERENCED_BY,
ModelConstants.RELATIONSHIP
),
ModelConstants.IS_REFERENCED_BY -> OAFRelations(
ModelConstants.IS_REFERENCED_BY,
ModelConstants.REFERENCES,
ModelConstants.RELATIONSHIP
),
ModelConstants.IS_SUPPLEMENTED_BY -> OAFRelations(
ModelConstants.IS_SUPPLEMENTED_BY,
ModelConstants.IS_SUPPLEMENT_TO,
@ -164,16 +154,6 @@ object DataciteModelConstants {
ModelConstants.IS_SOURCE_OF,
ModelConstants.VERSION
),
ModelConstants.CITES -> OAFRelations(
ModelConstants.CITES,
ModelConstants.IS_CITED_BY,
ModelConstants.CITATION
),
ModelConstants.IS_CITED_BY -> OAFRelations(
ModelConstants.IS_CITED_BY,
ModelConstants.CITES,
ModelConstants.CITATION
),
ModelConstants.IS_VARIANT_FORM_OF -> OAFRelations(
ModelConstants.IS_VARIANT_FORM_OF,
ModelConstants.IS_DERIVED_FROM,

View File

@ -290,6 +290,7 @@ object DataciteToOAFTransformation {
collectedFrom: KeyValue,
di: DataInfo
): Relation = {
val r = new Relation
r.setSource(sourceId)
r.setTarget(targetId)
@ -619,7 +620,7 @@ object DataciteToOAFTransformation {
id: String,
date: String
): List[Relation] = {
rels
val bidirectionalRels: List[Relation] = rels
.filter(r =>
subRelTypeMapping
.contains(r.relationType) && (r.relatedIdentifierType.equalsIgnoreCase("doi") ||
@ -627,26 +628,46 @@ object DataciteToOAFTransformation {
r.relatedIdentifierType.equalsIgnoreCase("arxiv"))
)
.map(r => {
val rel = new Relation
rel.setProvenance(Lists.newArrayList(OafMapperUtils.getProvenance(DATACITE_COLLECTED_FROM, relDataInfo)))
val subRelType = subRelTypeMapping(r.relationType).relType
rel.setRelType(REL_TYPE_VALUE)
rel.setSubRelType(subRelType)
rel.setRelClass(r.relationType)
val dateProps: KeyValue = OafMapperUtils.keyValue(DATE_RELATION_KEY, date)
rel.setProperties(List(dateProps).asJava)
rel.setSource(id)
rel.setTarget(
DHPUtils.generateUnresolvedIdentifier(r.relatedIdentifier, r.relatedIdentifierType)
)
rel
val target = DHPUtils.generateUnresolvedIdentifier(r.relatedIdentifier, r.relatedIdentifierType)
relation(id, target, subRelType, r.relationType, date)
})
val citationRels: List[Relation] = rels
.filter(r =>
(r.relatedIdentifierType.equalsIgnoreCase("doi") ||
r.relatedIdentifierType.equalsIgnoreCase("pmid") ||
r.relatedIdentifierType.equalsIgnoreCase("arxiv")) &&
(r.relationType.toLowerCase.contains("cite") || r.relationType.toLowerCase.contains("reference"))
)
.map(r => {
r.relationType match {
case ModelConstants.CITES | ModelConstants.REFERENCES =>
val target = DHPUtils.generateUnresolvedIdentifier(r.relatedIdentifier, r.relatedIdentifierType)
relation(id, target, ModelConstants.CITATION, ModelConstants.CITES, date)
case ModelConstants.IS_CITED_BY | ModelConstants.IS_REFERENCED_BY =>
val source = DHPUtils.generateUnresolvedIdentifier(r.relatedIdentifier, r.relatedIdentifierType)
relation(source, id, ModelConstants.CITATION, ModelConstants.CITES, date)
}
})
citationRels ::: bidirectionalRels
}
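The direction handling in the citation branch above can be summarized in a small sketch (written in Java for illustration; it only restates which side becomes the citing record):

import eu.dnetlib.dhp.schema.common.ModelConstants;

static String[] citationEndpoints(String relationType, String recordId, String relatedId) {
	// Cites / References: the Datacite record itself is the citing side.
	if (ModelConstants.CITES.equals(relationType) || ModelConstants.REFERENCES.equals(relationType)) {
		return new String[] { recordId, relatedId };
	}
	// IsCitedBy / IsReferencedBy: the related identifier is the citing side.
	if (ModelConstants.IS_CITED_BY.equals(relationType) || ModelConstants.IS_REFERENCED_BY.equals(relationType)) {
		return new String[] { relatedId, recordId };
	}
	return null; // not a citation-like relation
}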
def relation(source: String, target: String, subRelType: String, relClass: String, date: String): Relation = {
val rel = new Relation
rel.setProvenance(Lists.newArrayList(OafMapperUtils.getProvenance(DATACITE_COLLECTED_FROM, relDataInfo)))
rel.setRelType(REL_TYPE_VALUE)
rel.setSubRelType(subRelType)
rel.setRelClass(relClass)
val dateProps: KeyValue = OafMapperUtils.keyValue(DATE_RELATION_KEY, date)
rel.setProperties(List(dateProps).asJava)
rel.setSource(source)
rel.setTarget(target)
rel
}
def generateDSId(input: String): String = {

View File

@ -0,0 +1,99 @@
package eu.dnetlib.dhp.actionmanager.createunresolvedentities;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.actionmanager.createunresolvedentities.model.FOSDataModel;
/**
* @author miriam.baglioni
* @Date 13/02/23
*/
public class GetFosTest {
private static final Logger log = LoggerFactory.getLogger(GetFosTest.class);
private static Path workingDir;
private static SparkSession spark;
private static LocalFileSystem fs;
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
@BeforeAll
public static void beforeAll() throws IOException {
workingDir = Files.createTempDirectory(PrepareTest.class.getSimpleName());
fs = FileSystem.getLocal(new Configuration());
log.info("using work dir {}", workingDir);
SparkConf conf = new SparkConf();
conf.setAppName(ProduceTest.class.getSimpleName());
conf.setMaster("local[*]");
conf.set("spark.driver.host", "localhost");
conf.set("hive.metastore.local", "true");
conf.set("spark.ui.enabled", "false");
conf.set("spark.sql.warehouse.dir", workingDir.toString());
conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
spark = SparkSession
.builder()
.appName(PrepareTest.class.getSimpleName())
.config(conf)
.getOrCreate();
}
@AfterAll
public static void afterAll() throws IOException {
FileUtils.deleteDirectory(workingDir.toFile());
spark.stop();
}
@Test
void test3() throws Exception {
final String sourcePath = getClass()
.getResource("/eu/dnetlib/dhp/actionmanager/createunresolvedentities/fos/fos_sbs.tsv")
.getPath();
final String outputPath = workingDir.toString() + "/fos.json";
GetFOSSparkJob
.main(
new String[] {
"--isSparkSessionManaged", Boolean.FALSE.toString(),
"--sourcePath", sourcePath,
"-outputPath", outputPath
});
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
JavaRDD<FOSDataModel> tmp = sc
.textFile(outputPath)
.map(item -> OBJECT_MAPPER.readValue(item, FOSDataModel.class));
tmp.foreach(t -> Assertions.assertTrue(t.getDoi() != null));
tmp.foreach(t -> Assertions.assertTrue(t.getLevel1() != null));
tmp.foreach(t -> Assertions.assertTrue(t.getLevel2() != null));
tmp.foreach(t -> Assertions.assertTrue(t.getLevel3() != null));
}
}

View File

@ -1,6 +1,8 @@
package eu.dnetlib.dhp.actionmanager.project;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
@ -16,6 +18,7 @@ import eu.dnetlib.dhp.actionmanager.project.utils.EXCELParser;
import eu.dnetlib.dhp.common.collection.CollectorException;
import eu.dnetlib.dhp.common.collection.HttpConnector2;
@Deprecated
@Disabled
public class EXCELParserTest {
@ -43,4 +46,21 @@ public class EXCELParserTest {
Assertions.assertEquals(3878, pl.size());
}
@Test
void test2() throws IOException, ClassNotFoundException, InvalidFormatException, IllegalAccessException,
InstantiationException {
EXCELParser excelParser = new EXCELParser();
List<Object> pl = excelParser
.parse(
new FileInputStream(
getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/h2020_topic.xlsx").getPath()),
"eu.dnetlib.dhp.actionmanager.project.utils.model.EXCELTopic",
"DATA");
Assertions.assertEquals(3905, pl.size());
}
}

View File

@ -73,7 +73,7 @@ public class PrepareH2020ProgrammeTest {
"-isSparkSessionManaged",
Boolean.FALSE.toString(),
"-programmePath",
getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/whole_programme.json.gz").getPath(),
getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/h2020_programme.json.gz").getPath(),
"-outputPath",
workingDir.toString() + "/preparedProgramme"
});
@ -84,7 +84,7 @@ public class PrepareH2020ProgrammeTest {
.textFile(workingDir.toString() + "/preparedProgramme")
.map(item -> OBJECT_MAPPER.readValue(item, CSVProgramme.class));
Assertions.assertEquals(277, tmp.count());
Assertions.assertEquals(279, tmp.count());
Dataset<CSVProgramme> verificationDataset = spark.createDataset(tmp.rdd(), Encoders.bean(CSVProgramme.class));

View File

@ -4,12 +4,14 @@ package eu.dnetlib.dhp.actionmanager.project;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.ForeachFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
@ -20,9 +22,12 @@ import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.actionmanager.project.utils.model.CSVProject;
import eu.dnetlib.dhp.actionmanager.project.utils.model.Project;
public class PrepareProjectTest {
@ -74,7 +79,7 @@ public class PrepareProjectTest {
"-isSparkSessionManaged",
Boolean.FALSE.toString(),
"-projectPath",
getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/projects_subset.json").getPath(),
getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/projects_nld.json.gz").getPath(),
"-outputPath",
workingDir.toString() + "/preparedProjects",
"-dbProjectPath",
@ -94,6 +99,12 @@ public class PrepareProjectTest {
Assertions.assertEquals(0, verificationDataset.filter("length(id) = 0").count());
Assertions.assertEquals(0, verificationDataset.filter("length(programme) = 0").count());
Assertions.assertEquals(0, verificationDataset.filter("length(topics) = 0").count());
CSVProject project = tmp.filter(p -> p.getId().equals("886828")).first();
Assertions.assertEquals("H2020-EU.2.3.", project.getProgramme());
Assertions.assertEquals("EIC-SMEInst-2018-2020", project.getTopics());
}
}

View File

@ -1,12 +1,10 @@
package eu.dnetlib.dhp.actionmanager.project;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.*;
import java.nio.file.Files;
import org.apache.commons.io.FileUtils;
@ -24,7 +22,7 @@ import eu.dnetlib.dhp.common.collection.CollectorException;
import eu.dnetlib.dhp.common.collection.GetCSV;
import eu.dnetlib.dhp.common.collection.HttpConnector2;
public class DownloadCsvTest {
public class ReadProgrammeTest {
private static String workingDir;
@ -33,22 +31,25 @@ public class DownloadCsvTest {
@BeforeAll
public static void beforeAll() throws IOException {
workingDir = Files
.createTempDirectory(DownloadCsvTest.class.getSimpleName())
.createTempDirectory(ReadProgrammeTest.class.getSimpleName())
.toString();
fs = FileSystem.getLocal(new Configuration());
}
@Disabled
@Test
void getProgrammeFileTest() throws Exception {
@AfterAll
public static void cleanup() {
FileUtils.deleteQuietly(new File(workingDir));
}
String fileURL = "https://cordis.europa.eu/data/reference/cordisref-h2020programmes.csv";
@Test
void getLocalProgrammeFileTest() throws Exception {
GetCSV
.getCsv(
fs, new BufferedReader(
new InputStreamReader(new HttpConnector2().getInputSourceAsStream(fileURL))),
new FileReader(
getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/h2020_programme.csv").getPath())),
workingDir + "/programme",
CSVProgramme.class.getName(), ';');
@ -56,10 +57,11 @@ public class DownloadCsvTest {
String line;
int count = 0;
ObjectMapper OBJECT_MAPPER = new ObjectMapper();
while ((line = in.readLine()) != null) {
CSVProgramme csvp = new ObjectMapper().readValue(line, CSVProgramme.class);
if (count == 0) {
assertTrue(csvp.getCode().equals("H2020-EU.5.f."));
CSVProgramme csvp = OBJECT_MAPPER.readValue(line, CSVProgramme.class);
if (count == 528) {
assertEquals("H2020-EU.5.f.", csvp.getCode());
assertTrue(
csvp
.getTitle()
@ -69,8 +71,8 @@ public class DownloadCsvTest {
assertTrue(csvp.getShortTitle().equals(""));
assertTrue(csvp.getLanguage().equals("en"));
}
if (count == 28) {
assertTrue(csvp.getCode().equals("H2020-EU.3.5.4."));
if (count == 11) {
assertEquals("H2020-EU.3.5.4.", csvp.getCode());
assertTrue(
csvp
.getTitle()
@ -79,7 +81,7 @@ public class DownloadCsvTest {
assertTrue(csvp.getShortTitle().equals("A green economy and society through eco-innovation"));
assertTrue(csvp.getLanguage().equals("de"));
}
if (count == 229) {
if (count == 34) {
assertTrue(csvp.getCode().equals("H2020-EU.3.2."));
assertTrue(
csvp
@ -95,54 +97,7 @@ public class DownloadCsvTest {
count += 1;
}
Assertions.assertEquals(767, count);
}
@Disabled
@Test
void getProjectFileTest() throws IOException, CollectorException, ClassNotFoundException {
String fileURL = "https://cordis.europa.eu/data/cordis-h2020projects.csv";
GetCSV
.getCsv(
fs,
new BufferedReader(new InputStreamReader(new HttpConnector2().getInputSourceAsStream(fileURL))),
workingDir + "/projects",
CSVProject.class.getName(), ';');
BufferedReader in = new BufferedReader(new InputStreamReader(fs.open(new Path(workingDir + "/projects"))));
String line;
int count = 0;
while ((line = in.readLine()) != null) {
CSVProject csvp = new ObjectMapper().readValue(line, CSVProject.class);
if (count == 0) {
assertTrue(csvp.getId().equals("771736"));
assertTrue(csvp.getProgramme().equals("H2020-EU.1.1."));
assertTrue(csvp.getTopics().equals("ERC-2017-COG"));
}
if (count == 22882) {
assertTrue(csvp.getId().equals("752903"));
assertTrue(csvp.getProgramme().equals("H2020-EU.1.3.2."));
assertTrue(csvp.getTopics().equals("MSCA-IF-2016"));
}
if (count == 223023) {
assertTrue(csvp.getId().equals("861952"));
assertTrue(csvp.getProgramme().equals("H2020-EU.4.e."));
assertTrue(csvp.getTopics().equals("SGA-SEWP-COST-2019"));
}
assertTrue(csvp.getId() != null);
assertTrue(csvp.getProgramme().startsWith("H2020"));
count += 1;
}
Assertions.assertEquals(34957, count);
}
@AfterAll
public static void cleanup() {
FileUtils.deleteQuietly(new File(workingDir));
assertEquals(769, count);
}
}
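For context, the test above hands GetCSV.getCsv a Reader over the semicolon-delimited programme file together with the CSVProgramme class name, and then reads the output back one JSON record per line. Below is a minimal sketch of the CSV-to-bean step, assuming opencsv-style header mapping; the class name CsvBeanMappingSketch and this code are illustrative only, not the project's GetCSV implementation.

// Illustrative sketch only: map delimiter-separated CSV rows onto a bean class
// such as CSVProgramme, the same shape the test feeds to GetCSV.getCsv.
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.util.List;

import com.opencsv.bean.CsvToBeanBuilder;

public class CsvBeanMappingSketch {

    public static <T> List<T> parse(String csvPath, Class<T> clazz, char delimiter) throws IOException {
        try (Reader reader = new FileReader(csvPath)) {
            // header-based bean mapping, splitting columns on the given delimiter
            return new CsvToBeanBuilder<T>(reader)
                .withType(clazz)
                .withSeparator(delimiter)
                .build()
                .parse();
        }
    }
}

With the arguments used in the test this would be parse(path, CSVProgramme.class, ';'); the per-line JSON serialization of the parsed beans is what the BufferedReader loop above consumes.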

View File

@ -0,0 +1,104 @@
package eu.dnetlib.dhp.actionmanager.project;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.actionmanager.project.utils.ReadProjects;
import eu.dnetlib.dhp.actionmanager.project.utils.model.CSVProject;
import eu.dnetlib.dhp.actionmanager.project.utils.model.Project;
/**
* @author miriam.baglioni
* @Date 01/03/23
*/
public class ReadProjectsTest {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static Path workingDir;
private static LocalFileSystem fs;
private static SparkSession spark;
private static final Logger log = LoggerFactory
.getLogger(ReadProjectsTest.class);
@BeforeAll
public static void beforeAll() throws IOException {
workingDir = Files
.createTempDirectory(ReadProjectsTest.class.getSimpleName());
fs = FileSystem.getLocal(new Configuration());
SparkConf conf = new SparkConf();
conf.setAppName(PrepareProjectTest.class.getSimpleName());
conf.setMaster("local[*]");
conf.set("spark.driver.host", "localhost");
conf.set("hive.metastore.local", "true");
conf.set("spark.ui.enabled", "false");
conf.set("spark.sql.warehouse.dir", workingDir.toString());
conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
spark = SparkSession
.builder()
.appName(PrepareProjectTest.class.getSimpleName())
.config(conf)
.getOrCreate();
}
@AfterAll
public static void afterAll() throws IOException {
FileUtils.deleteDirectory(workingDir.toFile());
spark.stop();
}
@Test
void readProjects() throws IOException {
String projects = getClass()
.getResource("/eu/dnetlib/dhp/actionmanager/project/projects.json")
.getPath();
ReadProjects.readProjects(projects, workingDir.toString() + "/projects", fs);
final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
JavaRDD<Project> tmp = sc
.textFile(workingDir.toString() + "/projects")
.map(item -> OBJECT_MAPPER.readValue(item, Project.class));
Assertions.assertEquals(19, tmp.count());
Project project = tmp.filter(p -> p.getAcronym().equals("GiSTDS")).first();
Assertions.assertEquals("2022-10-08 18:28:27", project.getContentUpdateDate());
Assertions.assertEquals("894593", project.getId());
Assertions.assertEquals("H2020-EU.1.3.", project.getLegalBasis());
Assertions.assertEquals("MSCA-IF-2019", project.getTopics());
// tmp.foreach(p -> System.out.println(OBJECT_MAPPER.writeValueAsString(p)));
}
}
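For orientation, this test (and the analogous ReadTopics case in the following file) only checks the output shape: a JSON array read from a local resource ends up as newline-delimited JSON on the given FileSystem, one record per line, so that sc.textFile can map each line back with Jackson. A hedged sketch of that shape follows; JsonArrayToNdjsonSketch and its method are illustrative names, not the project's ReadProjects implementation.

// Illustrative sketch only: turn a JSON array file into newline-delimited JSON
// records on a Hadoop FileSystem, one serialized object per line.
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonArrayToNdjsonSketch {

    private static final ObjectMapper MAPPER = new ObjectMapper();

    public static <T> void convert(String inputJson, String outputPath, FileSystem fs, Class<T> clazz)
        throws IOException {
        // read the whole JSON array from the local file system
        String json = new String(Files.readAllBytes(Paths.get(inputJson)), StandardCharsets.UTF_8);
        List<T> records = MAPPER
            .readValue(json, MAPPER.getTypeFactory().constructCollectionType(List.class, clazz));

        // write one serialized record per line so sc.textFile can map them back
        try (BufferedWriter writer = new BufferedWriter(
            new OutputStreamWriter(fs.create(new Path(outputPath), true), StandardCharsets.UTF_8))) {
            for (T record : records) {
                writer.write(MAPPER.writeValueAsString(record));
                writer.newLine();
            }
        }
    }
}

The eager Files.readAllBytes read is fine for these small test fixtures; a streaming parser would be preferable for large inputs.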

View File

@ -0,0 +1,99 @@
package eu.dnetlib.dhp.actionmanager.project;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.actionmanager.project.utils.ReadProjects;
import eu.dnetlib.dhp.actionmanager.project.utils.ReadTopics;
import eu.dnetlib.dhp.actionmanager.project.utils.model.JsonTopic;
import eu.dnetlib.dhp.actionmanager.project.utils.model.Project;
/**
* @author miriam.baglioni
* @Date 01/03/23
*/
public class ReadTopicTest {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static Path workingDir;
private static LocalFileSystem fs;
private static SparkSession spark;
private static final Logger log = LoggerFactory
.getLogger(ReadTopicTest.class);
@BeforeAll
public static void beforeAll() throws IOException {
workingDir = Files
.createTempDirectory(ReadTopicTest.class.getSimpleName());
fs = FileSystem.getLocal(new Configuration());
SparkConf conf = new SparkConf();
conf.setAppName(PrepareProjectTest.class.getSimpleName());
conf.setMaster("local[*]");
conf.set("spark.driver.host", "localhost");
conf.set("hive.metastore.local", "true");
conf.set("spark.ui.enabled", "false");
conf.set("spark.sql.warehouse.dir", workingDir.toString());
conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
spark = SparkSession
.builder()
.appName(PrepareProjectTest.class.getSimpleName())
.config(conf)
.getOrCreate();
}
@AfterAll
public static void afterAll() throws IOException {
FileUtils.deleteDirectory(workingDir.toFile());
spark.stop();
}
@Disabled
@Test
void readTopics() throws IOException {
String topics = getClass()
.getResource("/eu/dnetlib/dhp/actionmanager/project/topics.json")
.getPath();
ReadTopics.readTopics(topics, workingDir.toString() + "/topics", fs);
final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
JavaRDD<JsonTopic> tmp = sc
.textFile(workingDir.toString() + "/topics")
.map(item -> OBJECT_MAPPER.readValue(item, JsonTopic.class));
// Assertions.assertEquals(16, tmp.count());
JsonTopic topic = tmp.filter(t -> t.getProjectID().equals("886988")).first();
Assertions.assertEquals("Individual Fellowships", topic.getTitle());
Assertions.assertEquals("MSCA-IF-2019", topic.getTopic());
// tmp.foreach(p -> System.out.println(OBJECT_MAPPER.writeValueAsString(p)));
}
}

View File

@ -11,6 +11,7 @@ import org.apache.hadoop.io.Text;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.ForeachFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
@ -78,12 +79,12 @@ public class SparkUpdateProjectTest {
"-programmePath",
getClass()
.getResource(
"/eu/dnetlib/dhp/actionmanager/project/preparedProgramme_whole.json")
"/eu/dnetlib/dhp/actionmanager/project/prepared_h2020_programme.json.gz")
.getPath(),
"-projectPath",
getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/prepared_projects.json").getPath(),
getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/prepared_projects.json.gz").getPath(),
"-topicPath",
getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/topic.json.gz").getPath(),
getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/topics_nld.json.gz").getPath(),
"-outputPath",
workingDir.toString() + "/actionSet"
});
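Only the fixture names change here (prepared_h2020_programme.json.gz, prepared_projects.json.gz, topics_nld.json.gz); assuming the job loads them as text/JSON inputs, which the prepared_* naming suggests, no code change is needed because Hadoop's text input decompresses .gz transparently. A minimal, self-contained illustration; the path and class name are placeholders, not the project's layout.

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

public class ReadGzTextSketch {

    public static void main(String[] args) {
        SparkConf conf = new SparkConf()
            .setAppName(ReadGzTextSketch.class.getSimpleName())
            .setMaster("local[*]");
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            // textFile handles .gz files through Hadoop's compression codecs,
            // so prepared_projects.json and prepared_projects.json.gz read the same way
            long records = sc.textFile("/tmp/prepared_projects.json.gz").count();
            System.out.println(records);
        }
    }
}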
@ -266,6 +267,7 @@ public class SparkUpdateProjectTest {
.get(1)
.getString(0)
.equals("H2020-EU.2.1.4."));
Assertions
.assertTrue(
execverification

View File

@ -8,6 +8,7 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.util.stream.Collectors;
import eu.dnetlib.dhp.schema.oaf.Entity;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.io.Text;
import org.apache.spark.SparkConf;
@ -68,31 +69,33 @@ public class SparkAtomicActionCountJobTest {
@Test
void testMatch() {
String usageScoresPath = getClass()
.getResource("/eu/dnetlib/dhp/actionmanager/usagestats/usagestatsdb")
.getResource("/eu/dnetlib/dhp/actionmanager/usagestats")
.getPath();
SparkAtomicActionUsageJob.writeActionSet(spark, usageScoresPath, workingDir.toString() + "/actionSet");
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
JavaRDD<Result> tmp = sc
JavaRDD<AtomicAction> tmp = sc
.sequenceFile(workingDir.toString() + "/actionSet", Text.class, Text.class)
.map(usm -> OBJECT_MAPPER.readValue(usm._2.getBytes(), AtomicAction.class))
.map(aa -> (Result) aa.getPayload());
.map(usm -> OBJECT_MAPPER.readValue(usm._2.getBytes(), AtomicAction.class));
// .map(aa -> (Result) aa.getPayload());
Assertions.assertEquals(27, tmp.count());
Assertions.assertEquals(9, tmp.filter(aa -> ((Entity) aa.getPayload()).getId().startsWith("50|")).count());
Assertions.assertEquals(9, tmp.filter(aa -> ((Entity) aa.getPayload()).getId().startsWith("10|")).count());
Assertions.assertEquals(9, tmp.filter(aa -> ((Entity) aa.getPayload()).getId().startsWith("40|")).count());
tmp.foreach(r -> Assertions.assertEquals(2, r.getMeasures().size()));
tmp.foreach(r -> Assertions.assertEquals(2, ((Entity) r.getPayload()).getMeasures().size()));
tmp
.foreach(
r -> r
r -> ((Entity) r.getPayload())
.getMeasures()
.stream()
.forEach(
m -> m.getUnit().stream().forEach(u -> Assertions.assertTrue(u.getDataInfo().getInferred()))));
tmp
.foreach(
r -> r
r -> ((Entity) r.getPayload())
.getMeasures()
.stream()
.forEach(
@ -106,7 +109,7 @@ public class SparkAtomicActionCountJobTest {
u.getDataInfo().getProvenanceaction().getClassid()))));
tmp
.foreach(
r -> r
r -> ((Entity) r.getPayload())
.getMeasures()
.stream()
.forEach(
@ -121,7 +124,7 @@ public class SparkAtomicActionCountJobTest {
tmp
.foreach(
r -> r
r -> ((Entity) r.getPayload())
.getMeasures()
.stream()
.forEach(
@ -136,12 +139,19 @@ public class SparkAtomicActionCountJobTest {
Assertions
.assertEquals(
1, tmp.filter(r -> r.getId().equals("50|dedup_wf_001::53575dc69e9ace947e02d47ecd54a7a6")).count());
1,
tmp
.filter(
r -> ((Entity) r.getPayload())
.getId()
.equals("50|dedup_wf_001::53575dc69e9ace947e02d47ecd54a7a6"))
.count());
Assertions
.assertEquals(
"0",
tmp
.map(r -> ((Entity) r.getPayload()))
.filter(r -> r.getId().equals("50|dedup_wf_001::53575dc69e9ace947e02d47ecd54a7a6"))
.collect()
.get(0)
@ -157,6 +167,7 @@ public class SparkAtomicActionCountJobTest {
.assertEquals(
"5",
tmp
.map(r -> ((Entity) r.getPayload()))
.filter(r -> r.getId().equals("50|dedup_wf_001::53575dc69e9ace947e02d47ecd54a7a6"))
.collect()
.get(0)
@ -173,6 +184,7 @@ public class SparkAtomicActionCountJobTest {
.assertEquals(
"0",
tmp
.map(r -> ((Entity) r.getPayload()))
.filter(r -> r.getId().equals("50|doi_________::17eda2ff77407538fbe5d3d719b9d1c0"))
.collect()
.get(0)
@ -188,6 +200,7 @@ public class SparkAtomicActionCountJobTest {
.assertEquals(
"1",
tmp
.map(r -> ((Entity) r.getPayload()))
.filter(r -> r.getId().equals("50|doi_________::17eda2ff77407538fbe5d3d719b9d1c0"))
.collect()
.get(0)
@ -204,6 +217,7 @@ public class SparkAtomicActionCountJobTest {
.assertEquals(
"2",
tmp
.map(r -> ((Entity) r.getPayload()))
.filter(r -> r.getId().equals("50|doi_________::3085e4c6e051378ca6157fe7f0430c1f"))
.collect()
.get(0)
@ -219,6 +233,7 @@ public class SparkAtomicActionCountJobTest {
.assertEquals(
"6",
tmp
.map(r -> ((Entity) r.getPayload()))
.filter(r -> r.getId().equals("50|doi_________::3085e4c6e051378ca6157fe7f0430c1f"))
.collect()
.get(0)
@ -230,6 +245,204 @@ public class SparkAtomicActionCountJobTest {
.getUnit()
.get(0)
.getValue());
Assertions
.assertEquals(
"0",
tmp
.map(r -> ((Entity) r.getPayload()))
.filter(r -> r.getId().equals("40|f1__________::53575dc69e9ace947e02d47ecd54a7a6"))
.collect()
.get(0)
.getMeasures()
.stream()
.filter(m -> m.getId().equals("downloads"))
.collect(Collectors.toList())
.get(0)
.getUnit()
.get(0)
.getValue());
Assertions
.assertEquals(
"5",
tmp
.map(r -> ((Entity) r.getPayload()))
.filter(r -> r.getId().equals("40|f1__________::53575dc69e9ace947e02d47ecd54a7a6"))
.collect()
.get(0)
.getMeasures()
.stream()
.filter(m -> m.getId().equals("views"))
.collect(Collectors.toList())
.get(0)
.getUnit()
.get(0)
.getValue());
Assertions
.assertEquals(
"0",
tmp
.map(r -> ((Entity) r.getPayload()))
.filter(r -> r.getId().equals("40|f11_________::17eda2ff77407538fbe5d3d719b9d1c0"))
.collect()
.get(0)
.getMeasures()
.stream()
.filter(m -> m.getId().equals("downloads"))
.collect(Collectors.toList())
.get(0)
.getUnit()
.get(0)
.getValue());
Assertions
.assertEquals(
"1",
tmp
.map(r -> ((Entity) r.getPayload()))
.filter(r -> r.getId().equals("40|f11_________::17eda2ff77407538fbe5d3d719b9d1c0"))
.collect()
.get(0)
.getMeasures()
.stream()
.filter(m -> m.getId().equals("views"))
.collect(Collectors.toList())
.get(0)
.getUnit()
.get(0)
.getValue());
Assertions
.assertEquals(
"2",
tmp
.map(r -> ((Entity) r.getPayload()))
.filter(r -> r.getId().equals("40|f12_________::3085e4c6e051378ca6157fe7f0430c1f"))
.collect()
.get(0)
.getMeasures()
.stream()
.filter(m -> m.getId().equals("downloads"))
.collect(Collectors.toList())
.get(0)
.getUnit()
.get(0)
.getValue());
Assertions
.assertEquals(
"6",
tmp
.map(r -> ((Entity) r.getPayload()))
.filter(r -> r.getId().equals("40|f12_________::3085e4c6e051378ca6157fe7f0430c1f"))
.collect()
.get(0)
.getMeasures()
.stream()
.filter(m -> m.getId().equals("views"))
.collect(Collectors.toList())
.get(0)
.getUnit()
.get(0)
.getValue());
Assertions
.assertEquals(
"0",
tmp
.map(r -> ((Entity) r.getPayload()))
.filter(r -> r.getId().equals("10|d1__________::53575dc69e9ace947e02d47ecd54a7a6"))
.collect()
.get(0)
.getMeasures()
.stream()
.filter(m -> m.getId().equals("downloads"))
.collect(Collectors.toList())
.get(0)
.getUnit()
.get(0)
.getValue());
Assertions
.assertEquals(
"5",
tmp
.map(r -> ((Entity) r.getPayload()))
.filter(r -> r.getId().equals("10|d1__________::53575dc69e9ace947e02d47ecd54a7a6"))
.collect()
.get(0)
.getMeasures()
.stream()
.filter(m -> m.getId().equals("views"))
.collect(Collectors.toList())
.get(0)
.getUnit()
.get(0)
.getValue());
Assertions
.assertEquals(
"0",
tmp
.map(r -> ((Entity) r.getPayload()))
.filter(r -> r.getId().equals("10|d11_________::17eda2ff77407538fbe5d3d719b9d1c0"))
.collect()
.get(0)
.getMeasures()
.stream()
.filter(m -> m.getId().equals("downloads"))
.collect(Collectors.toList())
.get(0)
.getUnit()
.get(0)
.getValue());
Assertions
.assertEquals(
"1",
tmp
.map(r -> ((Entity) r.getPayload()))
.filter(r -> r.getId().equals("10|d11_________::17eda2ff77407538fbe5d3d719b9d1c0"))
.collect()
.get(0)
.getMeasures()
.stream()
.filter(m -> m.getId().equals("views"))
.collect(Collectors.toList())
.get(0)
.getUnit()
.get(0)
.getValue());
Assertions
.assertEquals(
"2",
tmp
.map(r -> ((Entity) r.getPayload()))
.filter(r -> r.getId().equals("10|d12_________::3085e4c6e051378ca6157fe7f0430c1f"))
.collect()
.get(0)
.getMeasures()
.stream()
.filter(m -> m.getId().equals("downloads"))
.collect(Collectors.toList())
.get(0)
.getUnit()
.get(0)
.getValue());
Assertions
.assertEquals(
"6",
tmp
.map(r -> ((Entity) r.getPayload()))
.filter(r -> r.getId().equals("10|d12_________::3085e4c6e051378ca6157fe7f0430c1f"))
.collect()
.get(0)
.getMeasures()
.stream()
.filter(m -> m.getId().equals("views"))
.collect(Collectors.toList())
.get(0)
.getUnit()
.get(0)
.getValue());
}
}
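The assertions above repeat one pattern: select the payload with a given id, pick the measure with a given id, and read its first unit value. A hypothetical helper, built only from the accessors already used above (MeasureValueHelper and getMeasureValue are illustrative names, not part of the committed test), captures that pattern.

// Hypothetical helper condensing the repeated assertion pattern above: find the
// entity with the given id and return the first unit value of the named measure.
import java.util.stream.Collectors;

import org.apache.spark.api.java.JavaRDD;

import eu.dnetlib.dhp.schema.action.AtomicAction;
import eu.dnetlib.dhp.schema.oaf.Entity;

public class MeasureValueHelper {

    public static String getMeasureValue(JavaRDD<AtomicAction> actions, String entityId, String measureId) {
        Entity entity = actions
            .map(aa -> (Entity) aa.getPayload())
            .filter(e -> e.getId().equals(entityId))
            .first();
        return entity
            .getMeasures()
            .stream()
            .filter(m -> m.getId().equals(measureId))
            .collect(Collectors.toList())
            .get(0)
            .getUnit()
            .get(0)
            .getValue();
    }
}

For instance, getMeasureValue(tmp, "50|dedup_wf_001::53575dc69e9ace947e02d47ecd54a7a6", "downloads") would return "0" according to the assertions above.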

View File

@ -0,0 +1,40 @@
doi level1 level2 level3
10.1080/09638237.2018.1466033 03 medical and health sciences 0302 clinical medicine 030212 general & internal medicine
10.1016/j.dsi.2015.10.003 03 medical and health sciences 0301 basic medicine 030105 genetics & heredity
10.1007/s10072-017-2914-9 03 medical and health sciences 0302 clinical medicine 030217 neurology & neurosurgery
10.1016/j.bspc.2021.102726 02 engineering and technology 0206 medical engineering 020601 biomedical engineering
10.1177/0306312706069439 06 humanities and the arts 0601 history and archaeology 060101 anthropology
10.1016/j.jacep.2016.05.010 03 medical and health sciences 0302 clinical medicine 030212 general & internal medicine
10.1111/anae.13418 03 medical and health sciences 0302 clinical medicine 030212 general & internal medicine
10.1142/s1793744210000168 01 natural sciences 0103 physical sciences 010306 general physics
10.1016/j.jadohealth.2019.04.029 03 medical and health sciences 0302 clinical medicine 030212 general & internal medicine
10.1109/icais50930.2021.9395847 02 engineering and technology 0202 electrical engineering, electronic engineering, information engineering 020201 artificial intelligence & image processing
10.1145/3154837 01 natural sciences 0101 mathematics 010102 general mathematics
10.1038/srep38130 03 medical and health sciences 0301 basic medicine 030106 microbiology
10.1007/s13369-017-2871-x 02 engineering and technology 0202 electrical engineering, electronic engineering, information engineering 020201 artificial intelligence & image processing
10.1063/1.4964718 03 medical and health sciences 0301 basic medicine 030104 developmental biology
10.1007/s12603-019-1276-9 03 medical and health sciences 0302 clinical medicine 030212 general & internal medicine
10.1002/cam4.1463 03 medical and health sciences 0301 basic medicine 030104 developmental biology
10.1164/rccm.201611-2290ed 03 medical and health sciences 0302 clinical medicine 030212 general & internal medicine
10.1088/1757-899x/225/1/012132 01 natural sciences 0105 earth and related environmental sciences 010504 meteorology & atmospheric sciences
10.1117/1.jmm.15.1.015501 02 engineering and technology 0210 nano-technology 021001 nanoscience & nanotechnology
10.1088/1361-6587/ab569d 01 natural sciences 0103 physical sciences 010303 astronomy & astrophysics
10.1016/j.rser.2015.11.092 02 engineering and technology 0202 electrical engineering, electronic engineering, information engineering 020209 energy
10.1016/j.jhydrol.2013.06.035 01 natural sciences 0105 earth and related environmental sciences 010504 meteorology & atmospheric sciences
10.1111/php.12892 03 medical and health sciences 0301 basic medicine 030104 developmental biology
10.1088/0264-9381/27/10/105001 01 natural sciences 0103 physical sciences 010308 nuclear & particles physics
10.1016/j.matchemphys.2018.02.039 02 engineering and technology 0210 nano-technology 021001 nanoscience & nanotechnology
10.1098/rsos.160993 03 medical and health sciences 0301 basic medicine 030104 developmental biology
10.1016/j.rinp.2017.07.054 02 engineering and technology 0209 industrial biotechnology 020901 industrial engineering & automation
10.1111/eip.12348 03 medical and health sciences 0302 clinical medicine 030227 psychiatry
10.20965/jrm.2016.p0371 02 engineering and technology 0201 civil engineering 020101 civil engineering
10.2337/dci19-0036 03 medical and health sciences 0302 clinical medicine 030212 general & internal medicine
10.1155/2018/7692913 01 natural sciences 0104 chemical sciences 010404 medicinal & biomolecular chemistry
10.1117/12.2262306 02 engineering and technology 0202 electrical engineering, electronic engineering, information engineering 020206 networking & telecommunications
10.1021/acs.jpcb.7b01885 01 natural sciences 0104 chemical sciences 010405 organic chemistry
10.1177/0033294117711131 05 social sciences 0502 economics and business 050203 business & management
10.1016/j.jrurstud.2017.08.019 05 social sciences 0502 economics and business 050203 business & management
10.1111/febs.15296 03 medical and health sciences 0301 basic medicine 030104 developmental biology
10.3923/jeasci.2017.6922.6927 05 social sciences 0505 law 050501 criminology
10.1007/s10854-017-6376-x 02 engineering and technology 0202 electrical engineering, electronic engineering, information engineering 020208 electrical & electronic engineering
10.3390/app10176095 02 engineering and technology 0202 electrical engineering, electronic engineering, information engineering 020209 energy

File diff suppressed because one or more lines are too long

View File

@ -1,277 +0,0 @@
{"code":"H2020-EU.5.g.","title":"Take due and proportional precautions in research and innovation activities by anticipating and assessing potential environmental, health and safety impacts","shortTitle":"","language":"en","classification":"SCIENCE WITH AND FOR SOCIETY | Take due and proportional precautions in research and innovation activities by anticipating and assessing potential environmental, health and safety impacts","classification_short":"Science with and for Society | Take due and proportional precautions in research and innovation activities by anticipating and assessing potential environmental, health and safety impacts"}
{"code":"H2020-EU.3.4.2.1.","title":"A substantial reduction of traffic congestion","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Better mobility, less congestion, more safety and security | A substantial reduction of traffic congestion","classification_short":"Societal Challenges | Transport | Mobility, safety and security | A substantial reduction of traffic congestion"}
{"code":"H2020-EU.3.4.5.4.","title":"ITD Airframe","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | CLEANSKY2 | ITD Airframe","classification_short":"Societal Challenges | Transport | CLEANSKY2 | ITD Airframe"}
{"code":"H2020-EU.3.3.8.1.","title":"Increase the electrical efficiency and the durability of the different fuel cells used for power production to levels which can compete with conventional technologies, while reducing costs","shortTitle":"","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | FCH2 (energy objectives) | Increase the electrical efficiency and the durability of the different fuel cells used for power production to levels which can compete with conventional technologies, while reducing costs","classification_short":"Societal Challenges | Energy | FCH2 (energy objectives) | Increase the electrical efficiency and the durability of the different fuel cells used for power production to levels which can compete with conventional technologies, while reducing costs"}
{"code":"H2020-EU.3.7.1.","title":"Fight crime, illegal trafficking and terrorism, including understanding and tackling terrorist ideas and beliefs","shortTitle":"","language":"en","classification":"Societal challenges | Secure societies - Protecting freedom and security of Europe and its citizens | Fight crime, illegal trafficking and terrorism, including understanding and tackling terrorist ideas and beliefs","classification_short":"Societal Challenges | Secure societies | Fight crime, illegal trafficking and terrorism, including understanding and tackling terrorist ideas and beliefs"}
{"code":"H2020-EU.3.4.1.1.","title":"Making aircraft, vehicles and vessels cleaner and quieter will improve environmental performance and reduce perceived noise and vibration","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Resource efficient transport that respects the environment | Making aircraft, vehicles and vessels cleaner and quieter will improve environmental performance and reduce perceived noise and vibration","classification_short":"Societal Challenges | Transport | Resource efficient transport that respects the environment | Making aircraft, vehicles and vessels cleaner and quieter will improve environmental performance and reduce perceived noise and vibration"}
{"code":"H2020-EU.1.4.3.","title":"Reinforcing European research infrastructure policy and international cooperation","shortTitle":"Research infrastructure policy and international cooperation","language":"en","classification":"Excellent science | Research Infrastructures | Reinforcing European research infrastructure policy and international cooperation","classification_short":"Excellent Science | Research Infrastructures | Research infrastructure policy and international cooperation"}
{"code":"H2020-EU.1.4.","title":"EXCELLENT SCIENCE - Research Infrastructures","shortTitle":"Research Infrastructures","language":"en","classification":"Excellent science | Research Infrastructures","classification_short":"Excellent Science | Research Infrastructures"}
{"code":"H2020-EU.3.4.6.1.","title":"Reduce the production cost of fuel cell systems to be used in transport applications, while increasing their lifetime to levels which can compete with conventional technologies","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | FCH2 (transport objectives) | Reduce the production cost of fuel cell systems to be used in transport applications, while increasing their lifetime to levels which can compete with conventional technologies","classification_short":"Societal Challenges | Transport | FCH2 (transport objectives) | Reduce the production cost of fuel cell systems to be used in transport applications, while increasing their lifetime to levels which can compete with conventional technologies"}
{"code":"H2020-EU.3.4.5.5.","title":"ITD Engines","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | CLEANSKY2 | ITD Engines","classification_short":"Societal Challenges | Transport | CLEANSKY2 | ITD Engines"}
{"code":"H2020-EU.2.1.1.7.3.","title":"Multi-disciplinary approaches for smart systems, supported by developments in holistic design and advanced manufacturing to realise self-reliant and adaptable smart systems having sophisticated interfaces and offering complex functionalities based on, for example, the seamless integration of sensing, actuating, processing, energy provision and networking","shortTitle":"","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Information and Communication Technologies (ICT) | ECSEL | Multi-disciplinary approaches for smart systems, supported by developments in holistic design and advanced manufacturing to realise self-reliant and adaptable smart systems having sophisticated interfaces and offering complex functionalities based on, for example, the seamless integration of sensing, actuating, processing, energy provision and networking","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Information and Communication Technologies | ECSEL | Multi-disciplinary approaches for smart systems, supported by developments in holistic design and advanced manufacturing to realise self-reliant and adaptable smart systems having sophisticated interfaces and offering complex functionalities based on, for example, the seamless integration of sensing, actuating, processing, energy provision and networking"}
{"code":"H2020-EU.3.1.6.1.","title":"Promoting integrated care","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Health care provision and integrated care | Promoting integrated care","classification_short":"Societal Challenges | Health | Health care provision and integrated care | Promoting integrated care"}
{"code":"H2020-EU.3.7.6.","title":"Ensure privacy and freedom, including in the Internet and enhance the societal, legal and ethical understanding of all areas of security, risk and management","shortTitle":"","language":"en","classification":"Societal challenges | Secure societies - Protecting freedom and security of Europe and its citizens | Ensure privacy and freedom, including in the Internet and enhance the societal, legal and ethical understanding of all areas of security, risk and management","classification_short":"Societal Challenges | Secure societies | Ensure privacy and freedom, including in the Internet and enhance the societal, legal and ethical understanding of all areas of security, risk and management"}
{"code":"H2020-EU.3.4.2.3.","title":"Developing new concepts of freight transport and logistics","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Better mobility, less congestion, more safety and security | Developing new concepts of freight transport and logistics","classification_short":"Societal Challenges | Transport | Mobility, safety and security | Developing new concepts of freight transport and logistics"}
{"code":"H2020-EU.3.3.2.1.","title":"Develop the full potential of wind energy","shortTitle":"","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | Low-cost, low-carbon energy supply | Develop the full potential of wind energy","classification_short":"Societal Challenges | Energy | Low-cost, low-carbon energy supply | Develop the full potential of wind energy"}
{"code":"H2020-EU.3.2.5.","title":"Cross-cutting marine and maritime research","shortTitle":"Cross-cutting marine and maritime research","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Cross-cutting marine and maritime research","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Cross-cutting marine and maritime research"}
{"code":"H2020-EU.3.4.7.","title":"SESAR JU","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | SESAR JU","classification_short":"Societal Challenges | Transport | SESAR JU"}
{"code":"H2020-EU.2.1.3.3.","title":"Management of materials components","shortTitle":"Management of materials components","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Advanced materials | Management of materials components","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Advanced materials | Management of materials components"}
{"code":"H2020-EU.3.3.3.","title":"Alternative fuels and mobile energy sources","shortTitle":"Alternative fuels and mobile energy sources","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | Alternative fuels and mobile energy sources","classification_short":"Societal Challenges | Energy | Alternative fuels and mobile energy sources"}
{"code":"H2020-EU.7.","title":"THE EUROPEAN INSTITUTE OF INNOVATION AND TECHNOLOGY (EIT)","shortTitle":"European Institute of Innovation and Technology (EIT)","language":"en","classification":"THE EUROPEAN INSTITUTE OF INNOVATION AND TECHNOLOGY (EIT)","classification_short":"European Institute of Innovation and Technology (EIT)"}
{"code":"H2020-EU.3.5.4.1.","title":"Strengthen eco-innovative technologies, processes, services and products including exploring ways to reduce the quantities of raw materials in production and consumption, and overcoming barriers in this context and boost their market uptake","shortTitle":"","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Enabling the transition towards a green economy and society through eco-innovation | Strengthen eco-innovative technologies, processes, services and products including exploring ways to reduce the quantities of raw materials in production and consumption, and overcoming barriers in this context and boost their market uptake","classification_short":"Societal Challenges | Climate and environment | A green economy and society through eco-innovation | Strengthen eco-innovative technologies, processes, services and products including exploring ways to reduce the quantities of raw materials in production and consumption, and overcoming barriers in this context and boost their market uptake"}
{"code":"H2020-EU.3.1.4.","title":"Active ageing and self-management of health","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Active ageing and self-management of health","classification_short":"Societal Challenges | Health | Active ageing and self-management of health"}
{"code":"H2020-EU.1.","title":"Excellent science","shortTitle":"Excellent Science","language":"en","classification":"Excellent science","classification_short":"Excellent Science"}
{"code":"H2020-EU.3.5.6.1.","title":"Identifying resilience levels via observations, monitoring and modelling","shortTitle":"","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Cultural heritage | Identifying resilience levels via observations, monitoring and modelling","classification_short":"Societal Challenges | Climate and environment | Cultural heritage | Identifying resilience levels via observations, monitoring and modelling"}
{"code":"H2020-EU.3.2.4.3.","title":"Supporting market development for bio-based products and processes","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Sustainable and competitive bio-based industries and supporting the development of a European bioeconomy | Supporting market development for bio-based products and processes","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Bio-based industries and supporting bio-economy | Supporting market development for bio-based products and processes"}
{"code":"H2020-EU.2.1.6.1.","title":"Enabling European competitiveness, non-dependence and innovation of the European space sector","shortTitle":"Competitiveness, non-dependence and innovation","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Space | Enabling European competitiveness, non-dependence and innovation of the European space sector","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Space | Competitiveness, non-dependence and innovation"}
{"code":"H2020-EU.4.b.","title":"Twinning of research institutions","shortTitle":"Twinning of research institutions","language":"en","classification":"SPREADING EXCELLENCE AND WIDENING PARTICIPATION | Twinning of research institutions","classification_short":"Spreading excellence and widening participation | Twinning of research institutions"}
{"code":"H2020-EU.3.1.7.6.","title":"Psychiatric diseases","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Innovative Medicines Initiative 2 (IMI2) | Psychiatric diseases","classification_short":"Societal Challenges | Health | Innovative Medicines Initiative 2 (IMI2) | Psychiatric diseases"}
{"code":"H2020-EU.3.1.2.2.","title":"Improving diagnosis and prognosis","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Preventing disease | Improving diagnosis and prognosis","classification_short":"Societal Challenges | Health | Preventing disease | Improving diagnosis and prognosis"}
{"code":"H2020-EU.3.4.5.3.","title":"IADP Fast Rotorcraft","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | CLEANSKY2 | IADP Fast Rotorcraft","classification_short":"Societal Challenges | Transport | CLEANSKY2 | IADP Fast Rotorcraft"}
{"code":"H2020-EU.3.1.3.1.","title":"Treating disease, including developing regenerative medicine","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Treating and managing disease | Treating disease, including developing regenerative medicine","classification_short":"Societal Challenges | Health | Treating and managing disease | Treating disease, including developing regenerative medicine"}
{"code":"H2020-EU.3.4.3.3.","title":"Advanced production processes","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Global leadership for the European transport industry | Advanced production processes","classification_short":"Societal Challenges | Transport | Global leadership for the European transport industry | Advanced production processes"}
{"code":"H2020-EU.3.1.7.","title":"Innovative Medicines Initiative 2 (IMI2)","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Innovative Medicines Initiative 2 (IMI2)","classification_short":"Societal Challenges | Health | Innovative Medicines Initiative 2 (IMI2)"}
{"code":"H2020-EU.3.6.3.2.","title":"Research into European countries' and regions' history, literature, art, philosophy and religions and how these have informed contemporary European diversity","shortTitle":"","language":"en","classification":"Societal challenges | Europe In A Changing World - Inclusive, Innovative And Reflective Societies | Reflective societies - cultural heritage and European identity | Research into European countries' and regions' history, literature, art, philosophy and religions and how these have informed contemporary European diversity","classification_short":"Societal Challenges | Inclusive, innovative and reflective societies | Reflective societies | Research into European countries' and regions' history, literature, art, philosophy and religions and how these have informed contemporary European diversity"}
{"code":"H2020-EU.3.5.1.2.","title":"Assess impacts, vulnerabilities and develop innovative cost-effective adaptation and risk prevention and management measures","shortTitle":"","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Fighting and adapting to climate change | Assess impacts, vulnerabilities and develop innovative cost-effective adaptation and risk prevention and management measures","classification_short":"Societal Challenges | Climate and environment | Fighting and adapting to climate change | Assess impacts, vulnerabilities and develop innovative cost-effective adaptation and risk prevention and management measures"}
{"code":"H2020-EU.3.6.1.","title":"Inclusive societies","shortTitle":"Inclusive societies","language":"en","classification":"Societal challenges | Europe In A Changing World - Inclusive, Innovative And Reflective Societies | Inclusive societies","classification_short":"Societal Challenges | Inclusive, innovative and reflective societies | Inclusive societies"}
{"code":"H2020-EU.3.2.","title":"SOCIETAL CHALLENGES - Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy","shortTitle":"Food, agriculture, forestry, marine research and bioeconomy","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy"}
{"code":"H2020-EU.2.1.6.1.2.","title":"Boost innovation between space and non-space sectors","shortTitle":"","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Space | Enabling European competitiveness, non-dependence and innovation of the European space sector | Boost innovation between space and non-space sectors","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Space | Competitiveness, non-dependence and innovation | Boost innovation between space and non-space sectors"}
{"code":"H2020-EU.2.1.3.","title":"INDUSTRIAL LEADERSHIP - Leadership in enabling and industrial technologies - Advanced materials","shortTitle":"Advanced materials","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Advanced materials","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Advanced materials"}
{"code":"H2020-EU.2.1.2.3.","title":"Developing the societal dimension of nanotechnology","shortTitle":"Societal dimension of nanotechnology","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Nanotechnologies | Developing the societal dimension of nanotechnology","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Nanotechnologies | Societal dimension of nanotechnology"}
{"code":"H2020-EU.4.","title":"SPREADING EXCELLENCE AND WIDENING PARTICIPATION","shortTitle":"Spreading excellence and widening participation","language":"en","classification":"SPREADING EXCELLENCE AND WIDENING PARTICIPATION","classification_short":"Spreading excellence and widening participation"}
{"code":"H2020-EU.3.6.1.2.","title":"Trusted organisations, practices, services and policies that are necessary to build resilient, inclusive, participatory, open and creative societies in Europe, in particular taking into account migration, integration and demographic change","shortTitle":"","language":"en","classification":"Societal challenges | Europe In A Changing World - Inclusive, Innovative And Reflective Societies | Inclusive societies | Trusted organisations, practices, services and policies that are necessary to build resilient, inclusive, participatory, open and creative societies in Europe, in particular taking into account migration, integration and demographic change","classification_short":"Societal Challenges | Inclusive, innovative and reflective societies | Inclusive societies | Trusted organisations, practices, services and policies that are necessary to build resilient, inclusive, participatory, open and creative societies in Europe, in particular taking into account migration, integration and demographic change"}
{"code":"H2020-EU.3.4.2.","title":"Better mobility, less congestion, more safety and security","shortTitle":"Mobility, safety and security","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Better mobility, less congestion, more safety and security","classification_short":"Societal Challenges | Transport | Mobility, safety and security"}
{"code":"H2020-EU.3.1.7.13.","title":"Other","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Innovative Medicines Initiative 2 (IMI2) | Other","classification_short":"Societal Challenges | Health | Innovative Medicines Initiative 2 (IMI2) | Other"}
{"code":"H2020-EU.3.3.3.3.","title":"New alternative fuels","shortTitle":"","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | Alternative fuels and mobile energy sources | New alternative fuels","classification_short":"Societal Challenges | Energy | Alternative fuels and mobile energy sources | New alternative fuels"}
{"code":"H2020-EU.2.1.3.5.","title":"Materials for creative industries, including heritage","shortTitle":"Materials for creative industries, including heritage","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Advanced materials | Materials for creative industries, including heritage","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Advanced materials | Materials for creative industries, including heritage"}
{"code":"H2020-EU.3.3.3.2.","title":"Reducing time to market for hydrogen and fuel cells technologies","shortTitle":"","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | Alternative fuels and mobile energy sources | Reducing time to market for hydrogen and fuel cells technologies","classification_short":"Societal Challenges | Energy | Alternative fuels and mobile energy sources | Reducing time to market for hydrogen and fuel cells technologies"}
{"code":"H2020-EU.5.d.","title":"Encourage citizens to engage in science through formal and informal science education, and promote the diffusion of science-based activities, namely in science centres and through other appropriate channels","shortTitle":"","language":"en","classification":"SCIENCE WITH AND FOR SOCIETY | Encourage citizens to engage in science through formal and informal science education, and promote the diffusion of science-based activities, namely in science centres and through other appropriate channels","classification_short":"Science with and for Society | Encourage citizens to engage in science through formal and informal science education, and promote the diffusion of science-based activities, namely in science centres and through other appropriate channels"}
{"code":"H2020-EU.3.1.","title":"SOCIETAL CHALLENGES - Health, demographic change and well-being","shortTitle":"Health","language":"en","classification":"Societal challenges | Health, demographic change and well-being","classification_short":"Societal Challenges | Health"}
{"code":"H2020-EU.3.5.3.1.","title":"Improve the knowledge base on the availability of raw materials","shortTitle":"","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Ensuring the sustainable supply of non-energy and non-agricultural raw materials | Improve the knowledge base on the availability of raw materials","classification_short":"Societal Challenges | Climate and environment | Supply of non-energy and non-agricultural raw materials | Improve the knowledge base on the availability of raw materials"}
{"code":"H2020-EU.3.2.1.4.","title":"Sustainable forestry","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Sustainable agriculture and forestry | Sustainable forestry","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Sustainable agriculture and forestry | Sustainable forestry"}
{"code":"H2020-EU.3.3.","title":"SOCIETAL CHALLENGES - Secure, clean and efficient energy","shortTitle":"Energy","language":"en","classification":"Societal challenges | Secure, clean and efficient energy","classification_short":"Societal Challenges | Energy"}
{"code":"H2020-EU.3.4.8.1.","title":"Innovation Programme 1 (IP1): Cost-efficient and reliable trains","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Shift2Rail JU | Innovation Programme 1 (IP1): Cost-efficient and reliable trains","classification_short":"Societal Challenges | Transport | Shift2Rail JU | Innovation Programme 1 (IP1): Cost-efficient and reliable trains"}
{"code":"H2020-EU.2.3.2.1.","title":"Support for research intensive SMEs","shortTitle":"Support for research intensive SMEs","language":"en","classification":"Industrial leadership | Innovation In SMEs | Specific support | Support for research intensive SMEs","classification_short":"Industrial Leadership | Innovation in SMEs | Specific support | Support for research intensive SMEs"}
{"code":"H2020-EU.2.1.3.2.","title":"Materials development and transformation","shortTitle":"Materials development and transformation","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Advanced materials | Materials development and transformation","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Advanced materials | Materials development and transformation"}
{"code":"H2020-EU.1.4.1.3.","title":"Development, deployment and operation of ICT-based e-infrastructures","shortTitle":"","language":"en","classification":"Excellent science | Research Infrastructures | Developing the European research infrastructures for 2020 and beyond | Development, deployment and operation of ICT-based e-infrastructures","classification_short":"Excellent Science | Research Infrastructures | Research infrastructures for 2020 and beyond | Development, deployment and operation of ICT-based e-infrastructures"}
{"code":"H2020-EU.3.5.4.2.","title":"Support innovative policies and societal changes","shortTitle":"","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Enabling the transition towards a green economy and society through eco-innovation | Support innovative policies and societal changes","classification_short":"Societal Challenges | Climate and environment | A green economy and society through eco-innovation | Support innovative policies and societal changes"}
{"code":"H2020-EU.2.1.3.6.","title":"Metrology, characterisation, standardisation and quality control","shortTitle":"Metrology, characterisation, standardisation and quality control","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Advanced materials | Metrology, characterisation, standardisation and quality control","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Advanced materials | Metrology, characterisation, standardisation and quality control"}
{"code":"H2020-EU.3.4.5.8.","title":"ECO Transverse Area","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | CLEANSKY2 | ECO Transverse Area","classification_short":"Societal Challenges | Transport | CLEANSKY2 | ECO Transverse Area"}
{"code":"H2020-EU.5.f.","title":"Develop the governance for the advancement of responsible research and innovation by all stakeholders, which is sensitive to society needs and demands and promote an ethics framework for research and innovation","shortTitle":"","language":"en","classification":"SCIENCE WITH AND FOR SOCIETY | Develop the governance for the advancement of responsible research and innovation by all stakeholders, which is sensitive to society needs and demands and promote an ethics framework for research and innovation","classification_short":"Science with and for Society | Develop the governance for the advancement of responsible research and innovation by all stakeholders, which is sensitive to society needs and demands and promote an ethics framework for research and innovation"}
{"code":"H2020-EU.5.h.","title":"Improving knowledge on science communication in order to improve the quality and effectiveness of interactions between scientists, general media and the public","shortTitle":"","language":"en","classification":"SCIENCE WITH AND FOR SOCIETY | Improving knowledge on science communication in order to improve the quality and effectiveness of interactions between scientists, general media and the public","classification_short":"Science with and for Society | Improving knowledge on science communication in order to improve the quality and effectiveness of interactions between scientists, general media and the public"}
{"code":"H2020-EU.2.1.1.7.1.","title":"Design technologies, process and integration, equipment, materials and manufacturing for micro- and nanoelectronics while targeting miniaturisation, diversification and differentiation, heterogeneous integration","shortTitle":"","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Information and Communication Technologies (ICT) | ECSEL | Design technologies, process and integration, equipment, materials and manufacturing for micro- and nanoelectronics while targeting miniaturisation, diversification and differentiation, heterogeneous integration","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Information and Communication Technologies | ECSEL | Design technologies, process and integration, equipment, materials and manufacturing for micro- and nanoelectronics while targeting miniaturisation, diversification and differentiation, heterogeneous integration"}
{"code":"H2020-EU.3.7.5.","title":"Increase Europe's resilience to crises and disasters","shortTitle":"","language":"en","classification":"Societal challenges | Secure societies - Protecting freedom and security of Europe and its citizens | Increase Europe's resilience to crises and disasters","classification_short":"Societal Challenges | Secure societies | Increase Europe's resilience to crises and disasters"}
{"code":"H2020-EU.1.4.2.2.","title":"Strengthening the human capital of research infrastructures","shortTitle":"","language":"en","classification":"Excellent science | Research Infrastructures | Fostering the innovation potential of research infrastructures and their human resources | Strengthening the human capital of research infrastructures","classification_short":"Excellent Science | Research Infrastructures | Research infrastructures and their human resources | Strengthening the human capital of research infrastructures"}
{"code":"H2020-EU.3.4.1.2.","title":"Developing smart equipment, infrastructures and services","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Resource efficient transport that respects the environment | Developing smart equipment, infrastructures and services","classification_short":"Societal Challenges | Transport | Resource efficient transport that respects the environment | Developing smart equipment, infrastructures and services"}
{"code":"H2020-EU.2.3.2.2.","title":"Enhancing the innovation capacity of SMEs","shortTitle":"Enhancing the innovation capacity of SMEs","language":"en","classification":"Industrial leadership | Innovation In SMEs | Specific support | Enhancing the innovation capacity of SMEs","classification_short":"Industrial Leadership | Innovation in SMEs | Specific support | Enhancing the innovation capacity of SMEs"}
{"code":"H2020-EU.1.3.5.","title":"Specific support and policy actions","shortTitle":"MSCA Specific support","language":"en","classification":"Excellent science | Marie Skłodowska-Curie Actions | Specific support and policy actions","classification_short":"Excellent Science | Marie-Sklodowska-Curie Actions | MSCA Specific support"}
{"code":"H2020-EU.3.2.3.3.","title":"Boosting marine and maritime innovation through biotechnology","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Unlocking the potential of aquatic living resources | Boosting marine and maritime innovation through biotechnology","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Potential of aquatic living resources | Boosting marine and maritime innovation through biotechnology"}
{"code":"H2020-EU.3.2.1.2.","title":"Providing ecosystems services and public goods","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Sustainable agriculture and forestry | Providing ecosystems services and public goods","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Sustainable agriculture and forestry | Providing ecosystems services and public goods"}
{"code":"H2020-EU.2.3.2.3.","title":"Supporting market-driven innovation","shortTitle":"Supporting market-driven innovation","language":"en","classification":"Industrial leadership | Innovation In SMEs | Specific support | Supporting market-driven innovation","classification_short":"Industrial Leadership | Innovation in SMEs | Specific support | Supporting market-driven innovation"}
{"code":"H2020-EU.5.a.","title":"Make scientific and technological careers attractive to young students, and forster sustainable interaction between schools, research institutions, industry and civil society organisations","shortTitle":"","language":"en","classification":"SCIENCE WITH AND FOR SOCIETY | Make scientific and technological careers attractive to young students, and forster sustainable interaction between schools, research institutions, industry and civil society organisations","classification_short":"Science with and for Society | Make scientific and technological careers attractive to young students, and forster sustainable interaction between schools, research institutions, industry and civil society organisations"}
{"code":"H2020-EU.3.1.7.9.","title":"Ageing-associated diseases","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Innovative Medicines Initiative 2 (IMI2) | Ageing-associated diseases","classification_short":"Societal Challenges | Health | Innovative Medicines Initiative 2 (IMI2) | Ageing-associated diseases"}
{"code":"H2020-EU.2.2.1.","title":"The Debt facility providing debt finance for R&I: 'Union loan and guarantee service for research and innovation'","shortTitle":"Debt facility","language":"en","classification":"Industrial leadership | Access to risk finance | The Debt facility providing debt finance for R&I: 'Union loan and guarantee service for research and innovation'","classification_short":"Industrial Leadership | Access to risk finance | Debt facility"}
{"code":"H2020-Euratom-1.8.","title":"Ensure availability and use of research infrastructures of pan_european relevance","shortTitle":"","language":"en","classification":"Euratom | Indirect actions | Ensure availability and use of research infrastructures of pan_european relevance","classification_short":"Euratom | Indirect actions | Ensure availability and use of research infrastructures of pan_european relevance"}
{"code":"H2020-EU.3.2.2.1.","title":"Informed consumer choices","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Sustainable and competitive agri-food sector for a safe and healthy diet | Informed consumer choices","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Sustainable and competitive agri-food sector for a safe and healthy diet | Informed consumer choices"}
{"code":"H2020-EU.3.7.","title":"Secure societies - Protecting freedom and security of Europe and its citizens","shortTitle":"Secure societies","language":"en","classification":"Societal challenges | Secure societies - Protecting freedom and security of Europe and its citizens","classification_short":"Societal Challenges | Secure societies"}
{"code":"H2020-EU.1.3.4.","title":"Increasing structural impact by co-funding activities","shortTitle":"MSCA Co-funding","language":"en","classification":"Excellent science | Marie Skłodowska-Curie Actions | Increasing structural impact by co-funding activities","classification_short":"Excellent Science | Marie-Sklodowska-Curie Actions | MSCA Co-funding"}
{"code":"H2020-EU.2.1.","title":"INDUSTRIAL LEADERSHIP - Leadership in enabling and industrial technologies","shortTitle":"Leadership in enabling and industrial technologies (LEIT)","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT)"}
{"code":"H2020-EU.2.1.3.4.","title":"Materials for a sustainable, resource-efficient and low-emission industry","shortTitle":"Materials for a resource-efficient and low-emission industry","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Advanced materials | Materials for a sustainable, resource-efficient and low-emission industry","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Advanced materials | Materials for a resource-efficient and low-emission industry"}
{"code":"H2020-EU.3.4.5.7.","title":"Small Air Transport (SAT) Transverse Area","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | CLEANSKY2 | Small Air Transport (SAT) Transverse Area","classification_short":"Societal Challenges | Transport | CLEANSKY2 | Small Air Transport (SAT) Transverse Area"}
{"code":"H2020-EU.3.4.8.3.","title":"Innovation Programme 3: Cost Efficient and Reliable High Capacity Infrastructure","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Shift2Rail JU | Innovation Programme 3: Cost Efficient and Reliable High Capacity Infrastructure","classification_short":"Societal Challenges | Transport | Shift2Rail JU | Innovation Programme 3: Cost Efficient and Reliable High Capacity Infrastructure"}
{"code":"H2020-Euratom-1.1.","title":"Support safe operation of nuclear systems","shortTitle":"","language":"en","classification":"Euratom | Indirect actions | Support safe operation of nuclear systems","classification_short":"Euratom | Indirect actions | Support safe operation of nuclear systems"}
{"code":"H2020-EU.2.3.1.","title":" Mainstreaming SME support, especially through a dedicated instrument","shortTitle":"Mainstreaming SME support","language":"en","classification":"Industrial leadership | Innovation In SMEs | Mainstreaming SME support, especially through a dedicated instrument","classification_short":"Industrial Leadership | Innovation in SMEs | Mainstreaming SME support"}
{"code":"H2020-EU.1.4.3.1.","title":"Reinforcing European policy for research infrastructures","shortTitle":"","language":"en","classification":"Excellent science | Research Infrastructures | Reinforcing European research infrastructure policy and international cooperation | Reinforcing European policy for research infrastructures","classification_short":"Excellent Science | Research Infrastructures | Research infrastructure policy and international cooperation | Reinforcing European policy for research infrastructures"}
{"code":"H2020-Euratom-1.3.","title":"Support the development and sustainability of nuclear competences at Union level","shortTitle":"","language":"en","classification":"Euratom | Indirect actions | Support the development and sustainability of nuclear competences at Union level","classification_short":"Euratom | Indirect actions | Support the development and sustainability of nuclear competences at Union level"}
{"code":"H2020-EU.3.1.7.1.","title":"Antimicrobial resistance","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Innovative Medicines Initiative 2 (IMI2) | Antimicrobial resistance","classification_short":"Societal Challenges | Health | Innovative Medicines Initiative 2 (IMI2) | Antimicrobial resistance"}
{"code":"H2020-EU.3.7.4.","title":"Improve cyber security","shortTitle":"","language":"en","classification":"Societal challenges | Secure societies - Protecting freedom and security of Europe and its citizens | Improve cyber security","classification_short":"Societal Challenges | Secure societies | Improve cyber security"}
{"code":"H2020-EU.2.1.1.7.2.","title":"Processes, methods, tools and platforms, reference designs and architectures, for software and/or control-intensive embedded/cyber-physical systems, addressing seamless connectivity and interoperability, functional safety, high availability, and security for professional and consumer type applications, and connected services","shortTitle":"","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Information and Communication Technologies (ICT) | ECSEL | Processes, methods, tools and platforms, reference designs and architectures, for software and/or control-intensive embedded/cyber-physical systems, addressing seamless connectivity and interoperability, functional safety, high availability, and security for professional and consumer type applications, and connected services","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Information and Communication Technologies | ECSEL | Processes, methods, tools and platforms, reference designs and architectures, for software and/or control-intensive embedded/cyber-physical systems, addressing seamless connectivity and interoperability, functional safety, high availability, and security for professional and consumer type applications, and connected services"}
{"code":"H2020-EU.3.5.4.","title":"Enabling the transition towards a green economy and society through eco-innovation","shortTitle":"A green economy and society through eco-innovation","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Enabling the transition towards a green economy and society through eco-innovation","classification_short":"Societal Challenges | Climate and environment | A green economy and society through eco-innovation"}
{"code":"H2020-EU.3.5.3.2.","title":"Promote the sustainable supply and use of raw materials, including mineral resources, from land and sea, covering exploration, extraction, processing, re-use, recycling and recovery","shortTitle":"","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Ensuring the sustainable supply of non-energy and non-agricultural raw materials | Promote the sustainable supply and use of raw materials, including mineral resources, from land and sea, covering exploration, extraction, processing, re-use, recycling and recovery","classification_short":"Societal Challenges | Climate and environment | Supply of non-energy and non-agricultural raw materials | Promote the sustainable supply and use of raw materials, including mineral resources, from land and sea, covering exploration, extraction, processing, re-use, recycling and recovery"}
{"code":"H2020-EU.3.4.5.10.","title":"Thematic Topics","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | CLEANSKY2 | Thematic Topics","classification_short":"Societal Challenges | Transport | CLEANSKY2 | Thematic Topics"}
{"code":"H2020-EU.3.1.5.1.","title":"Improving halth information and better use of health data","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Methods and data | Improving halth information and better use of health data","classification_short":"Societal Challenges | Health | Methods and data | Improving halth information and better use of health data"}
{"code":"H2020-EU.3.3.3.1.","title":"Make bio-energy more competitive and sustainable","shortTitle":"","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | Alternative fuels and mobile energy sources | Make bio-energy more competitive and sustainable","classification_short":"Societal Challenges | Energy | Alternative fuels and mobile energy sources | Make bio-energy more competitive and sustainable"}
{"code":"H2020-EU.3.6.2.1.","title":"Strengthen the evidence base and support for the Innovation Union and ERA","shortTitle":"","language":"en","classification":"Societal challenges | Europe In A Changing World - Inclusive, Innovative And Reflective Societies | Innovative societies | Strengthen the evidence base and support for the Innovation Union and ERA","classification_short":"Societal Challenges | Inclusive, innovative and reflective societies | Innovative societies | Strengthen the evidence base and support for the Innovation Union and ERA"}
{"code":"H2020-EU.3.1.7.12.","title":"Vaccine","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Innovative Medicines Initiative 2 (IMI2) | Vaccine","classification_short":"Societal Challenges | Health | Innovative Medicines Initiative 2 (IMI2) | Vaccine"}
{"code":"H2020-EU.3.5.4.3.","title":"Measure and assess progress towards a green economy","shortTitle":"","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Enabling the transition towards a green economy and society through eco-innovation | Measure and assess progress towards a green economy","classification_short":"Societal Challenges | Climate and environment | A green economy and society through eco-innovation | Measure and assess progress towards a green economy"}
{"code":"H2020-EU.3.4.8.5.","title":"Innovation Programme 5: Technologies for sustainable and attractive European rail freight","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Shift2Rail JU | Innovation Programme 5: Technologies for sustainable and attractive European rail freight","classification_short":"Societal Challenges | Transport | Shift2Rail JU | Innovation Programme 5: Technologies for sustainable and attractive European rail freight"}
{"code":"H2020-EU.3.5.4.4.","title":"Foster resource efficiency through digital systems","shortTitle":"","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Enabling the transition towards a green economy and society through eco-innovation | Foster resource efficiency through digital systems","classification_short":"Societal Challenges | Climate and environment | A green economy and society through eco-innovation | Foster resource efficiency through digital systems"}
{"code":"H2020-EU.3.3.8.3.","title":"Demonstrate on a large scale the feasibility of using hydrogen to support integration of renewable energy sources into the energy systems, including through its use as a competitive energy storage medium for electricity produced from renewable energy sources","shortTitle":"","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | FCH2 (energy objectives) | Demonstrate on a large scale the feasibility of using hydrogen to support integration of renewable energy sources into the energy systems, including through its use as a competitive energy storage medium for electricity produced from renewable energy sources","classification_short":"Societal Challenges | Energy | FCH2 (energy objectives) | Demonstrate on a large scale the feasibility of using hydrogen to support integration of renewable energy sources into the energy systems, including through its use as a competitive energy storage medium for electricity produced from renewable energy sources"}
{"code":"H2020-Euratom","title":"Euratom","shortTitle":"","language":"en","classification":"Euratom","classification_short":"Euratom"}
{"code":"H2020-EU.3.5.6.2.","title":"Providing for a better understanding on how communities perceive and respond to climate change and seismic and volcanic hazards","shortTitle":"","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Cultural heritage | Providing for a better understanding on how communities perceive and respond to climate change and seismic and volcanic hazards","classification_short":"Societal Challenges | Climate and environment | Cultural heritage | Providing for a better understanding on how communities perceive and respond to climate change and seismic and volcanic hazards"}
{"code":"H2020-EU.3.2.5.2.","title":"Develop the potential of marine resources through an integrated approach","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Cross-cutting marine and maritime research | Develop the potential of marine resources through an integrated approach","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Cross-cutting marine and maritime research | Develop the potential of marine resources through an integrated approach"}
{"code":"H2020-EU.2.1.1.5.","title":"Advanced interfaces and robots: Robotics and smart spaces","shortTitle":"","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Information and Communication Technologies (ICT) | Advanced interfaces and robots: Robotics and smart spaces","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Information and Communication Technologies | Advanced interfaces and robots: Robotics and smart spaces"}
{"code":"H2020-EU.3.3.5.","title":"New knowledge and technologies","shortTitle":"New knowledge and technologies","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | New knowledge and technologies","classification_short":"Societal Challenges | Energy | New knowledge and technologies"}
{"code":"H2020-EU.1.2.2.","title":"FET Proactive","shortTitle":"FET Proactive","language":"en","classification":"Excellent science | Future and Emerging Technologies (FET) | FET Proactive","classification_short":"Excellent Science | Future and Emerging Technologies (FET) | FET Proactive"}
{"code":"H2020-EU.3.6.1.3.","title":"Europe's role as a global actor, notably regarding human rights and global justice","shortTitle":"","language":"en","classification":"Societal challenges | Europe In A Changing World - Inclusive, Innovative And Reflective Societies | Inclusive societies | Europe's role as a global actor, notably regarding human rights and global justice","classification_short":"Societal Challenges | Inclusive, innovative and reflective societies | Inclusive societies | Europe's role as a global actor, notably regarding human rights and global justice"}
{"code":"H2020-EU.2.1.4.1.","title":"Boosting cutting-edge biotechnologies as a future innovation driver","shortTitle":"Cutting-edge biotechnologies as future innovation driver","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Biotechnology | Boosting cutting-edge biotechnologies as a future innovation driver","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Biotechnology | Cutting-edge biotechnologies as future innovation driver"}
{"code":"H2020-EU.3.1.3.","title":"Treating and managing disease","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Treating and managing disease","classification_short":"Societal Challenges | Health | Treating and managing disease"}
{"code":"H2020-EU.3.3.4.","title":"A single, smart European electricity grid","shortTitle":"A single, smart European electricity grid","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | A single, smart European electricity grid","classification_short":"Societal Challenges | Energy | A single, smart European electricity grid"}
{"code":"H2020-EU.3.2.6.","title":"Bio-based Industries Joint Technology Initiative (BBI-JTI)","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Bio-based Industries Joint Technology Initiative (BBI-JTI)","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Bio-based Industries Joint Technology Initiative (BBI-JTI)"}
{"code":"H2020-EU.1.3.2.","title":"Nurturing excellence by means of cross-border and cross-sector mobility","shortTitle":"MSCA Mobility","language":"en","classification":"Excellent science | Marie Skłodowska-Curie Actions | Nurturing excellence by means of cross-border and cross-sector mobility","classification_short":"Excellent Science | Marie-Sklodowska-Curie Actions | MSCA Mobility"}
{"code":"H2020-EU.2.1.3.7.","title":"Optimisation of the use of materials","shortTitle":"Optimisation of the use of materials","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Advanced materials | Optimisation of the use of materials","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Advanced materials | Optimisation of the use of materials"}
{"code":"H2020-EU.2.1.2.4.","title":"Efficient and sustainable synthesis and manufacturing of nanomaterials, components and systems","shortTitle":"Synthesis and manufacturing of nanomaterials, components and systems","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Nanotechnologies | Efficient and sustainable synthesis and manufacturing of nanomaterials, components and systems","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Nanotechnologies | Synthesis and manufacturing of nanomaterials, components and systems"}
{"code":"H2020-EU.1.4.1.","title":"Developing the European research infrastructures for 2020 and beyond","shortTitle":"Research infrastructures for 2020 and beyond","language":"en","classification":"Excellent science | Research Infrastructures | Developing the European research infrastructures for 2020 and beyond","classification_short":"Excellent Science | Research Infrastructures | Research infrastructures for 2020 and beyond"}
{"code":"H2020-EU.3.1.1.1.","title":"Understanding the determinants of health, improving health promotion and disease prevention","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Understanding health, wellbeing and disease | Understanding the determinants of health, improving health promotion and disease prevention","classification_short":"Societal Challenges | Health | Understanding health, wellbeing and disease | Understanding the determinants of health, improving health promotion and disease prevention"}
{"code":"H2020-EU.5.c.","title":"Integrate society in science and innovation issues, policies and activities in order to integrate citizens' interests and values and to increase the quality, relevance, social acceptability and sustainability of research and innovation outcomes in various fields of activity from social innovation to areas such as biotechnology and nanotechnology","shortTitle":"","language":"en","classification":"SCIENCE WITH AND FOR SOCIETY | Integrate society in science and innovation issues, policies and activities in order to integrate citizens' interests and values and to increase the quality, relevance, social acceptability and sustainability of research and innovation outcomes in various fields of activity from social innovation to areas such as biotechnology and nanotechnology","classification_short":"Science with and for Society | Integrate society in science and innovation issues, policies and activities in order to integrate citizens' interests and values and to increase the quality, relevance, social acceptability and sustainability of research and innovation outcomes in various fields of activity from social innovation to areas such as biotechnology and nanotechnology"}
{"code":"H2020-EU.5.","title":"SCIENCE WITH AND FOR SOCIETY","shortTitle":"Science with and for Society","language":"en","classification":"SCIENCE WITH AND FOR SOCIETY","classification_short":"Science with and for Society"}
{"code":"H2020-EU.3.5.3.3.","title":"Find alternatives for critical raw materials","shortTitle":"","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Ensuring the sustainable supply of non-energy and non-agricultural raw materials | Find alternatives for critical raw materials","classification_short":"Societal Challenges | Climate and environment | Supply of non-energy and non-agricultural raw materials | Find alternatives for critical raw materials"}
{"code":"H2020-EU.3.2.3.1.","title":"Developing sustainable and environmentally-friendly fisheries","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Unlocking the potential of aquatic living resources | Developing sustainable and environmentally-friendly fisheries","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Potential of aquatic living resources | Developing sustainable and environmentally-friendly fisheries"}
{"code":"H2020-EU.2.1.2.","title":"INDUSTRIAL LEADERSHIP - Leadership in enabling and industrial technologies Nanotechnologies","shortTitle":"Nanotechnologies","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Nanotechnologies","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Nanotechnologies"}
{"code":"H2020-EU.3.4.3.2.","title":"On board, smart control systems","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Global leadership for the European transport industry | On board, smart control systems","classification_short":"Societal Challenges | Transport | Global leadership for the European transport industry | On board, smart control systems"}
{"code":"H2020-EU.3.2.4.1.","title":"Fostering the bio-economy for bio-based industries","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Sustainable and competitive bio-based industries and supporting the development of a European bioeconomy | Fostering the bio-economy for bio-based industries","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Bio-based industries and supporting bio-economy | Fostering the bio-economy for bio-based industries"}
{"code":"H2020-EU.3.1.6.2.","title":"Optimising the efficiency and effectiveness of healthcare provision and reducing inequalities by evidence based decision making and dissemination of best practice, and innovative technologies and approaches","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Health care provision and integrated care | Optimising the efficiency and effectiveness of healthcare provision and reducing inequalities by evidence based decision making and dissemination of best practice, and innovative technologies and approaches","classification_short":"Societal Challenges | Health | Health care provision and integrated care | Optimising the efficiency and effectiveness of healthcare provision and reducing inequalities by evidence based decision making and dissemination of best practice, and innovative technologies and approaches"}
{"code":"H2020-EU.2.1.5.","title":"INDUSTRIAL LEADERSHIP - Leadership in enabling and industrial technologies - Advanced manufacturing and processing","shortTitle":"Advanced manufacturing and processing","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Advanced manufacturing and processing","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Advanced manufacturing and processing"}
{"code":"H2020-EU.3.5.2.2.","title":"Developing integrated approaches to address water-related challenges and the transition to sustainable management and use of water resources and services","shortTitle":"","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Protection of the environment, sustainable management of natural resources, water, biodiversity and ecosystems | Developing integrated approaches to address water-related challenges and the transition to sustainable management and use of water resources and services","classification_short":"Societal Challenges | Climate and environment | Protection of the environment | Developing integrated approaches to address water-related challenges and the transition to sustainable management and use of water resources and services"}
{"code":"H2020-EU.3.1.7.3.","title":"Cardiovascular diseases","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Innovative Medicines Initiative 2 (IMI2) | Cardiovascular diseases","classification_short":"Societal Challenges | Health | Innovative Medicines Initiative 2 (IMI2) | Cardiovascular diseases"}
{"code":"H2020-EU.3.3.8.2.","title":"Increase the energy efficiency of production of hydrogen mainly from water electrolysis and renewable sources while reducing operating and capital costs, so that the combined system of the hydrogen production and the conversion using the fuel cell system can compete with the alternatives for electricity production available on the market","shortTitle":"","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | FCH2 (energy objectives) | Increase the energy efficiency of production of hydrogen mainly from water electrolysis and renewable sources while reducing operating and capital costs, so that the combined system of the hydrogen production and the conversion using the fuel cell system can compete with the alternatives for electricity production available on the market","classification_short":"Societal Challenges | Energy | FCH2 (energy objectives) | Increase the energy efficiency of production of hydrogen mainly from water electrolysis and renewable sources while reducing operating and capital costs, so that the combined system of the hydrogen production and the conversion using the fuel cell system can compete with the alternatives for electricity production available on the market"}
{"code":"H2020-EU.2.1.6.3.","title":"Enabling exploitation of space data","shortTitle":"Enabling exploitation of space data","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Space | Enabling exploitation of space data","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Space | Enabling exploitation of space data"}
{"code":"H2020-EU.2.1.2.5.","title":"Developing and standardisation of capacity-enhancing techniques, measuring methods and equipment","shortTitle":"Capacity-enhancing techniques, measuring methods and equipment","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Nanotechnologies | Developing and standardisation of capacity-enhancing techniques, measuring methods and equipment","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Nanotechnologies | Capacity-enhancing techniques, measuring methods and equipment"}
{"code":"H2020-EU.3.6.2.","title":"Innovative societies","shortTitle":"Innovative societies","language":"en","classification":"Societal challenges | Europe In A Changing World - Inclusive, Innovative And Reflective Societies | Innovative societies","classification_short":"Societal Challenges | Inclusive, innovative and reflective societies | Innovative societies"}
{"code":"H2020-EU.3.1.2.1.","title":"Developing effective prevention and screening programmes and improving the assessment of disease susceptibility","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Preventing disease | Developing effective prevention and screening programmes and improving the assessment of disease susceptibility","classification_short":"Societal Challenges | Health | Preventing disease | Developing effective prevention and screening programmes and improving the assessment of disease susceptibility"}
{"code":"H2020-EU.3.6.1.4.","title":"The promotion of sustainable and inclusive environments through innovative spatial and urban planning and design","shortTitle":"","language":"en","classification":"Societal challenges | Europe In A Changing World - Inclusive, Innovative And Reflective Societies | Inclusive societies | The promotion of sustainable and inclusive environments through innovative spatial and urban planning and design","classification_short":"Societal Challenges | Inclusive, innovative and reflective societies | Inclusive societies | The promotion of sustainable and inclusive environments through innovative spatial and urban planning and design"}
{"code":"H2020-EU.3.3.2.4.","title":"Develop geothermal, hydro, marine and other renewable energy options","shortTitle":"","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | Low-cost, low-carbon energy supply | Develop geothermal, hydro, marine and other renewable energy options","classification_short":"Societal Challenges | Energy | Low-cost, low-carbon energy supply | Develop geothermal, hydro, marine and other renewable energy options"}
{"code":"H2020-EU.5.b.","title":"Promote gender equality in particular by supporting structural change in the organisation of research institutions and in the content and design of research activities","shortTitle":"","language":"en","classification":"SCIENCE WITH AND FOR SOCIETY | Promote gender equality in particular by supporting structural change in the organisation of research institutions and in the content and design of research activities","classification_short":"Science with and for Society | Promote gender equality in particular by supporting structural change in the organisation of research institutions and in the content and design of research activities"}
{"code":"H2020-EU.1.3.3.","title":"Stimulating innovation by means of cross-fertilisation of knowledge","shortTitle":"MSCA Knowledge","language":"en","classification":"Excellent science | Marie Skłodowska-Curie Actions | Stimulating innovation by means of cross-fertilisation of knowledge","classification_short":"Excellent Science | Marie-Sklodowska-Curie Actions | MSCA Knowledge"}
{"code":"H2020-EU.3.1.4.2.","title":"Individual awareness and empowerment for self-management of health","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Active ageing and self-management of health | Individual awareness and empowerment for self-management of health","classification_short":"Societal Challenges | Health | Active ageing and self-management of health | Individual awareness and empowerment for self-management of health"}
{"code":"H2020-EU.3.1.7.8.","title":"Immune-mediated diseases","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Innovative Medicines Initiative 2 (IMI2) | Immune-mediated diseases","classification_short":"Societal Challenges | Health | Innovative Medicines Initiative 2 (IMI2) | Immune-mediated diseases"}
{"code":"H2020-EU.3.4.","title":"SOCIETAL CHALLENGES - Smart, Green And Integrated Transport","shortTitle":"Transport","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport","classification_short":"Societal Challenges | Transport"}
{"code":"H2020-EU.3.2.6.1.","title":"Sustainable and competitive bio-based industries and supporting the development of a European bio-economy","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Bio-based Industries Joint Technology Initiative (BBI-JTI) | Sustainable and competitive bio-based industries and supporting the development of a European bio-economy","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Bio-based Industries Joint Technology Initiative (BBI-JTI) | Sustainable and competitive bio-based industries and supporting the development of a European bio-economy"}
{"code":"H2020-EU.2.1.2.1.","title":"Developing next generation nanomaterials, nanodevices and nanosystems ","shortTitle":"Next generation nanomaterials, nanodevices and nanosystems","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Nanotechnologies | Developing next generation nanomaterials, nanodevices and nanosystems ","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Nanotechnologies | Next generation nanomaterials, nanodevices and nanosystems"}
{"code":"H2020-Euratom-1.5.","title":"Move toward demonstration of feasibility of fusion as a power source by exploiting existing and future fusion facilities","shortTitle":"","language":"en","classification":"Euratom | Indirect actions | Move toward demonstration of feasibility of fusion as a power source by exploiting existing and future fusion facilities","classification_short":"Euratom | Indirect actions | Move toward demonstration of feasibility of fusion as a power source by exploiting existing and future fusion facilities"}
{"code":"H2020-EU.3.5.","title":"SOCIETAL CHALLENGES - Climate action, Environment, Resource Efficiency and Raw Materials","shortTitle":"Climate and environment","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials","classification_short":"Societal Challenges | Climate and environment"}
{"code":"H2020-EU.2.1.1.6.","title":"Micro- and nanoelectronics and photonics: Key enabling technologies related to micro- and nanoelectronics and to photonics, covering also quantum technologies","shortTitle":"","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Information and Communication Technologies (ICT) | Micro- and nanoelectronics and photonics: Key enabling technologies related to micro- and nanoelectronics and to photonics, covering also quantum technologies","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Information and Communication Technologies | Micro- and nanoelectronics and photonics: Key enabling technologies related to micro- and nanoelectronics and to photonics, covering also quantum technologies"}
{"code":"H2020-EU.3.4.2.4.","title":"Reducing accident rates, fatalities and casualties and improving security","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Better mobility, less congestion, more safety and security | Reducing accident rates, fatalities and casualties and improving security","classification_short":"Societal Challenges | Transport | Mobility, safety and security | Reducing accident rates, fatalities and casualties and improving security"}
{"code":"H2020-EU.3.6.2.2.","title":"Explore new forms of innovation, with special emphasis on social innovation and creativity and understanding how all forms of innovation are developed, succeed or fail","shortTitle":"","language":"en","classification":"Societal challenges | Europe In A Changing World - Inclusive, Innovative And Reflective Societies | Innovative societies | Explore new forms of innovation, with special emphasis on social innovation and creativity and understanding how all forms of innovation are developed, succeed or fail","classification_short":"Societal Challenges | Inclusive, innovative and reflective societies | Innovative societies | Explore new forms of innovation, with special emphasis on social innovation and creativity and understanding how all forms of innovation are developed, succeed or fail"}
{"code":"H2020-EU.3.5.1.1.","title":"Improve the understanding of climate change and the provision of reliable climate projections","shortTitle":"","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Fighting and adapting to climate change | Improve the understanding of climate change and the provision of reliable climate projections","classification_short":"Societal Challenges | Climate and environment | Fighting and adapting to climate change | Improve the understanding of climate change and the provision of reliable climate projections"}
{"code":"H2020-EU.3.4.3.4.","title":"Exploring entirely new transport concepts","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Global leadership for the European transport industry | Exploring entirely new transport concepts","classification_short":"Societal Challenges | Transport | Global leadership for the European transport industry | Exploring entirely new transport concepts"}
{"code":"H2020-EU.3.5.2.1.","title":"Further our understanding of biodiversity and the functioning of ecosystems, their interactions with social systems and their role in sustaining the economy and human well-being","shortTitle":"","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Protection of the environment, sustainable management of natural resources, water, biodiversity and ecosystems | Further our understanding of biodiversity and the functioning of ecosystems, their interactions with social systems and their role in sustaining the economy and human well-being","classification_short":"Societal Challenges | Climate and environment | Protection of the environment | Further our understanding of biodiversity and the functioning of ecosystems, their interactions with social systems and their role in sustaining the economy and human well-being"}
{"code":"H2020-EU.3.2.2.3.","title":"A sustainable and competitive agri-food industry","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Sustainable and competitive agri-food sector for a safe and healthy diet | A sustainable and competitive agri-food industry","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Sustainable and competitive agri-food sector for a safe and healthy diet | A sustainable and competitive agri-food industry"}
{"code":"H2020-EU.1.4.1.1.","title":"Developing new world-class research infrastructures","shortTitle":"","language":"en","classification":"Excellent science | Research Infrastructures | Developing the European research infrastructures for 2020 and beyond | Developing new world-class research infrastructures","classification_short":"Excellent Science | Research Infrastructures | Research infrastructures for 2020 and beyond | Developing new world-class research infrastructures"}
{"code":"H2020-EU.3.1.2.3.","title":"Developing better preventive and therapeutic vaccines","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Preventing disease | Developing better preventive and therapeutic vaccines","classification_short":"Societal Challenges | Health | Preventing disease | Developing better preventive and therapeutic vaccines"}
{"code":"H2020-EU.1.4.3.2.","title":"Facilitate strategic international cooperation","shortTitle":"","language":"en","classification":"Excellent science | Research Infrastructures | Reinforcing European research infrastructure policy and international cooperation | Facilitate strategic international cooperation","classification_short":"Excellent Science | Research Infrastructures | Research infrastructure policy and international cooperation | Facilitate strategic international cooperation"}
{"code":"H2020-EU.3.5.2.","title":"Protection of the environment, sustainable management of natural resources, water, biodiversity and ecosystems","shortTitle":"Protection of the environment","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Protection of the environment, sustainable management of natural resources, water, biodiversity and ecosystems","classification_short":"Societal Challenges | Climate and environment | Protection of the environment"}
{"code":"H2020-Euratom-1.9.","title":"European Fusion Development Agreement","shortTitle":"","language":"en","classification":"Euratom | Indirect actions | European Fusion Development Agreement","classification_short":"Euratom | Indirect actions | European Fusion Development Agreement"}
{"code":"H2020-EU.3.2.1.1.","title":"Increasing production efficiency and coping with climate change, while ensuring sustainability and resilience","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Sustainable agriculture and forestry | Increasing production efficiency and coping with climate change, while ensuring sustainability and resilience","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Sustainable agriculture and forestry | Increasing production efficiency and coping with climate change, while ensuring sustainability and resilience"}
{"code":"H2020-EU.3.2.2.2.","title":"Healthy and safe foods and diets for all","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Sustainable and competitive agri-food sector for a safe and healthy diet | Healthy and safe foods and diets for all","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Sustainable and competitive agri-food sector for a safe and healthy diet | Healthy and safe foods and diets for all"}
{"code":"H2020-EU.2.1.4.2.","title":"Bio-technology based industrial products and processes","shortTitle":"Bio-technology based industrial products and processes","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Biotechnology | Bio-technology based industrial products and processes","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Biotechnology | Bio-technology based industrial products and processes"}
{"code":"H2020-EU.3.4.5.1.","title":"IADP Large Passenger Aircraft","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | CLEANSKY2 | IADP Large Passenger Aircraft","classification_short":"Societal Challenges | Transport | CLEANSKY2 | IADP Large Passenger Aircraft"}
{"code":"H2020-EU.3.1.1.3.","title":"Improving surveillance and preparedness","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Understanding health, wellbeing and disease | Improving surveillance and preparedness","classification_short":"Societal Challenges | Health | Understanding health, wellbeing and disease | Improving surveillance and preparedness"}
{"code":"H2020-EU.2.1.6.","title":"INDUSTRIAL LEADERSHIP - Leadership in enabling and industrial technologies Space","shortTitle":"Space","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Space","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Space"}
{"code":"H2020-EU.3.1.5.2.","title":"Improving scientific tools and methods to support policy making and regulatory needs","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Methods and data | Improving scientific tools and methods to support policy making and regulatory needs","classification_short":"Societal Challenges | Health | Methods and data | Improving scientific tools and methods to support policy making and regulatory needs"}
{"code":"H2020-EU.3.","title":"Societal challenges","shortTitle":"Societal Challenges","language":"en","classification":"Societal challenges","classification_short":"Societal Challenges"}
{"code":"H2020-EU.1.3.","title":"EXCELLENT SCIENCE - Marie Skłodowska-Curie Actions","shortTitle":"Marie-Sklodowska-Curie Actions","language":"en","classification":"Excellent science | Marie Skłodowska-Curie Actions","classification_short":"Excellent Science | Marie-Sklodowska-Curie Actions"}
{"code":"H2020-EU.4.f.","title":"Strengthening the administrative and operational capacity of transnational networks of National Contact Points","shortTitle":"","language":"en","classification":"SPREADING EXCELLENCE AND WIDENING PARTICIPATION | Strengthening the administrative and operational capacity of transnational networks of National Contact Points","classification_short":"Spreading excellence and widening participation | Strengthening the administrative and operational capacity of transnational networks of National Contact Points"}
{"code":"H2020-EU.1.2.","title":"EXCELLENT SCIENCE - Future and Emerging Technologies (FET)","shortTitle":"Future and Emerging Technologies (FET)","language":"en","classification":"Excellent science | Future and Emerging Technologies (FET)","classification_short":"Excellent Science | Future and Emerging Technologies (FET)"}
{"code":"H2020-EU.3.3.1.1.","title":"Bring to mass market technologies and services for a smart and efficient energy use","shortTitle":"","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | Reducing energy consumption and carbon foorpint by smart and sustainable use | Bring to mass market technologies and services for a smart and efficient energy use","classification_short":"Societal Challenges | Energy | Reducing energy consumption and carbon footprint | Bring to mass market technologies and services for a smart and efficient energy use"}
{"code":"H2020-EU.3.3.2.2.","title":"Develop efficient, reliable and cost-competitive solar energy systems","shortTitle":"","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | Low-cost, low-carbon energy supply | Develop efficient, reliable and cost-competitive solar energy systems","classification_short":"Societal Challenges | Energy | Low-cost, low-carbon energy supply | Develop efficient, reliable and cost-competitive solar energy systems"}
{"code":"H2020-EU.4.c.","title":"Establishing ERA Chairs","shortTitle":"ERA chairs","language":"en","classification":"SPREADING EXCELLENCE AND WIDENING PARTICIPATION | Establishing ERA Chairs","classification_short":"Spreading excellence and widening participation | ERA chairs"}
{"code":"H2020-EU.3.4.5","title":"CLEANSKY2","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | CLEANSKY2","classification_short":"Societal Challenges | Transport | CLEANSKY2"}
{"code":"H2020-EU.3.4.5.2.","title":"IADP Regional Aircraft","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | CLEANSKY2 | IADP Regional Aircraft","classification_short":"Societal Challenges | Transport | CLEANSKY2 | IADP Regional Aircraft"}
{"code":"H2020-EU.3.5.1.","title":"Fighting and adapting to climate change","shortTitle":"Fighting and adapting to climate change","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Fighting and adapting to climate change","classification_short":"Societal Challenges | Climate and environment | Fighting and adapting to climate change"}
{"code":"H2020-EU.3.3.1.","title":"Reducing energy consumption and carbon foorpint by smart and sustainable use","shortTitle":"Reducing energy consumption and carbon footprint","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | Reducing energy consumption and carbon foorpint by smart and sustainable use","classification_short":"Societal Challenges | Energy | Reducing energy consumption and carbon footprint"}
{"code":"H2020-EU.3.4.1.","title":"Resource efficient transport that respects the environment","shortTitle":"Resource efficient transport that respects the environment","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Resource efficient transport that respects the environment","classification_short":"Societal Challenges | Transport | Resource efficient transport that respects the environment"}
{"code":"H2020-EU.3.2.6.2.","title":"Fostering the bio-economy for bio-based industrie","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Bio-based Industries Joint Technology Initiative (BBI-JTI) | Fostering the bio-economy for bio-based industrie","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Bio-based Industries Joint Technology Initiative (BBI-JTI) | Fostering the bio-economy for bio-based industrie"}
{"code":"H2020-EU.3.4.7.1","title":"Exploratory Research","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | SESAR JU | Exploratory Research","classification_short":"Societal Challenges | Transport | SESAR JU | Exploratory Research"}
{"code":"H2020-EU.1.2.1.","title":"FET Open","shortTitle":"FET Open","language":"en","classification":"Excellent science | Future and Emerging Technologies (FET) | FET Open","classification_short":"Excellent Science | Future and Emerging Technologies (FET) | FET Open"}
{"code":"H2020-EU.3.4.3.1.","title":"Developing the next generation of transport means as the way to secure market share in the future","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Global leadership for the European transport industry | Developing the next generation of transport means as the way to secure market share in the future","classification_short":"Societal Challenges | Transport | Global leadership for the European transport industry | Developing the next generation of transport means as the way to secure market share in the future"}
{"code":"H2020-EU.3.2.4.","title":"Sustainable and competitive bio-based industries and supporting the development of a European bioeconomy","shortTitle":"Bio-based industries and supporting bio-economy","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Sustainable and competitive bio-based industries and supporting the development of a European bioeconomy","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Bio-based industries and supporting bio-economy"}
{"code":"H2020-EC","title":"Horizon 2020 Framework Programme","shortTitle":"EC Treaty","language":"en","classification":"Horizon 2020 Framework Programme","classification_short":"EC Treaty"}
{"code":"H2020-EU.3.6.2.4.","title":"Promote coherent and effective cooperation with third countries","shortTitle":"","language":"en","classification":"Societal challenges | Europe In A Changing World - Inclusive, Innovative And Reflective Societies | Innovative societies | Promote coherent and effective cooperation with third countries","classification_short":"Societal Challenges | Inclusive, innovative and reflective societies | Innovative societies | Promote coherent and effective cooperation with third countries"}
{"code":"H2020-EU.3.1.7.5.","title":"Neurodegenerative diseases","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Innovative Medicines Initiative 2 (IMI2) | Neurodegenerative diseases","classification_short":"Societal Challenges | Health | Innovative Medicines Initiative 2 (IMI2) | Neurodegenerative diseases"}
{"code":"H2020-EU.2.1.6.4.","title":"Enabling European research in support of international space partnerships","shortTitle":"Research in support of international space partnerships","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Space | Enabling European research in support of international space partnerships","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Space | Research in support of international space partnerships"}
{"code":"H2020-EU.2.1.5.1.","title":"Technologies for Factories of the Future","shortTitle":"Technologies for Factories of the Future","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Advanced manufacturing and processing | Technologies for Factories of the Future","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Advanced manufacturing and processing | Technologies for Factories of the Future"}
{"code":"H2020-EU.2.3.2.","title":"Specific support","shortTitle":"","language":"en","classification":"Industrial leadership | Innovation In SMEs | Specific support","classification_short":"Industrial Leadership | Innovation in SMEs | Specific support"}
{"code":"H2020-EU.1.4.2.","title":"Fostering the innovation potential of research infrastructures and their human resources","shortTitle":"Research infrastructures and their human resources","language":"en","classification":"Excellent science | Research Infrastructures | Fostering the innovation potential of research infrastructures and their human resources","classification_short":"Excellent Science | Research Infrastructures | Research infrastructures and their human resources"}
{"code":"H2020-EU.3.3.1.2.","title":"Unlock the potential of efficient and renewable heating-cooling systems","shortTitle":"","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | Reducing energy consumption and carbon foorpint by smart and sustainable use | Unlock the potential of efficient and renewable heating-cooling systems","classification_short":"Societal Challenges | Energy | Reducing energy consumption and carbon footprint | Unlock the potential of efficient and renewable heating-cooling systems"}
{"code":"H2020-EU.3.2.3.2.","title":"Developing competitive and environmentally-friendly European aquaculture","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Unlocking the potential of aquatic living resources | Developing competitive and environmentally-friendly European aquaculture","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Potential of aquatic living resources | Developing competitive and environmentally-friendly European aquaculture"}
{"code":"H2020-EU.3.2.1.3.","title":"Empowerment of rural areas, support to policies and rural innovation","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Sustainable agriculture and forestry | Empowerment of rural areas, support to policies and rural innovation","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Sustainable agriculture and forestry | Empowerment of rural areas, support to policies and rural innovation"}
{"code":"H2020-EU.3.2.5.3.","title":"Cross-cutting concepts and technologies enabling maritime growth","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Cross-cutting marine and maritime research | Cross-cutting concepts and technologies enabling maritime growth","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Cross-cutting marine and maritime research | Cross-cutting concepts and technologies enabling maritime growth"}
{"code":"H2020-EU.2.1.3.1.","title":"Cross-cutting and enabling materials technologies","shortTitle":"Cross-cutting and enabling materials technologies","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Advanced materials | Cross-cutting and enabling materials technologies","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Advanced materials | Cross-cutting and enabling materials technologies"}
{"code":"H2020-EU.3.1.1.2.","title":"Understanding disease","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Understanding health, wellbeing and disease | Understanding disease","classification_short":"Societal Challenges | Health | Understanding health, wellbeing and disease | Understanding disease"}
{"code":"H2020-Euratom-1.6.","title":"Lay the foundations for future fusion power plants by developing materials, technologies and conceptual design","shortTitle":"","language":"en","classification":"Euratom | Indirect actions | Lay the foundations for future fusion power plants by developing materials, technologies and conceptual design","classification_short":"Euratom | Indirect actions | Lay the foundations for future fusion power plants by developing materials, technologies and conceptual design"}
{"code":"H2020-EU.3.5.7.1.","title":"Reduce the use of the EU defined \"Critical raw materials\", for instance through low platinum or platinum free resources and through recycling or reducing or avoiding the use of rare earth elements","shortTitle":"","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | FCH2 (raw materials objective) | Reduce the use of the EU defined \"Critical raw materials\", for instance through low platinum or platinum free resources and through recycling or reducing or avoiding the use of rare earth elements","classification_short":"Societal Challenges | Climate and environment | FCH2 (raw materials objective) | Reduce the use of the EU defined \"Critical raw materials\", for instance through low platinum or platinum free resources and through recycling or reducing or avoiding the use of rare earth elements"}
{"code":"H2020-EU.2.2.","title":"INDUSTRIAL LEADERSHIP - Access to risk finance","shortTitle":"Access to risk finance","language":"en","classification":"Industrial leadership | Access to risk finance","classification_short":"Industrial Leadership | Access to risk finance"}
{"code":"H2020-EU.3.4.6.","title":"FCH2 (transport objectives)","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | FCH2 (transport objectives)","classification_short":"Societal Challenges | Transport | FCH2 (transport objectives)"}
{"code":"H2020-EU.4.d.","title":"A Policy Support Facility","shortTitle":"Policy Support Facility (PSF)","language":"en","classification":"SPREADING EXCELLENCE AND WIDENING PARTICIPATION | A Policy Support Facility","classification_short":"Spreading excellence and widening participation | Policy Support Facility (PSF)"}
{"code":"H2020-EU.2.1.1.7.","title":"ECSEL","shortTitle":"","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Information and Communication Technologies (ICT) | ECSEL","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Information and Communication Technologies | ECSEL"}
{"code":"H2020-EU.3.1.5.","title":"Methods and data","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Methods and data","classification_short":"Societal Challenges | Health | Methods and data"}
{"code":"H2020-EU.3.7.7.","title":"Enhance stadardisation and interoperability of systems, including for emergency purposes","shortTitle":"","language":"en","classification":"Societal challenges | Secure societies - Protecting freedom and security of Europe and its citizens | Enhance stadardisation and interoperability of systems, including for emergency purposes","classification_short":"Societal Challenges | Secure societies | Enhance stadardisation and interoperability of systems, including for emergency purposes"}
{"code":"H2020-Euratom-1.7.","title":"Promote innovation and industry competitiveness","shortTitle":"","language":"en","classification":"Euratom | Indirect actions | Promote innovation and industry competitiveness","classification_short":"Euratom | Indirect actions | Promote innovation and industry competitiveness"}
{"code":"H2020-EU.2.1.5.3.","title":"Sustainable, resource-efficient and low-carbon technologies in energy-intensive process industries","shortTitle":"Sustainable, resource-efficient and low-carbon technologies in energy-intensive process industries","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Advanced manufacturing and processing | Sustainable, resource-efficient and low-carbon technologies in energy-intensive process industries","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Advanced manufacturing and processing | Sustainable, resource-efficient and low-carbon technologies in energy-intensive process industries"}
{"code":"H2020-EU.2.1.4.3.","title":"Innovative and competitive platform technologies","shortTitle":"Innovative and competitive platform technologies","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Biotechnology | Innovative and competitive platform technologies","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Biotechnology | Innovative and competitive platform technologies"}
{"code":"H2020-EU.1.2.3.","title":"FET Flagships","shortTitle":"FET Flagships","language":"en","classification":"Excellent science | Future and Emerging Technologies (FET) | FET Flagships","classification_short":"Excellent Science | Future and Emerging Technologies (FET) | FET Flagships"}
{"code":"H2020-EU.3.6.3.","title":"Reflective societies - cultural heritage and European identity","shortTitle":"Reflective societies","language":"en","classification":"Societal challenges | Europe In A Changing World - Inclusive, Innovative And Reflective Societies | Reflective societies - cultural heritage and European identity","classification_short":"Societal Challenges | Inclusive, innovative and reflective societies | Reflective societies"}
{"code":"H2020-EU.3.6.3.3.","title":"Research on Europe's role in the world, on the mutual influence and ties between the world regions, and a view from outside on European cultures","shortTitle":"","language":"en","classification":"Societal challenges | Europe In A Changing World - Inclusive, Innovative And Reflective Societies | Reflective societies - cultural heritage and European identity | Research on Europe's role in the world, on the mutual influence and ties between the world regions, and a view from outside on European cultures","classification_short":"Societal Challenges | Inclusive, innovative and reflective societies | Reflective societies | Research on Europe's role in the world, on the mutual influence and ties between the world regions, and a view from outside on European cultures"}
{"code":"H2020-EU.3.2.4.2.","title":"Developing integrated biorefineries","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Sustainable and competitive bio-based industries and supporting the development of a European bioeconomy | Developing integrated biorefineries","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Bio-based industries and supporting bio-economy | Developing integrated biorefineries"}
{"code":"H2020-EU.2.1.6.1.1.","title":"Safeguard and further develop a competitive, sustainable and entrepreneurial space industry and research community and strengthen European non-dependence in space systems","shortTitle":"","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Space | Enabling European competitiveness, non-dependence and innovation of the European space sector | Safeguard and further develop a competitive, sustainable and entrepreneurial space industry and research community and strengthen European non-dependence in space systems","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Space | Competitiveness, non-dependence and innovation | Safeguard and further develop a competitive, sustainable and entrepreneurial space industry and research community and strengthen European non-dependence in space systems"}
{"code":"H2020-EU.3.1.3.2.","title":"Transferring knowledge to clinical practice and scalable innovation actions","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Treating and managing disease | Transferring knowledge to clinical practice and scalable innovation actions","classification_short":"Societal Challenges | Health | Treating and managing disease | Transferring knowledge to clinical practice and scalable innovation actions"}
{"code":"H2020-EU.2.","title":"Industrial leadership","shortTitle":"Industrial Leadership","language":"en","classification":"Industrial leadership","classification_short":"Industrial Leadership"}
{"code":"H2020-EU.3.4.1.3.","title":"Improving transport and mobility in urban areas","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Resource efficient transport that respects the environment | Improving transport and mobility in urban areas","classification_short":"Societal Challenges | Transport | Resource efficient transport that respects the environment | Improving transport and mobility in urban areas"}
{"code":"H2020-EU.4.e.","title":"Supporting access to international networks for excellent researchers and innovators who lack sufficient involvement in European and international networks","shortTitle":"","language":"en","classification":"SPREADING EXCELLENCE AND WIDENING PARTICIPATION | Supporting access to international networks for excellent researchers and innovators who lack sufficient involvement in European and international networks","classification_short":"Spreading excellence and widening participation | Supporting access to international networks for excellent researchers and innovators who lack sufficient involvement in European and international networks"}
{"code":"H2020-EU.3.2.1.","title":"Sustainable agriculture and forestry","shortTitle":"Sustainable agriculture and forestry","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Sustainable agriculture and forestry","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Sustainable agriculture and forestry"}
{"code":"H2020-EU.3.1.7.7.","title":"Respiratory diseases","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Innovative Medicines Initiative 2 (IMI2) | Respiratory diseases","classification_short":"Societal Challenges | Health | Innovative Medicines Initiative 2 (IMI2) | Respiratory diseases"}
{"code":"H2020-EU.3.4.8.6.","title":"Cross-cutting themes and activities (CCA)","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Shift2Rail JU | Cross-cutting themes and activities (CCA)","classification_short":"Societal Challenges | Transport | Shift2Rail JU | Cross-cutting themes and activities (CCA)"}
{"code":"H2020-EU.3.4.8.4.","title":"Innovation Programme 4: IT Solutions for attractive railway services","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Shift2Rail JU | Innovation Programme 4: IT Solutions for attractive railway services","classification_short":"Societal Challenges | Transport | Shift2Rail JU | Innovation Programme 4: IT Solutions for attractive railway services"}
{"code":"H2020-EU.3.2.2.","title":"Sustainable and competitive agri-food sector for a safe and healthy diet","shortTitle":"Sustainable and competitive agri-food sector for a safe and healthy diet","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Sustainable and competitive agri-food sector for a safe and healthy diet","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Sustainable and competitive agri-food sector for a safe and healthy diet"}
{"code":"H2020-EU.3.4.3.","title":"Global leadership for the European transport industry","shortTitle":"Global leadership for the European transport industry","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Global leadership for the European transport industry","classification_short":"Societal Challenges | Transport | Global leadership for the European transport industry"}
{"code":"H2020-EU.1.4.2.1.","title":"Exploiting the innovation potential of research infrastructures","shortTitle":"","language":"en","classification":"Excellent science | Research Infrastructures | Fostering the innovation potential of research infrastructures and their human resources | Exploiting the innovation potential of research infrastructures","classification_short":"Excellent Science | Research Infrastructures | Research infrastructures and their human resources | Exploiting the innovation potential of research infrastructures"}
{"code":"H2020-EU.3.3.2.3.","title":"Develop competitive and environmentally safe technologies for CO2 capture, transport, storage and re-use","shortTitle":"","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | Low-cost, low-carbon energy supply | Develop competitive and environmentally safe technologies for CO2 capture, transport, storage and re-use","classification_short":"Societal Challenges | Energy | Low-cost, low-carbon energy supply | Develop competitive and environmentally safe technologies for CO2 capture, transport, storage and re-use"}
{"code":"H2020-EU.3.6.3.1.","title":"Study European heritage, memory, identity, integration and cultural interaction and translation, including its representations in cultural and scientific collections, archives and museums, to better inform and understand the present by richer interpretations of the past","shortTitle":"","language":"en","classification":"Societal challenges | Europe In A Changing World - Inclusive, Innovative And Reflective Societies | Reflective societies - cultural heritage and European identity | Study European heritage, memory, identity, integration and cultural interaction and translation, including its representations in cultural and scientific collections, archives and museums, to better inform and understand the present by richer interpretations of the past","classification_short":"Societal Challenges | Inclusive, innovative and reflective societies | Reflective societies | Study European heritage, memory, identity, integration and cultural interaction and translation, including its representations in cultural and scientific collections, archives and museums, to better inform and understand the present by richer interpretations of the past"}
{"code":"H2020-EU.2.1.2.2.","title":"Ensuring the safe and sustainable development and application of nanotechnologies","shortTitle":"Safe and sustainable nanotechnologies","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Nanotechnologies | Ensuring the safe and sustainable development and application of nanotechnologies","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Nanotechnologies | Safe and sustainable nanotechnologies"}
{"code":"H2020-EU.3.1.6.","title":"Health care provision and integrated care","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Health care provision and integrated care","classification_short":"Societal Challenges | Health | Health care provision and integrated care"}
{"code":"H2020-EU.3.4.5.9.","title":"Technology Evaluator","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | CLEANSKY2 | Technology Evaluator","classification_short":"Societal Challenges | Transport | CLEANSKY2 | Technology Evaluator"}
{"code":"H2020-EU.3.6.","title":"SOCIETAL CHALLENGES - Europe In A Changing World - Inclusive, Innovative And Reflective Societies","shortTitle":"Inclusive, innovative and reflective societies","language":"en","classification":"Societal challenges | Europe In A Changing World - Inclusive, Innovative And Reflective Societies","classification_short":"Societal Challenges | Inclusive, innovative and reflective societies"}
{"code":"H2020-EU.3.4.8.","title":"Shift2Rail JU","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Shift2Rail JU","classification_short":"Societal Challenges | Transport | Shift2Rail JU"}
{"code":"H2020-EU.3.2.6.3.","title":"Sustainable biorefineries","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Bio-based Industries Joint Technology Initiative (BBI-JTI) | Sustainable biorefineries","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Bio-based Industries Joint Technology Initiative (BBI-JTI) | Sustainable biorefineries"}
{"code":"H2020-EU.4.a.","title":"Teaming of excellent research institutions and low performing RDI regions","shortTitle":"Teaming of research institutions and low performing regions","language":"en","classification":"SPREADING EXCELLENCE AND WIDENING PARTICIPATION | Teaming of excellent research institutions and low performing RDI regions","classification_short":"Spreading excellence and widening participation | Teaming of research institutions and low performing regions"}
{"code":"H2020-EU.3.1.7.4.","title":"Diabetes","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Innovative Medicines Initiative 2 (IMI2) | Diabetes","classification_short":"Societal Challenges | Health | Innovative Medicines Initiative 2 (IMI2) | Diabetes"}
{"code":"H2020-EU.3.7.2.","title":"Protect and improve the resilience of critical infrastructures, supply chains and tranport modes","shortTitle":"","language":"en","classification":"Societal challenges | Secure societies - Protecting freedom and security of Europe and its citizens | Protect and improve the resilience of critical infrastructures, supply chains and tranport modes","classification_short":"Societal Challenges | Secure societies | Protect and improve the resilience of critical infrastructures, supply chains and tranport modes"}
{"code":"H2020-EU.3.1.2.","title":"Preventing disease","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Preventing disease","classification_short":"Societal Challenges | Health | Preventing disease"}
{"code":"H2020-EU.3.5.3.4.","title":"Improve societal awareness and skills on raw materials","shortTitle":"","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Ensuring the sustainable supply of non-energy and non-agricultural raw materials | Improve societal awareness and skills on raw materials","classification_short":"Societal Challenges | Climate and environment | Supply of non-energy and non-agricultural raw materials | Improve societal awareness and skills on raw materials"}
{"code":"H2020-EU.3.3.7.","title":"Market uptake of energy innovation - building on Intelligent Energy Europe","shortTitle":"Market uptake of energy innovation","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | Market uptake of energy innovation - building on Intelligent Energy Europe","classification_short":"Societal Challenges | Energy | Market uptake of energy innovation"}
{"code":"H2020-EU.2.3.","title":"INDUSTRIAL LEADERSHIP - Innovation In SMEs","shortTitle":"Innovation in SMEs","language":"en","classification":"Industrial leadership | Innovation In SMEs","classification_short":"Industrial Leadership | Innovation in SMEs"}
{"code":"H2020-EU.2.1.1.3.","title":"Future Internet: Software, hardware, Infrastructures, technologies and services","shortTitle":"","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Information and Communication Technologies (ICT) | Future Internet: Software, hardware, Infrastructures, technologies and services","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Information and Communication Technologies | Future Internet: Software, hardware, Infrastructures, technologies and services"}
{"code":"H2020-EU.3.1.5.3.","title":"Using in-silico medicine for improving disease management and prediction","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Methods and data | Using in-silico medicine for improving disease management and prediction","classification_short":"Societal Challenges | Health | Methods and data | Using in-silico medicine for improving disease management and prediction"}
{"code":"H2020-EU.3.6.1.1.","title":"The mechanisms to promote smart, sustainable and inclusive growth","shortTitle":"","language":"en","classification":"Societal challenges | Europe In A Changing World - Inclusive, Innovative And Reflective Societies | Inclusive societies | The mechanisms to promote smart, sustainable and inclusive growth","classification_short":"Societal Challenges | Inclusive, innovative and reflective societies | Inclusive societies | The mechanisms to promote smart, sustainable and inclusive growth"}
{"code":"H2020-EU.1.3.1.","title":"Fostering new skills by means of excellent initial training of researchers","shortTitle":"MCSA Initial training","language":"en","classification":"Excellent science | Marie Skłodowska-Curie Actions | Fostering new skills by means of excellent initial training of researchers","classification_short":"Excellent Science | Marie-Sklodowska-Curie Actions | MCSA Initial training"}
{"code":"H2020-EU.3.6.2.3.","title":"Make use of the innovative, creative and productive potential of all generations","shortTitle":"","language":"en","classification":"Societal challenges | Europe In A Changing World - Inclusive, Innovative And Reflective Societies | Innovative societies | Make use of the innovative, creative and productive potential of all generations","classification_short":"Societal Challenges | Inclusive, innovative and reflective societies | Innovative societies | Make use of the innovative, creative and productive potential of all generations"}
{"code":"H2020-EU.3.5.1.3.","title":"Support mitigation policies, including studies that focus on impact from other sectoral policies","shortTitle":"","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Fighting and adapting to climate change | Support mitigation policies, including studies that focus on impact from other sectoral policies","classification_short":"Societal Challenges | Climate and environment | Fighting and adapting to climate change | Support mitigation policies, including studies that focus on impact from other sectoral policies"}
{"code":"H2020-EU.3.3.1.3.","title":"Foster European Smart cities and Communities","shortTitle":"","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | Reducing energy consumption and carbon foorpint by smart and sustainable use | Foster European Smart cities and Communities","classification_short":"Societal Challenges | Energy | Reducing energy consumption and carbon footprint | Foster European Smart cities and Communities"}
{"code":"H2020-EU.3.1.1.","title":"Understanding health, wellbeing and disease","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Understanding health, wellbeing and disease","classification_short":"Societal Challenges | Health | Understanding health, wellbeing and disease"}
{"code":"H2020-Euratom-1.","title":"Indirect actions","shortTitle":"","language":"en","classification":"Euratom | Indirect actions","classification_short":"Euratom | Indirect actions"}
{"code":"H2020-EU.3.5.7.","title":"FCH2 (raw materials objective)","shortTitle":"","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | FCH2 (raw materials objective)","classification_short":"Societal Challenges | Climate and environment | FCH2 (raw materials objective)"}
{"code":"H2020-EU.3.7.3.","title":"Strengthen security through border management","shortTitle":"","language":"en","classification":"Societal challenges | Secure societies - Protecting freedom and security of Europe and its citizens | Strengthen security through border management","classification_short":"Societal Challenges | Secure societies | Strengthen security through border management"}
{"code":"H2020-EU.2.1.1.2.","title":"Next generation computing: Advanced and secure computing systems and technologies, including cloud computing","shortTitle":"","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Information and Communication Technologies (ICT) | Next generation computing: Advanced and secure computing systems and technologies, including cloud computing","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Information and Communication Technologies | Next generation computing: Advanced and secure computing systems and technologies, including cloud computing"}
{"code":"H2020-EU.3.5.5.","title":"Developing comprehensive and sustained global environmental observation and information systems","shortTitle":"Environmental observation and information systems","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Developing comprehensive and sustained global environmental observation and information systems","classification_short":"Societal Challenges | Climate and environment | Environmental observation and information systems"}
{"code":"H2020-EU.3.1.7.10.","title":"Cancer","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Innovative Medicines Initiative 2 (IMI2) | Cancer","classification_short":"Societal Challenges | Health | Innovative Medicines Initiative 2 (IMI2) | Cancer"}
{"code":"H2020-EU.3.4.8.2.","title":"Innovation Programme 2: Advanced traffic management and control systems","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Shift2Rail JU | Innovation Programme 2: Advanced traffic management and control systems","classification_short":"Societal Challenges | Transport | Shift2Rail JU | Innovation Programme 2: Advanced traffic management and control systems"}
{"code":"H2020-EU.5.e.","title":"Develop the accessibility and the use of the results of publicly-funded research","shortTitle":"","language":"en","classification":"SCIENCE WITH AND FOR SOCIETY | Develop the accessibility and the use of the results of publicly-funded research","classification_short":"Science with and for Society | Develop the accessibility and the use of the results of publicly-funded research"}
{"code":"H2020-EU.3.4.4.","title":"Socio-economic and behavioural research and forward looking activities for policy making","shortTitle":"Socio-economic and behavioural research","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Socio-economic and behavioural research and forward looking activities for policy making","classification_short":"Societal Challenges | Transport | Socio-economic and behavioural research"}
{"code":"H2020-EU.3.3.2.","title":"Low-cost, low-carbon energy supply","shortTitle":"Low-cost, low-carbon energy supply","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | Low-cost, low-carbon energy supply","classification_short":"Societal Challenges | Energy | Low-cost, low-carbon energy supply"}
{"code":"H2020-EU.3.4.2.2.","title":"Substantial improvements in the mobility of people and freight","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | Better mobility, less congestion, more safety and security | Substantial improvements in the mobility of people and freight","classification_short":"Societal Challenges | Transport | Mobility, safety and security | Substantial improvements in the mobility of people and freight"}
{"code":"H2020-EU.3.5.6.","title":"Cultural heritage","shortTitle":"Cultural heritage","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Cultural heritage","classification_short":"Societal Challenges | Climate and environment | Cultural heritage"}
{"code":"H2020-EU.3.5.3.","title":"Ensuring the sustainable supply of non-energy and non-agricultural raw materials","shortTitle":"Supply of non-energy and non-agricultural raw materials","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Ensuring the sustainable supply of non-energy and non-agricultural raw materials","classification_short":"Societal Challenges | Climate and environment | Supply of non-energy and non-agricultural raw materials"}
{"code":"H2020-EU.2.1.5.2.","title":"Technologies enabling energy-efficient systems and energy-efficient buildings with a low environmental impact","shortTitle":"Technologies enabling energy-efficient systems and buildings","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Advanced manufacturing and processing | Technologies enabling energy-efficient systems and energy-efficient buildings with a low environmental impact","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Advanced manufacturing and processing | Technologies enabling energy-efficient systems and buildings"}
{"code":"H2020-EU.1.4.1.2.","title":"Integrating and opening existing national and regional research infrastructures of European interest","shortTitle":"","language":"en","classification":"Excellent science | Research Infrastructures | Developing the European research infrastructures for 2020 and beyond | Integrating and opening existing national and regional research infrastructures of European interest","classification_short":"Excellent Science | Research Infrastructures | Research infrastructures for 2020 and beyond | Integrating and opening existing national and regional research infrastructures of European interest"}
{"code":"H2020-EU.3.7.8.","title":"Support the Union's external security policies including through conflict prevention and peace-building","shortTitle":"","language":"en","classification":"Societal challenges | Secure societies - Protecting freedom and security of Europe and its citizens | Support the Union's external security policies including through conflict prevention and peace-building","classification_short":"Societal Challenges | Secure societies | Support the Union's external security policies including through conflict prevention and peace-building"}
{"code":"H2020-EU.2.1.1.1.","title":"A new generation of components and systems: Engineering of advanced embedded and energy and resource efficient components and systems","shortTitle":"","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Information and Communication Technologies (ICT) | A new generation of components and systems: Engineering of advanced embedded and energy and resource efficient components and systems","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Information and Communication Technologies | A new generation of components and systems: Engineering of advanced embedded and energy and resource efficient components and systems"}
{"code":"H2020-EU.1.1.","title":"EXCELLENT SCIENCE - European Research Council (ERC)","shortTitle":"European Research Council (ERC)","language":"en","classification":"Excellent science | European Research Council (ERC)","classification_short":"Excellent Science | European Research Council (ERC)"}
{"code":"H2020-EU.3.4.5.6.","title":"ITD Systems","shortTitle":"","language":"en","classification":"Societal challenges | Smart, Green And Integrated Transport | CLEANSKY2 | ITD Systems","classification_short":"Societal Challenges | Transport | CLEANSKY2 | ITD Systems"}
{"code":"H2020-EU.6.","title":"NON-NUCLEAR DIRECT ACTIONS OF THE JOINT RESEARCH CENTRE (JRC)","shortTitle":"Joint Research Centre (JRC) non-nuclear direct actions","language":"en","classification":"NON-NUCLEAR DIRECT ACTIONS OF THE JOINT RESEARCH CENTRE (JRC)","classification_short":"Joint Research Centre (JRC) non-nuclear direct actions"}
{"code":"H2020-EU.3.2.5.1.","title":"Climate change impact on marine ecosystems and maritime economy","shortTitle":"","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Cross-cutting marine and maritime research | Climate change impact on marine ecosystems and maritime economy","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Cross-cutting marine and maritime research | Climate change impact on marine ecosystems and maritime economy"}
{"code":"H2020-Euratom-1.2.","title":"Contribute to the development of solutions for the management of ultimate nuclear waste","shortTitle":"","language":"en","classification":"Euratom | Indirect actions | Contribute to the development of solutions for the management of ultimate nuclear waste","classification_short":"Euratom | Indirect actions | Contribute to the development of solutions for the management of ultimate nuclear waste"}
{"code":"H2020-EU.3.1.7.11.","title":"Rare/Orphan Diseases","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Innovative Medicines Initiative 2 (IMI2) | Rare/Orphan Diseases","classification_short":"Societal Challenges | Health | Innovative Medicines Initiative 2 (IMI2) | Rare/Orphan Diseases"}
{"code":"H2020-EU.3.1.4.1.","title":"Active ageing, independent and assisted living","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Active ageing and self-management of health | Active ageing, independent and assisted living","classification_short":"Societal Challenges | Health | Active ageing and self-management of health | Active ageing, independent and assisted living"}
{"code":"H2020-Euratom-1.4.","title":"Foster radiation protection","shortTitle":"","language":"en","classification":"Euratom | Indirect actions | Foster radiation protection","classification_short":"Euratom | Indirect actions | Foster radiation protection"}
{"code":"H2020-EU.2.2.2.","title":"The Equity facility providing equity finance for R&I: 'Union equity instruments for research and innovation'","shortTitle":"Equity facility","language":"en","classification":"Industrial leadership | Access to risk finance | The Equity facility providing equity finance for R&I: 'Union equity instruments for research and innovation'","classification_short":"Industrial Leadership | Access to risk finance | Equity facility"}
{"code":"H2020-EU.3.3.8.","title":"FCH2 (energy objectives)","shortTitle":"","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | FCH2 (energy objectives)","classification_short":"Societal Challenges | Energy | FCH2 (energy objectives)"}
{"code":"H2020-EU.3.2.3.","title":"Unlocking the potential of aquatic living resources","shortTitle":"Potential of aquatic living resources","language":"en","classification":"Societal challenges | Food security, sustainable agriculture and forestry, marine, maritime and inland water research, and the bioeconomy | Unlocking the potential of aquatic living resources","classification_short":"Societal Challenges | Food, agriculture, forestry, marine research and bioeconomy | Potential of aquatic living resources"}
{"code":"H2020-EU.3.5.2.3.","title":"Provide knowledge and tools for effective decision making and public engagement","shortTitle":"","language":"en","classification":"Societal challenges | Climate action, Environment, Resource Efficiency and Raw Materials | Protection of the environment, sustainable management of natural resources, water, biodiversity and ecosystems | Provide knowledge and tools for effective decision making and public engagement","classification_short":"Societal Challenges | Climate and environment | Protection of the environment | Provide knowledge and tools for effective decision making and public engagement"}
{"code":"H2020-EU.3.3.6.","title":"Robust decision making and public engagement","shortTitle":"Robust decision making and public engagement","language":"en","classification":"Societal challenges | Secure, clean and efficient energy | Robust decision making and public engagement","classification_short":"Societal Challenges | Energy | Robust decision making and public engagement"}
{"code":"H2020-EU.3.1.7.2.","title":"Osteoarthritis","shortTitle":"","language":"en","classification":"Societal challenges | Health, demographic change and well-being | Innovative Medicines Initiative 2 (IMI2) | Osteoarthritis","classification_short":"Societal Challenges | Health | Innovative Medicines Initiative 2 (IMI2) | Osteoarthritis"}
{"code":"H2020-EU.2.1.1.","title":"INDUSTRIAL LEADERSHIP - Leadership in enabling and industrial technologies - Information and Communication Technologies (ICT)","shortTitle":"Information and Communication Technologies","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Information and Communication Technologies (ICT)","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Information and Communication Technologies"}
{"code":"H2020-EU.2.1.6.2.","title":"Enabling advances in space technology","shortTitle":"Enabling advances in space technology","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Space | Enabling advances in space technology","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Space | Enabling advances in space technology"}
{"code":"H2020-EU.2.1.1.4.","title":"Content technologies and information management: ICT for digital content, cultural and creative industries","shortTitle":"","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Information and Communication Technologies (ICT) | Content technologies and information management: ICT for digital content, cultural and creative industries","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Information and Communication Technologies | Content technologies and information management: ICT for digital content, cultural and creative industries"}
{"code":"H2020-EU.2.1.5.4.","title":"New sustainable business models","shortTitle":"New sustainable business models","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Advanced manufacturing and processing | New sustainable business models","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Advanced manufacturing and processing | New sustainable business models"}
{"code":"H2020-EU.2.1.4.","title":"INDUSTRIAL LEADERSHIP - Leadership in enabling and industrial technologies Biotechnology","shortTitle":"Biotechnology","language":"en","classification":"Industrial leadership | Leadership in enabling and industrial technologies | Biotechnology","classification_short":"Industrial Leadership | Leadership in enabling and industrial technologies (LEIT) | Biotechnology"}

View File

@ -1,17 +0,0 @@
{"id":"894593","programme":"H2020-EU.3.4.7.","topics":"SESAR-ER4-31-2019"}
{"id":"897004","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"896300","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"892890","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"886828","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"886776","programme":"H2020-EU.2.1.4.","topics":"BBI-2019-SO3-D4"}
{"id":"886776","programme":"H2020-EU.3.2.6.","topics":"BBI-2019-SO3-D4"}
{"id":"895426","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"898218","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"893787","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"896189","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"891624","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"887259","programme":"H2020-EU.2.1.4.","topics":"BBI-2019-SO3-D3"}
{"id":"887259","programme":"H2020-EU.3.2.6.","topics":"BBI-2019-SO3-D3"}
{"id":"892834","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"895716","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"883730","programme":"H2020-EU.1.1.","topics":"ERC-2019-ADG"}

View File

@ -1,25 +0,0 @@
rcn;code;title;shortTitle;language
664331;H2020-EU.3.3.2.;Un approvisionnement en électricité à faible coût et à faibles émissions de carbone;Low-cost, low-carbon energy supply;fr
664355;H2020-EU.3.3.7.;Absorción por el mercado de la innovación energética - explotación del Programa Energía Inteligente - Europa Europe;Market uptake of energy innovation;es
664323;H2020-EU.3.3.1.;Ridurre il consumo di energia e le emissioni di carbonio grazie all'uso intelligente e sostenibile;Reducing energy consumption and carbon footprint;it
664233;H2020-EU.2.3.2.3.;Wsparcie innowacji rynkowych;Supporting market-driven innovation;pl
664199;H2020-EU.2.1.5.1.;Tecnologías para las fábricas del futuro;Technologies for Factories of the Future;es
664235;H2020-EU.3.;PRIORITÉ «Défis de société»;Societal Challenges;fr
664355;H2020-EU.3.3.7.;"Assorbimento di mercato dell'innovazione energetica - iniziative fondate sul programma ""Energia intelligente - Europa""";Market uptake of energy innovation;it
664355;H2020-EU.3.3.7.;"Markteinführung von Energieinnovationen Aufbau auf ""Intelligente Energie Europa";Market uptake of energy innovation;de
664235;H2020-EU.3.;"PRIORIDAD ""Retos de la sociedad""";Societal Challenges;es
664231;H2020-EU.2.3.2.2.;Mejorar la capacidad de innovación de las PYME;Enhancing the innovation capacity of SMEs;es
664223;H2020-EU.2.3.;LIDERAZGO INDUSTRIAL - Innovación en la pequeña y mediana empresa;Innovation in SMEs;es
664323;H2020-EU.3.3.1.;Réduire la consommation d'énergie et l'empreinte carbone en utilisant l'énergie de manière intelligente et durable;Reducing energy consumption and carbon footprint;fr
664323;H2020-EU.3.3.1.;Reducir el consumo de energía y la huella de carbono mediante un uso inteligente y sostenible;Reducing energy consumption and carbon footprint;es
664215;H2020-EU.2.1.6.4.;Beitrag der europäischen Forschung zu internationalen Weltraumpartnerschaften;Research in support of international space partnerships;de
664213;H2020-EU.2.1.6.3.;Permettere lo sfruttamento dei dati spaziali;;it
664213;H2020-EU.2.1.6.3.;Permettre l'exploitation des données spatiales;Enabling exploitation of space data;fr
664231;H2020-EU.2.3.2.2.;Zwiększenie zdolności MŚP pod względem innowacji;Enhancing the innovation capacity of SMEs;pl
664231;H2020-EU.2.3.2.2.;Rafforzare la capacità di innovazione delle PMI;Enhancing the innovation capacity of SMEs;it
664213;H2020-EU.2.1.6.3.;Grundlagen für die Nutzung von Weltraumdaten;Enabling exploitation of space data;de
664211;H2020-EU.2.1.6.2.;Favorecer los avances en las tecnologías espaciales;Enabling advances in space technology;es
664209;H2020-EU.2.1.6.1.;Assurer la compétitivité et l'indépendance de l'Europe et promouvoir l'innovation dans le secteur spatial européen;Competitiveness, non-dependence and innovation;fr
664231;H2020-EU.2.3.2.2.;Renforcement de la capacité d'innovation des PME;Enhancing the innovation capacity of SMEs;fr
664203;H2020-EU.2.1.5.3.;Tecnologías sostenibles, eficientes en su utilización de recursos y de baja emisión de carbono en las industrias de transformación de gran consumo energético;Sustainable, resource-efficient and low-carbon technologies in energy-intensive process industries;es
664103;H2020-EU.1.2.1.;FET Open;FET Open;es

View File

@ -0,0 +1,399 @@
[{"acronym": "GiSTDS",
"contentUpdateDate": "2022-10-08 18:28:27",
"ecMaxContribution": 203149.44,
"ecSignatureDate": "2020-03-16",
"endDate": "2022-11-30",
"frameworkProgramme": "H2020",
"fundingScheme": "MSCA-IF-EF-SE",
"grantDoi": "10.3030/886988",
"id": 894593,
"legalBasis": "H2020-EU.1.3.",
"masterCall": "H2020-MSCA-IF-2019",
"nature": "",
"objective": "Coordination of different players in active distribution systems by increasing the penetration of distributed energy resources and rapid advances on the aggregators, microgrids and prosumers with private territory individuals establishes new challenges in control and management systems from the owners point of views. Undertaking digitalization of future distribution systems, GiSTDS introduces an edge computing framework based on GridEye, the core production of DEPsys, which provides real time visibility and monitoring. Relevant drawbacks in the distribution system management platforms in handling the scalability of players, look ahead preventive management systems regarding contingency condition and lack of physical boundaries for third party entities (aggregators) will be addressed by GiSTDS. The main novelties of this project in comparison to the GridEye are: 1) Developed P2P trading module provides automated double auction negotiation in real time fashion which enables all private entities with and without specific physical boundaries to participate in local and flexible electricity markets. 2) Modification of GridEyes modules to address the scalability and resilient operation in both the normal and contingency conditions. 3) To present a look ahead energy managements schemes for the operators, GiSTDS will be equipped to the forecasting module based on auto-regressive with exogenous variables (ARX) and machine learning techniques such as long short term memory (LSTM) and recursive neural network (RNN). Therefore, GiSTDS based on modified and developed modules explores comprehensive distributed framework for control, monitoring and operation of energy systems with multiple dispersed players in different scales. The edge computing solutions in GiSTDS eectively digitalis energy systems and creates major opportunities in terms of avoiding big data concerns and getting a bottom-up monitoring approach for the network supervision.",
"rcn": 227870,
"startDate": "2020-12-01",
"status": "TERMINATED",
"subCall": "H2020-MSCA-IF-2019",
"title": "GridEye Scalable Transactive Distribution Systems",
"topics": "MSCA-IF-2019",
"totalCost": 203149.44
},{
"acronym": "REAL",
"contentUpdateDate": "2022-04-27 21:10:20",
"ecMaxContribution": 1498830,
"ecSignatureDate": "2020-09-29",
"endDate": "2026-03-31",
"frameworkProgramme": "H2020",
"fundingScheme": "ERC-STG",
"grantDoi": "10.3030/947908",
"id": 897004,
"legalBasis": "H2020-EU.1.1.",
"masterCall": "ERC-2020-STG",
"nature": "",
"objective": "In the last decade, machine learning (ML) has become a fundamental tool with a growing impact in many disciplines, from science to industry. However, nowadays, the scenario is changing: data are exponentially growing compared to the computational resources (post Moore's law era), and ML algorithms are becoming crucial building blocks in complex systems for decision making, engineering, science. Current machine learning is not suitable for the new scenario, both from a theoretical and a practical viewpoint: (a) the lack of cost-effectiveness of the algorithms impacts directly the economic/energetic costs of large scale ML, making it barely affordable by universities or research institutes; (b) the lack of reliability of the predictions affects critically the safety of the systems where ML is employed. To deal with the challenges posed by the new scenario, REAL will lay the foundations of a solid theoretical and algorithmic framework for reliable and cost-effective large scale machine learning on modern computational architectures. In particular, REAL will extend the classical ML framework to provide algorithms with two additional guarantees: (a) the predictions will be reliable, i.e., endowed with explicit bounds on their uncertainty guaranteed by the theory; (b) the algorithms will be cost-effective, i.e., they will be naturally adaptive to the new architectures and will provably achieve the desired reliability and accuracy level, by using minimum possible computational resources. The algorithms resulting from REAL will be released as open-source libraries for distributed and multi-GPU settings, and their effectiveness will be extensively tested on key benchmarks from computer vision, natural language processing, audio processing, and bioinformatics. The methods and the techniques developed in this project will help machine learning to take the next step and become a safe, effective, and fundamental tool in science and engineering for large scale data problems.",
"rcn": 231448,
"startDate": "2021-04-01",
"status": "SIGNED",
"subCall": "ERC-2020-STG",
"title": "Reliable and cost-effective large scale machine learning",
"topics": "ERC-2020-STG",
"totalCost": 1498830
},{
"acronym": "CARL-PdM",
"contentUpdateDate": "2022-08-09 09:09:33",
"ecMaxContribution": 50000,
"ecSignatureDate": "2017-07-13",
"endDate": "2018-01-31",
"frameworkProgramme": "H2020",
"fundingScheme": "SME-1",
"grantDoi": "10.3030/781123",
"id": 896300,
"legalBasis": "H2020-EU.2.1.1.",
"masterCall": "H2020-SMEInst-2016-2017",
"nature": "",
"objective": "\"\"\"Industry 4.0 preaches a complete revolution of industrial process and promises huge efficiency gains by a complete virtualization of the factory, numerical design tools, automation of the logistics and the routing of the parts, smart machines, 3D printing, cyber-physical systems, predictive maintenance and control of the whole factory by an intelligent system. \nIn the next 10 years, industry 4.0 is expected to change the way we operate our factories and to create 1250 Billion € of additional value added in Europe.\nAlso , according to ARC Advisory Group, the predictive maintenance market is estimated to grow from 1,404.3M€ in 2016 to 4,904.0M€ by 2021.\nCARL-PdM is a innovative IIoT data powered predictive maintenance platform encompass the core of \"\"Industry 4.0\"\" with a new maintenance paradigm : maintenance is a production function whose aim should be to optimize production output and quality.\nWe will leverage the IoT revolution to achieve these goal.\nThis software solution, CARL-PdM, provides many core capabilities in industrial scenarios, including edge analytics who provide a way to pre-process the data so that only the pertinent information is sent to the predictive layer (Auto Classification and Machine learning).\nThe predictive layer will categorize data into abstract class which represent technical assets behavior. It is a reliable and reproducible approach.\nCompetitive advantages: \n- Reduce failure by 50%, maintenance cost by 30%, production stops by 70%, energetic consumption by 20%, Time To Repair by 30%\n- Increase production flexibility\n- System agnostic to machines\n- Machine-learning algorithm that compares the fault prediction and sensor data with historical data, predicting best maintenance activity regarding to production and quality objectives \n\nThe solution will be implemented at a global scale, starting in European markets: France, Italy, Belgium for early market uptake and testing; and then the biggest EU markets (Germany, UK, Poland and Spain).\n\"",
"rcn": 211479,
"startDate": "2017-08-01",
"status": "CLOSED",
"subCall": "H2020-SMEINST-1-2016-2017",
"title": "Next Generation Holistic Predictive Maintenance Software",
"topics": "SMEInst-01-2016-2017",
"totalCost": 71429
},{
"acronym": "OPTIMAL",
"contentUpdateDate": "2022-11-02 12:00:16",
"ecMaxContribution": 772800,
"ecSignatureDate": "2020-12-01",
"endDate": "2025-12-31",
"frameworkProgramme": "H2020",
"fundingScheme": "MSCA-RISE",
"grantDoi": "10.3030/101007963",
"id": 892890,
"legalBasis": "H2020-EU.1.3.",
"masterCall": "H2020-MSCA-RISE-2020",
"nature": "",
"objective": "The proposed project is to develop and maintain long term collaborations between Europe and China towards CO2 neutral Olefin production. We will realize this objective by carrying out joint research in big data and artificial intelligence (AI) for ethylene plants integrated with carbon capture and CO2 utilisation. Specifically this requires a universal set of skills such as pilot scale experimental study, process modelling and analysis, optimisation, catalysis and reaction kinetics that will be strengthened by the individual mobility of researchers between Europe and China. There are 12 partners involved in OPTIMAL with 3 industrial partners. These partners are world leading in their respective research areas. OPTIMAL is planned to start from Aug. 2021 and will continue for 48 months. There will be 28 experienced and 35 early stage researchers participating in OPTIMAL with exchange visits of 262 person months. The funding of €772,800 will be requested from European Commission to support these planned secondments. The European beneficiaries are experts at catalysis, CO2 utilisation, intensified carbon capture, reaction mechanism and kinetics & CFD studies, hybrid modelling, molecular simulation and dynamic optimisation, whilst the Chinese partners are experts at exergy analysis, process control and optimisation, solvent-based carbon capture & data-driven model development, deep reinforced learning based model free control, intelligent predictive control, physics-based reduced order model development, soft exergy sensor development and optimisation under uncertainty. Transfer of knowledge will take place through these exchange visits. We will generate at least 25 Journal publications and 25 Conference papers. 2 Special Issues will be established in leading journals such as Applied Energy. 2 Workshops and 2 Special Sessions in major international conferences will also be organised to disseminate project results.",
"rcn": 232682,
"startDate": "2021-08-01",
"status": "SIGNED",
"subCall": "H2020-MSCA-RISE-2020",
"title": "Smart and CO2 neutral Olefin Production by arTificial Intelligence and MAchine Learning",
"topics": "MSCA-RISE-2020",
"totalCost": 1205200
},{
"acronym": "e-DNA BotStop",
"contentUpdateDate": "2022-08-15 14:18:25",
"ecMaxContribution": 50000,
"ecSignatureDate": "2019-04-11",
"endDate": "2019-10-31",
"frameworkProgramme": "H2020",
"fundingScheme": "SME-1",
"grantDoi": "10.3030/854460",
"id": 886828,
"legalBasis": "H2020-EU.2.3.",
"masterCall": "H2020-EIC-SMEInst-2018-2020",
"nature": "",
"objective": "In the last decade there has been an explosion in Online Travel Agents (OTAs) worldwide. OTAs undertake the mammoth task of undercutting the flight prices of major airlines through the use of Bots (an internet Bot, also known as web robot, WWW robot or simply bot, is a software application that runs automated tasks (scripts) over the Internet.). Bots are used to scrape airlines for valuable data to benchmark aggregate flight costs, which drives down prices for the consumer.\n\nWhilst beneficial to consumers, scraping harms travel companies because:\n•\tBots can engage with a websites server hardware and cause website traffic to run slower, in some cases causing server downtime and Direct Denial of Service (DDoS)\n•\tLong term Search Engine Optimization (SEO) damage; distorting analytical marketing metrics.\n•\tDiverting customers to purchase products via third party resellers, limiting chances for up-sell and cross sell opportunities. \n\nThis problem is tackled by anti-scrape approaches. However, current anti-scrape/booking bot solutions are only capable of distinguishing between human traffic and bot traffic through supervised algorithms that do not work to the degree of efficacy required. \n\n\nOur proposed solution is BotStop an algorithmic approach to identifying Bots and scrapers and to policing malicious application traffic. eDNA will provide a solution which reintroduces transparency into the process of purchasing flights and will streamline customer website experience to ensure a more stress-free experience",
"rcn": 223866,
"startDate": "2019-05-01",
"status": "CLOSED",
"subCall": "H2020-SMEInst-2018-2020-1",
"title": "e-DNA BotStop",
"topics": "EIC-SMEInst-2018-2020",
"totalCost": 71429
},{
"acronym": "NAUTIC",
"contentUpdateDate": "2022-08-25 21:32:49",
"ecMaxContribution": 184707.84,
"ecSignatureDate": "2021-04-27",
"endDate": "2023-09-30",
"frameworkProgramme": "H2020",
"fundingScheme": "MSCA-IF-EF-ST",
"grantDoi": "10.3030/101033666",
"id": 8867767,
"legalBasis": "H2020-EU.1.3.",
"masterCall": "H2020-MSCA-IF-2020",
"nature": "",
"objective": "Bringing a new drug to the European market takes at least 10 years and 2.5 BEUR of R&D effort. Computational methods significantly shorten this journey but they require knowledge of the structure and interactions of the involved biomolecules - most often proteins. In recent years, a tremendous progress has been made in the field of a single protein 3D structure prediction. However, predicting protein assemblies -the most crucial step - still remains very challenging. The aim of this IF project is to revolutionise protein complexes prediction methods. This will be achieved first by developing novel, effective and fast approaches for the calculation of the vibrational entropy, key to protein-protein docking mechanisms. Then, in an innovative and multi-disciplinary approach, the Experienced Researcher (ER) aims to combine advanced physics-based models with machine learning methods using data from structural and sequence databases. Finally, this project will link all the pieces together and release them in the form of a web-server in order to allow the community to benefit from the results of this research.\nThe ER will carry out the fellowship in the Centre National de la Recherche Scientifique - CNRS in Grenoble, France. CNRS carries out research in all scientific fields of knowledge and the Supervisor is a renowned expert in data science, computing, and software engineering. Through a well-thought two-way knowledge transfer and training plan, this project will benefit both the host institution and the ER in terms of scientific knowledge, network and open the path for new applications to potentially exploit at the European or global level. The project will also place the ER as a highly visible researcher in the field and ideally set her as a valuable resource for European industrial actors.",
"rcn": 235804,
"startDate": "2021-07-01",
"status": "TERMINATED",
"subCall": "H2020-MSCA-IF-2020",
"title": "Novel computational avenues in protein-protein docking",
"topics": "MSCA-IF-2020",
"totalCost": 184707.84
},{
"acronym": "EnzVolNet",
"contentUpdateDate": "2022-08-15 12:50:20",
"ecMaxContribution": 158121.6,
"ecSignatureDate": "2017-02-14",
"endDate": "2019-04-30",
"frameworkProgramme": "H2020",
"fundingScheme": "MSCA-IF-EF-ST",
"grantDoi": "10.3030/753045",
"id": 101003374,
"legalBasis": "H2020-EU.1.3.",
"masterCall": "H2020-MSCA-IF-2016",
"nature": "",
"objective": "Natural enzymes have evolved to perform their functions under complex selective pressures, being capable of accelerating reactions by several orders of magnitude. In particular, heteromeric enzyme complexes catalyze an enormous array of useful reactions that are often allosterically regulated by different protein partners. Unfortunately, the underlying physical principles of this regulation are still under debate, which makes the alteration of enzyme structure towards useful isolated subunits a tremendous challenge for modern chemical biology. Exploitation of isolated enzyme subunits, however, is advantageous for biosynthetic applications as it reduces the metabolic stress on the host cell and greatly simplifies efforts to engineer specific properties of the enzyme. Current approaches to alter natural enzyme complexes are based on the evaluation of thousands of variants, which make them economically unviable and the resulting catalytic efficiencies lag far behind their natural counterparts. The revolutionary nature of EnzVolNet relies on the application of conformational network models (e.g Markov State Models) to extract the essential functional protein dynamics and key conformational states, reducing the complexity of the enzyme design paradigm and completely reformulating previous computational design approaches. Initial mutations are extracted from costly random mutagenesis experiments and chemoinformatic tools are used to identify beneficial mutations leading to more proficient enzymes. This new strategy will be applied to develop stand-alone enzymes from heteromeric protein complexes, with advantageous biosynthetic properties and improve activity and substrate scope. Experimental evaluation of our computational predictions will finally elucidate the potential of the present approach for mimicking Natures rules of evolution.",
"rcn": 208408,
"startDate": "2017-05-01",
"status": "CLOSED",
"subCall": "H2020-MSCA-IF-2016",
"title": "COMPUTATIONAL EVOLUTION OF ENZYME VARIANTS THROUGH CONFORMATIONAL NETWORKS",
"topics": "MSCA-IF-2016",
"totalCost": 158121.6
},{
"acronym": "FASTPARSE",
"contentUpdateDate": "2022-08-18 09:56:14",
"ecMaxContribution": 1481747,
"ecSignatureDate": "2016-12-08",
"endDate": "2022-07-31",
"frameworkProgramme": "H2020",
"fundingScheme": "ERC-STG",
"grantDoi": "10.3030/714150",
"id": 886776,
"legalBasis": "H2020-EU.1.1.",
"masterCall": "ERC-2016-STG",
"nature": "",
"objective": "The popularization of information technology and the Internet has resulted in an unprecedented growth in the scale at which individuals and institutions generate, communicate and access information. In this context, the effective leveraging of the vast amounts of available data to discover and address people's needs is a fundamental problem of modern societies.\n\nSince most of this circulating information is in the form of written or spoken human language, natural language processing (NLP) technologies are a key asset for this crucial goal. NLP can be used to break language barriers (machine translation), find required information (search engines, question answering), monitor public opinion (opinion mining), or digest large amounts of unstructured text into more convenient forms (information extraction, summarization), among other applications.\n\nThese and other NLP technologies rely on accurate syntactic parsing to extract or analyze the meaning of sentences. Unfortunately, current state-of-the-art parsing algorithms have high computational costs, processing less than a hundred sentences per second on standard hardware. While this is acceptable for working on small sets of documents, it is clearly prohibitive for large-scale processing, and thus constitutes a major roadblock for the widespread application of NLP.\n\nThe goal of this project is to eliminate this bottleneck by developing fast parsers that are suitable for web-scale processing. To do so, FASTPARSE will improve the speed of parsers on several fronts: by avoiding redundant calculations through the reuse of intermediate results from previous sentences; by applying a cognitively-inspired model to compress and recode linguistic information; and by exploiting regularities in human language to find patterns that the parsers can take for granted, avoiding their explicit calculation. The joint application of these techniques will result in much faster parsers that can power all kinds of web-scale NLP applications.",
"rcn": 206936,
"startDate": "2017-02-01",
"status": "SIGNED",
"subCall": "ERC-2016-STG",
"title": "Fast Natural Language Parsing for Large-Scale NLP",
"topics": "ERC-2016-STG",
"totalCost": 1481747
},{
"acronym": "StarLink",
"contentUpdateDate": "2022-08-10 09:42:53",
"ecMaxContribution": 50000,
"ecSignatureDate": "2018-05-04",
"endDate": "2018-08-31",
"frameworkProgramme": "H2020",
"fundingScheme": "SME-1",
"grantDoi": "10.3030/815698",
"id": 815698,
"legalBasis": "H2020-EU.2.3.",
"masterCall": "H2020-EIC-SMEInst-2018-2020",
"nature": "",
"objective": "Vacuum pumps are used in thousands of industrial applications, playing a vital role in food processing, semiconductors, chemicals, pharmaceuticals and many other manufacturing and assembly processes. However, todays pumps are currently unable to provide any type of insights that could help users anticipate a pump malfunction, plan maintenance procedures or setting the adjustments. Pump malfunctions or breakdowns, due to unplanned maintenance or improper settings, cost millions of euros in lost revenues every year as production and logistic lines lie idle waiting for pumps to be fixed, and when they are not optimized their productivity decrease or their energy consumption go up. \n\nBut now, DVP, a vacuum pump manufacturer, has developed the solution to these challenges through StarLink, the worlds first intelligent vacuum pump system. StarLink is a patent-pending system that uses data analytics and machine learning to identify pump malfunctions before they happen, propose actions to be taken, and automatically adjust the operation parameters if the problem relates to the setting. This will reduce pump downtime-related costs by 30%, increase their productivity by 50% and make easier the operation manager tasks. \n\nThe combination of our deep knowledge of vacuum pumps needs with the machine learning expertise of the university of Ferrara will create the most intelligent device to improve the competitiveness of European companies. Additionally, StarLink will contribute to DVPs growth in terms of employees and product portfolio since we will be able to offer a wider range of products and services related to vacuum pumps, which will allow us to enter new markets and sell more units. By 2023, it will generate €3M in yearly revenue with net profits of €2M to our company.",
"rcn": 217721,
"startDate": "2018-05-01",
"status": "CLOSED",
"subCall": "H2020-SMEInst-2018-2020-1",
"title": "StarLink: The World's First Intelligent Vacuum Pump System",
"topics": "EIC-SMEInst-2018-2020",
"totalCost": 71429
},{
"acronym": "ARMOUR",
"contentUpdateDate": "2022-08-18 16:42:12",
"ecMaxContribution": 191149.44,
"ecSignatureDate": "2020-03-16",
"endDate": "2022-10-14",
"frameworkProgramme": "H2020",
"fundingScheme": "MSCA-IF-EF-SE",
"grantDoi": "10.3030/890844",
"id": 890844,
"legalBasis": "H2020-EU.1.3.",
"masterCall": "H2020-MSCA-IF-2019",
"nature": "",
"objective": "General awareness about the smart grid technologies has improved in the last decade due to various energy liberalization actions taken by the European Union. However, the lack of well-developed technologies, has been main cause of slow acceptance of smart grids. This calls for the identification of unexplored research areas in smart grids. Positive outcomes of the research can help in laying down new and well-defined standards for the smart grids and associated intelligent technologies. A convenient and easily integrable product can also help in encouraging various distribution system operators to accept the new technologies. Massive amount of data is already being collected from the distribution networks using smart meters. Rapid advancements in machine learning research have opened up new avenues for data utilization in smart grid. \nForerunners like DEPsys (a smart grid technology company based in Switzerland), have now simplified the distribution system data for further analysis and research. A critical concern raised by DEPsys customers, is their inability to trace the source of power quality issues in the distribution network, which in-turn leads to both energy and economic losses over time. This project builds up on existing infrastructure of DEPsys and aims to be an AMROUR (by improving robustness) for distribution networks against power quality events. The main objectives are: (i) leveraging machine learning for condition monitoring and tracing power quality events, and (ii) to develop a smart grid technology which assists the distribution system operators in prevention and diagnosis of power quality events.",
"rcn": 227886,
"startDate": "2020-10-15",
"status": "SIGNED",
"subCall": "H2020-MSCA-IF-2019",
"title": "smARt Monitoring Of distribUtion netwoRks for robust power quality",
"topics": "MSCA-IF-2019",
"totalCost": 191149.44
},{
"acronym": "Target5LO",
"contentUpdateDate": "2022-08-16 11:09:20",
"ecMaxContribution": 195454.8,
"ecSignatureDate": "2018-03-19",
"endDate": "2020-02-29",
"frameworkProgramme": "H2020",
"fundingScheme": "MSCA-IF-EF-CAR",
"grantDoi": "10.3030/792495",
"id": 792495,
"legalBasis": "H2020-EU.1.3.",
"masterCall": "H2020-MSCA-IF-2017",
"nature": "",
"objective": "Drug efficacy is cornerstone for successful drug discovery programs. Considering that, on average, FDA-approved drugs modulate dozens of off-targets it remains imperative to find strategies to overcome adverse drug reactions correlated with pernicious polypharmacology. In fact, several chemical entities displaying promising anticancer are discontinued from drug development pipelines due to narrow therapeutic windows in pre-clinical models. Here, we propose the development of antibody-drug conjugates exploring the unique bioactivity profile of the naphthoquinone natural product-lapachone (Lp) against acute myeloid leukemia (AML), an unmet medical need. Using a machine learning method, we disclosed Lp as an allosteric modulator of 5-lipoxygenase (5-LO), correlated its anticancer activity with 5-LO expression in blood cancers and showed its efficacy in a disseminated mouse model of AML.\n\nIn this project, a comprehensive investigation of novel means for the targeted delivery of Lp to leukaemia cells is sought after, considering both the promising bioactivity profile but also the significant toxicity in untargeted dosage forms. We apply state-of-the-art synthetic medicinal chemistry to design and access cleavable linkers, and site-specifically conjugate Lp to an anti-IL7R antibody, a validated biomarker in AML and other leukaemias. We aim at employing biophysical and chemical biology approaches to validate quantitative and fast release of Lp with accurate spatiotemporal control in in vitro disease models. Finally, we will validate the deployment of the constructs through preclinical in vivo models of AML. We foresee broad applicability of the developed technology, which may have profound implications in drug discovery. Upon successful completion of this research program, we hope to yield a new targeted drug to treat AML patients with improved efficacy and reduced side-effects.",
"rcn": 215065,
"startDate": "2018-03-01",
"status": "CLOSED",
"subCall": "H2020-MSCA-IF-2017",
"title": "Targeting 5-lipoxygenase in the context of Acute Myeloid Leukemia",
"topics": "MSCA-IF-2017",
"totalCost": 195454.8
},{
"acronym": "Smart Library",
"contentUpdateDate": "2022-08-11 19:59:53",
"ecMaxContribution": 1200000,
"ecSignatureDate": "2017-02-26",
"endDate": "2018-12-31",
"frameworkProgramme": "H2020",
"fundingScheme": "SME-2",
"grantDoi": "10.3030/756826",
"id": 756826,
"legalBasis": "H2020-EU.3.6.",
"masterCall": "H2020-SMEInst-2016-2017",
"nature": "",
"objective": "Children today are natives of technology, having frequent access to digital devices both at home and at school. Digital devices are today even more used than TV. Worryingly, the offering of high quality educational apps is very limited and expensive. Parents and educators are concerned about this and are actively searching for better alternatives.\n\nTo help resolve these issues, Smile and Learn places technology at the service of education with the mission of helping children 2 to 12 years old learn while having fun using digital devices. Like the north American educational philosopher John Dewey, we believe that “if we teach todays students as we taught yesterdays, we rob them of tomorrow.” Our vision is to become the global leader in Edutainment (Entertainment plus Education). To do so we have developed the Smart Digital Library, a single platform of interactive games and stories that, as of today, provides access to up to 30 individual proprietary apps (100 apps by end 2018). The “Library” can be used at home, on the go or at school and provides “smart” recommendations to children, their parents and educators.\n\nIn August 2016, Smile and Learn successfully completed phase I of SME Instrument, finalizing our first release of the Smart Library rolled out in real production environments both at pilot schools (today more than 100 schools use the Library, including 10 special education schools) and with families (+7,000 active users) in different markets, including the US, Spain, the UK, France, Mexico and Colombia, with very positive feedback. We already have more than 30,000 users worldwide with no marketing expenditure.\n\nWe are now moving forward to make the Smart Library a global state-of-the-art product in the edutainment industry by scaling it up and rolling out a powerful dissemination plan, that we expect to conduct with the support of Phase 2 H2020",
"rcn": 208757,
"startDate": "2017-03-01",
"status": "CLOSED",
"subCall": "H2020-SMEINST-2-2016-2017",
"title": "Smart Library of Edutainment: technology and gamification at the service of Education",
"topics": "SMEInst-12-2016-2017",
"totalCost": 1827500
},{
"acronym": "PALGLAC",
"contentUpdateDate": "2022-08-25 10:28:12",
"ecMaxContribution": 2425298.75,
"ecSignatureDate": "2018-05-14",
"endDate": "2024-09-30",
"frameworkProgramme": "H2020",
"fundingScheme": "ERC-ADG",
"grantDoi": "10.3030/787263",
"id": 787263,
"legalBasis": "H2020-EU.1.1.",
"masterCall": "ERC-2017-ADG",
"nature": "",
"objective": "Ice sheets regulate Earths climate by reflecting sunlight away, enabling suitable temperatures for human habitation. Warming is reducing these ice masses and raising sea level. Glaciologists predict ice loss using computational ice sheet models which interact with climate and oceans, but with caveats that highlight processes are inadequately encapsulated. Weather forecasting made a leap in skill by comparing modelled forecasts with actual outcomes to improve physical realism of their models. This project sets out an ambitious programme to adopt this data-modelling approach in ice sheet modelling. Given their longer timescales (100-1000s years) we will use geological and geomorphological records of former ice sheets to provide the evidence; the rapidly growing field of palaeoglaciology.\n\nFocussing on the most numerous and spatially-extensive records of palaeo ice sheet activity - glacial landforms - the project aims to revolutionise understanding of past, present and future ice sheets. Our mapping campaign (Work-Package 1), including by machine learning techniques (WP2), should vastly increase the evidence-base. Resolution of how subglacial landforms are generated and how hydrological networks develop (WP3) would be major breakthroughs leading to possible inversions to information on ice thickness or velocity, and with key implications for ice flow models and hydrological effects on ice dynamics. By pioneering techniques and coding for combining ice sheet models with landform data (WP4) we will improve knowledge of the role of palaeo-ice sheets in Earth system change. Trialling of numerical models in these data-rich environments will highlight deficiencies in process-formulations, leading to better models. Applying our coding to combine landforms and geochronology to optimise modelling (WP4) of the retreat of the Greenland and Antarctic ice sheets since the last glacial will provide spin up glaciological conditions for models that forecast sea level rise.",
"rcn": 216167,
"startDate": "2018-10-01",
"status": "SIGNED",
"subCall": "ERC-2017-ADG",
"title": "Palaeoglaciological advances to understand Earths ice sheets by landform analysis",
"topics": "ERC-2017-ADG",
"totalCost": 2425298.75
},{
"acronym": "Konetik eLCV",
"contentUpdateDate": "2022-08-10 09:21:56",
"ecMaxContribution": 50000,
"ecSignatureDate": "2018-11-29",
"endDate": "2019-01-31",
"frameworkProgramme": "H2020",
"fundingScheme": "SME-1",
"grantDoi": "10.3030/837614",
"id": 837614,
"legalBasis": "H2020-EU.2.3.",
"masterCall": "H2020-EIC-SMEInst-2018-2020",
"nature": "",
"objective": "Light Commercial vehicle fleets are important for the EV adoption A LCV is a business tool, so the utilisation rate and ensuring business continuity are key. Integrating and managing electric LCV is challenging due to the limited driving range and charging infrastructure.\n\nIn this project, our aim is to make a feasibility study of developing the first AI based charging assistant for Light Commercial Vehicle fleets. As part of the project aim is to research into the technical feasibility of analyzing vehicle charging data from the electric LCVs and combine that with consumption data from public, home and office chargers to ensure business continuity of eLCV fleets and save money on charging and reducing idle time.\n\nAccording to the IEA, EV/HEVs stock is projected to reach 200 Million units by 2030. The total EV/HEV market is expected to grow up 233EUR bn by 2021 growing at a 40.65%\n\nThe project will allow us to facilitate the market spread of eLCVs with the first machine learning based smart charging assistant tool based on our unique algorithm that combines advanced energy management and telematics. This will imply to disrupt into the European and international market by saving significant money on eLCV charging and reducing downtimes for our client while generating 5,1 M€ profit until 2022 and a generation of 42 new direct jobs on the company level for Konetik.\n\nKonetik is a telematics company focusing on products helping the widespread of electric vehicles. Konetik serves 300+ companies 3 energy utilities already engaged (NKM, ENGIE, EnBW) regarding a pilot program. Selected as one of the top 100 Berlin based startups",
"rcn": 219747,
"startDate": "2018-11-01",
"status": "CLOSED",
"subCall": "H2020-SMEInst-2018-2020-1",
"title": "Artificial Intelligence based Smart Charging Assistant for Electric Light Commercial Vehicle Fleets",
"topics": "EIC-SMEInst-2018-2020",
"totalCost": 71429
},{
"acronym": "INSENSION",
"contentUpdateDate": "2022-09-04 01:10:17",
"ecMaxContribution": 2255875,
"ecSignatureDate": "2017-11-07",
"endDate": "2021-10-31",
"frameworkProgramme": "H2020",
"fundingScheme": "RIA",
"grantDoi": "10.3030/780819",
"id": 780819,
"legalBasis": "H2020-EU.2.1.1.",
"masterCall": "H2020-ICT-2016-2017",
"nature": "",
"objective": "The INSENSION project will create an ICT platform that enables persons with profound and multiple learning disabilities (PMLD) to use digital applications and services that can enhance the quality of their lives, increase their ability to self-determination and enrich their lives. The target end users of the proposed solution are capable of using only nonconventional, nonsymbolic means of interaction with their environment. Therefore, the platform aims to provide technological means for seamless, and adaptable recognition of a range of highly individual nonsymbolic behavioral signals of people with PMLD to detect behavioral patterns happening in the context of specific situations. These patterns are translated into the affective intents of the end user (their approval or disapproval to the given situation) and allow to communicate them to assistive services. This way an individual with PMLD gains a possibility to seamlessly influence their living environment, through new means of communication with other people, changing conditions of their environment or use new types of assistive digital applications. The project employs recent advances in a range of ICT disciplines equipping the proposed assistive ICT platform with natural behavior recognition mechanisms based on gesture, facial expression and vocalization recognition technologies. This is complemented by novel techniques of artificial intelligence and state-of-the-art Internet of Things models. The research and development of the project is conducted within the inclusive design paradigm, with individual with PMLD and their caregivers directly participating in the R+D process throughout the whole duration of the project. This process links a highly interdisciplinary team of experts of ICT specialists and researchers and practitioners of disability studies and care, with due participation of an assistive technology industry representatives.",
"rcn": 213171,
"startDate": "2018-01-01",
"status": "SIGNED",
"subCall": "H2020-ICT-2017-1",
"title": "Personalized intelligent platform enabling interaction with digital services to individuals with profound and multiple learning disabilities",
"topics": "ICT-23-2017",
"totalCost": 2255875
},{
"acronym": "MANET",
"contentUpdateDate": "2022-06-13 17:36:10",
"ecMaxContribution": 171473.28,
"ecSignatureDate": "2021-04-30",
"endDate": "2024-06-30",
"frameworkProgramme": "H2020",
"fundingScheme": "MSCA-IF-EF-ST",
"grantDoi": "10.3030/101033173",
"id": 101033173,
"legalBasis": "H2020-EU.1.3.",
"masterCall": "H2020-MSCA-IF-2020",
"nature": "",
"objective": "Curbing greenhouse gas emissions is a challenge of the utmost importance for our society future and requires urgent decisions on the implementation of clear-cut climate economic policies. Integrated Assessment Models (IAMs) allow to explore alternative energy scenarios in the next 30-70 years. They are key to support the design of climate policies as they highlight the nexus between climate modelling, social science, and energy systems. However, the use of IAMs to inform climate policies does not come free of controversial aspects. Primarily, the inherent uncertainty of IAMs long-term outputs has created several difficulties for the integration of the modelling insights in the policy design. Modelling outputs diverge across IAMs models quite dramatically when they are asked for example to quantify the uptake of key technologies for the decarbonisation, such as renewables and carbon capture and storage. Uncertainty in IAMs descends from lack of knowledge of the future and from IAMs incomplete representations of the future. Uncertainty cannot be removed, but reduced, understood, and conveyed appropriately to policy makers to avoid that different projections cause delayed actions. \nThis project aims to fill this gap providing a methodology which defines the sources of uncertainty, either due to IAMs inputs or IAMs structure, and quantify their relative importance. The methodology will be embodied in an emulator of IAMs, MANET (the eMulAtor of iNtegratAd assEssmenT models) formulated using machine learning techniques to reproduce IAMs outputs. The project will provide a proof of concept of MANET focusing on the uptake of key decarbonisation technologies. The emulator will provide a simplified version of the IAM outputs as a response surface of the model to any variation of the inputs. MANET will be a flexible tool for policy makers and scientists for a direct comparison of IAMs with no limitation of the solution domain.",
"rcn": 235834,
"startDate": "2022-07-01",
"status": "SIGNED",
"subCall": "H2020-MSCA-IF-2020",
"title": "Climate economic policies: assessing values and costs of uncertainty in energy scenarios",
"topics": "MSCA-IF-2020",
"totalCost": 171473.28
},{
"acronym": "PRINTOUT",
"contentUpdateDate": "2022-11-12 14:18:08",
"ecMaxContribution": 183473.28,
"ecSignatureDate": "2020-04-21",
"endDate": "2022-06-14",
"frameworkProgramme": "H2020",
"fundingScheme": "MSCA-IF-EF-ST",
"grantDoi": "10.3030/892757",
"id": 892757,
"legalBasis": "H2020-EU.1.3.",
"masterCall": "H2020-MSCA-IF-2019",
"nature": "",
"objective": "With the extensive range of document generation devices nowadays, the establishment of computational techniques to find manipulation, detect illegal copies and link documents to their source are useful because (i) finding manipulation can help to detect fake news and manipulated documents; (ii) exposing illegal copies can avoid frauds and copyright violation; and (iii) indicating the owner of an illegal document can provide strong arguments to the prosecution of a suspect. Different machine learning techniques have been proposed in the scientific literature to act in these problems, but many of them are limited as: (i) there is a lack of methodology, which may require different experts to solve different problems; (ii) the limited range of known elements being considered for multi-class classification problems such as source attribution, which do not consider unknown classes in a real-world testing; and (iii) they dont consider adversarial attacks from an experienced forger. In this research project, we propose to address these problems on two fronts: resilient characterization and classification. In the characterization front, we intend to use multi-analysis approaches. Proposed by the candidate in his Ph.D. research, it is a methodology to fuse/ensemble machine learning approaches by considering several investigative scenarios, creating robust classifiers that minimize the risk of attacks. Additionally, we aim at proposing the use of open-set classifiers, which are trained to avoid misclassification of classes not included in the classifier training. We envision solutions to several printed document forensics applications with this setup: source attribution, forgery of documents and illegal copies detection. All the approaches we aim at creating in this project will be done in partnership with a document authentication company, which will provide real-world datasets and new applications.",
"rcn": 229161,
"startDate": "2020-06-15",
"status": "CLOSED",
"subCall": "H2020-MSCA-IF-2019",
"title": "Printed Documents Authentication",
"topics": "MSCA-IF-2019",
"totalCost": 183473.28
},{
"acronym": "SKIDLESS",
"contentUpdateDate": "2022-08-16 00:57:32",
"ecMaxContribution": 50000,
"ecSignatureDate": "2019-01-21",
"endDate": "2019-07-31",
"frameworkProgramme": "H2020",
"fundingScheme": "SME-1",
"grantDoi": "10.3030/855496",
"id": 855496,
"legalBasis": "H2020-EU.2.3.",
"masterCall": "H2020-EIC-SMEInst-2018-2020",
"nature": "",
"objective": "When we drive, our safety is protected by a set of technologies that silently watch over the cars behaviour, intervening to\nminimise the risk of accidents. The Electronic Stability Control (ESC) is by far the most impactful safety technology in cars,\nhaving reduced by around 40% the number of fatal accidents caused by the vehicles loss of control. Although effective, any\nESC on the market suffer from one significant flaw: it cannot directly measure the sideslip angle, which is the key indicator of\nskidding, namely the situation when the car deviates from the drivers intended direction. The result is that present ESC can\ndetect up to 80% of skidding events, thus still leaving room for improvements that can save lives. To address this issue and\ncatch a huge market opportunity, Modelway has developed a machine learning technology able to accurately estimate the\nvehicles sideslip angle in real time. And without adding any new sensor to the car. The key to obtain this result is the\nproprietary and patented Direct Virtual Sensor technology, which can be embedded in standard ESC units to further improve\nthe vehicles capacity to detect a skidding event. The DVS technology has been prototyped and extensive tests have been\ncarried out with car manufacturers and their Tier-1 suppliers, showing that the performances are already in line with the\nexpectations of a highly regulated industry as automotive. Now the development roadmap focuses on understanding the\nfeasibility of the integration of the DVS technology in commercial ESC units (Phase 1), to enable a co-development effort\nwith global ESC manufacturers (e.g. Bosch, Magneti Marelli) leading to a pre-commercial validation test-bed (Phase 2). In\nterms of business potential, with around 100 million cars sold each year globally and around 50 in Europe and the US where\nthe use of ESC is mandatory since 2014, we target more than 4 million DSV installed in cars by 2025, leading to more than\n28 M€ of revenues.",
"rcn": 220470,
"startDate": "2019-02-01",
"status": "CLOSED",
"subCall": "H2020-SMEInst-2018-2020-1",
"title": "Enhancing car safety through accurate and real time side-slip angle assessment",
"topics": "EIC-SMEInst-2018-2020",
"totalCost": 71429
},{
"acronym": "Z-Fact0r",
"contentUpdateDate": "2022-08-18 09:44:24",
"ecMaxContribution": 4206252.88,
"ecSignatureDate": "2016-08-09",
"endDate": "2020-03-31",
"frameworkProgramme": "H2020",
"fundingScheme": "IA",
"grantDoi": "10.3030/723906",
"id": 723906,
"legalBasis": "H2020-EU.2.1.5.",
"masterCall": "H2020-IND-CE-2016-17",
"nature": "",
"objective": "Manufacturing represents approximately 21 % of the EUs GDP and 20 % of its employment, providing more than 30 million jobs in 230 000 enterprises, mostly SMEs. Moreover, each job in industry is considered to be linked to two more in related services. European manufacturing is also a dominant element in international trade, leading the world in areas such as automotive, machinery and agricultural engineering. Already threatened by both the lower-wage economies and other high-tech rivals, the situation of EU companies was even made more difficult by the downturn.\nThe Z-Fact0r consortium has conducted an extensive state-of-the-art research (see section 1.4) and realised that although a number of activities (see section 1.3) have been trying to address the need for zero-defect manufacturing, still there is a vast business opportunity for innovative, high-ROI (Return on Investment) solutions to ensure, better quality and higher productivity in the European manufacturing industries.\nThe Z-Fact0r solution comprises the introduction of five (5) multi-stage production-based strategies targeting (i) the early detection of the defect (Z-DETECT), (ii) the prediction of the defect generation (Z-PREDICT), (iii) the prevention of defect generation by recalibrating the production line (multi-stage), as well as defect propagation in later stages of the production (Z-PREVENT), (iv) the reworking/remanufacturing of the product, if this is possible, using additive and subtractive manufacturing techniques (Z-REPAIR) and (v) the management of the aforementioned strategies through event modelling, KPI (key performance indicators) monitoring and real-time decision support (Z-MANAGE).\nTo do that we have brought together a total of thirteen (13) EU-based partners, representing both industry and academia, having ample experience in cutting-edge technologies and active presence in the EU manufacturing.",
"rcn": 205465,
"startDate": "2016-10-01",
"status": "CLOSED",
"subCall": "H2020-FOF-2016",
"title": "Zero-defect manufacturing strategies towards on-line production management for European factories",
"topics": "FOF-03-2016",
"totalCost": 6063018.75
}]

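A minimal sketch of how a fixture shaped like the JSON array above can be read with Jackson. It is not code from this repository: the class and POJO names are invented for illustration, the POJO mirrors only a few of the fields shown, and Jackson is simply assumed to be available on the classpath.

import java.io.File;
import java.util.Arrays;
import java.util.List;

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ProjectFixtureSketch {

	// Mirrors a subset of the fields in the fixture above; field names match the JSON keys.
	public static class Project {
		public String acronym;
		public long id;
		public String grantDoi;
		public String status;
		public double ecMaxContribution;
	}

	public static void main(String[] args) throws Exception {
		final ObjectMapper mapper = new ObjectMapper()
			.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); // skip the fields the POJO does not mirror
		final List<Project> projects = Arrays.asList(mapper.readValue(new File(args[0]), Project[].class));
		for (Project p : projects) {
			System.out.println(p.acronym + " -> " + p.grantDoi + " (" + p.status + ")");
		}
	}
}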
View File

@ -1,16 +0,0 @@
{"id":"894593","programme":"H2020-EU.3.4.7.","topics":"SESAR-ER4-31-2019"}
{"id":"897004","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"896300","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"892890","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"886828","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"886776","programme":"H2020-EU.2.1.4.;H2020-EU.3.2.6.","topics":"BBI-2019-SO3-D4"}
{"id":"895426","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"898218","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"893787","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"896189","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"891624","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"887259","programme":"H2020-EU.2.1.4.;H2020-EU.3.2.6.","topics":"BBI-2019-SO3-D3"}
{"id":"892834","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"895716","programme":"H2020-EU.1.3.2.","topics":"MSCA-IF-2019"}
{"id":"954782","programme":"H2020-EU.3.;H2020-EU.2.3.;H2020-EU.2.1.","topics":"EIC-SMEInst-2018-2020"}
{"id":"101003374","programme":"H2020-EU.4.","topics":"WF-02-2019"}

View File

@ -0,0 +1,9 @@
{"id":"d1__________::53575dc69e9ace947e02d47ecd54a7a6","downloads":0,"views":5}
{"id":"d11_________::17eda2ff77407538fbe5d3d719b9d1c0","downloads":0,"views":1}
{"id":"d11_________::1d4dc08605fd0a2be1105d30c63bfea1","downloads":1,"views":3}
{"id":"d11_________::2e3527822854ca9816f6dfea5bff61a8","downloads":1,"views":1}
{"id":"d12_________::3085e4c6e051378ca6157fe7f0430c1f","downloads":2,"views":6}
{"id":"d12_________::33f710e6dd30cc5e67e35b371ddc33cf","downloads":0,"views":1}
{"id":"d12_________::39738ebf10654732dd3a7af9f24655f8","downloads":1,"views":3}
{"id":"d13_________::3c3b65f07c1a06c7894397eda1d11bbf","downloads":1,"views":10}
{"id":"d13_________::4938a71a884dd481d329657aa543b850","downloads":0,"views":3}

View File

@ -0,0 +1,12 @@
{"id":"d1__________::53575dc69e9ace947e02d47ecd54a7a6","downloads":0,"views":4}
{"id":"d1__________::53575dc69e9ace947e02d47ecd54a7a6","downloads":0,"views":1}
{"id":"d11_________::17eda2ff77407538fbe5d3d719b9d1c0","downloads":0,"views":1}
{"id":"d11_________::1d4dc08605fd0a2be1105d30c63bfea1","downloads":1,"views":3}
{"id":"d11_________::2e3527822854ca9816f6dfea5bff61a8","downloads":1,"views":1}
{"id":"d12_________::3085e4c6e051378ca6157fe7f0430c1f","downloads":2,"views":3}
{"id":"d12_________::3085e4c6e051378ca6157fe7f0430c1f","downloads":0,"views":3}
{"id":"d12_________::33f710e6dd30cc5e67e35b371ddc33cf","downloads":0,"views":1}
{"id":"d12_________::39738ebf10654732dd3a7af9f24655f8","downloads":1,"views":3}
{"id":"d13_________::3c3b65f07c1a06c7894397eda1d11bbf","downloads":1,"views":8}
{"id":"d13_________::3c3b65f07c1a06c7894397eda1d11bbf","downloads":0,"views":2}
{"id":"d13_________::4938a71a884dd481d329657aa543b850","downloads":0,"views":3}

View File

@ -0,0 +1,9 @@
{"id":"f1__________::53575dc69e9ace947e02d47ecd54a7a6","downloads":0,"views":5}
{"id":"f11_________::17eda2ff77407538fbe5d3d719b9d1c0","downloads":0,"views":1}
{"id":"f11_________::1d4dc08605fd0a2be1105d30c63bfea1","downloads":1,"views":3}
{"id":"f11_________::2e3527822854ca9816f6dfea5bff61a8","downloads":1,"views":1}
{"id":"f12_________::3085e4c6e051378ca6157fe7f0430c1f","downloads":2,"views":6}
{"id":"f12_________::33f710e6dd30cc5e67e35b371ddc33cf","downloads":0,"views":1}
{"id":"f12_________::39738ebf10654732dd3a7af9f24655f8","downloads":1,"views":3}
{"id":"f13_________::3c3b65f07c1a06c7894397eda1d11bbf","downloads":1,"views":10}
{"id":"f13_________::4938a71a884dd481d329657aa543b850","downloads":0,"views":3}

View File

@ -0,0 +1,12 @@
{"id":"f1__________::53575dc69e9ace947e02d47ecd54a7a6","downloads":0,"views":4}
{"id":"f1__________::53575dc69e9ace947e02d47ecd54a7a6","downloads":0,"views":1}
{"id":"f11_________::17eda2ff77407538fbe5d3d719b9d1c0","downloads":0,"views":1}
{"id":"f11_________::1d4dc08605fd0a2be1105d30c63bfea1","downloads":1,"views":3}
{"id":"f11_________::2e3527822854ca9816f6dfea5bff61a8","downloads":1,"views":1}
{"id":"f12_________::3085e4c6e051378ca6157fe7f0430c1f","downloads":2,"views":3}
{"id":"f12_________::3085e4c6e051378ca6157fe7f0430c1f","downloads":0,"views":3}
{"id":"f12_________::33f710e6dd30cc5e67e35b371ddc33cf","downloads":0,"views":1}
{"id":"f12_________::39738ebf10654732dd3a7af9f24655f8","downloads":1,"views":3}
{"id":"f13_________::3c3b65f07c1a06c7894397eda1d11bbf","downloads":1,"views":8}
{"id":"f13_________::3c3b65f07c1a06c7894397eda1d11bbf","downloads":0,"views":2}
{"id":"f13_________::4938a71a884dd481d329657aa543b850","downloads":0,"views":3}

View File

@ -0,0 +1,9 @@
{"id":"dedup_wf_001::53575dc69e9ace947e02d47ecd54a7a6","downloads":0,"views":5}
{"id":"doi_________::17eda2ff77407538fbe5d3d719b9d1c0","downloads":0,"views":1}
{"id":"doi_________::1d4dc08605fd0a2be1105d30c63bfea1","downloads":1,"views":3}
{"id":"doi_________::2e3527822854ca9816f6dfea5bff61a8","downloads":1,"views":1}
{"id":"doi_________::3085e4c6e051378ca6157fe7f0430c1f","downloads":2,"views":6}
{"id":"doi_________::33f710e6dd30cc5e67e35b371ddc33cf","downloads":0,"views":1}
{"id":"doi_________::39738ebf10654732dd3a7af9f24655f8","downloads":1,"views":3}
{"id":"doi_________::3c3b65f07c1a06c7894397eda1d11bbf","downloads":1,"views":10}
{"id":"doi_________::4938a71a884dd481d329657aa543b850","downloads":0,"views":3}

View File

@ -0,0 +1,12 @@
{"id":"dedup_wf_001::53575dc69e9ace947e02d47ecd54a7a6","downloads":0,"views":4}
{"id":"dedup_wf_001::53575dc69e9ace947e02d47ecd54a7a6","downloads":0,"views":1}
{"id":"doi_________::17eda2ff77407538fbe5d3d719b9d1c0","downloads":0,"views":1}
{"id":"doi_________::1d4dc08605fd0a2be1105d30c63bfea1","downloads":1,"views":3}
{"id":"doi_________::2e3527822854ca9816f6dfea5bff61a8","downloads":1,"views":1}
{"id":"doi_________::3085e4c6e051378ca6157fe7f0430c1f","downloads":2,"views":3}
{"id":"doi_________::3085e4c6e051378ca6157fe7f0430c1f","downloads":0,"views":3}
{"id":"doi_________::33f710e6dd30cc5e67e35b371ddc33cf","downloads":0,"views":1}
{"id":"doi_________::39738ebf10654732dd3a7af9f24655f8","downloads":1,"views":3}
{"id":"doi_________::3c3b65f07c1a06c7894397eda1d11bbf","downloads":1,"views":8}
{"id":"doi_________::3c3b65f07c1a06c7894397eda1d11bbf","downloads":0,"views":2}
{"id":"doi_________::4938a71a884dd481d329657aa543b850","downloads":0,"views":3}

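The nine-line files above appear to be the aggregated counterpart of the twelve-line files: entries sharing the same id have their downloads and views summed (for example, views 4 + 1 = 5 for the first identifier). Below is a minimal sketch of that aggregation; the holder class and method names are assumptions for illustration and are not taken from this repository.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class UsageStatsAggregationSketch {

	// Plain holder mirroring the {"id", "downloads", "views"} shape of the fixtures above.
	static class UsageStat {
		final String id;
		final long downloads;
		final long views;

		UsageStat(String id, long downloads, long views) {
			this.id = id;
			this.downloads = downloads;
			this.views = views;
		}
	}

	// Groups rows by id and sums their counters, preserving the input order.
	static List<UsageStat> aggregate(List<UsageStat> raw) {
		final Map<String, UsageStat> byId = new LinkedHashMap<>();
		for (UsageStat s : raw) {
			byId.merge(s.id, s, (a, b) -> new UsageStat(a.id, a.downloads + b.downloads, a.views + b.views));
		}
		return new ArrayList<>(byId.values());
	}

	public static void main(String[] args) {
		final List<UsageStat> raw = new ArrayList<>();
		raw.add(new UsageStat("d1__________::53575dc69e9ace947e02d47ecd54a7a6", 0, 4));
		raw.add(new UsageStat("d1__________::53575dc69e9ace947e02d47ecd54a7a6", 0, 1));
		for (UsageStat s : aggregate(raw)) {
			// Prints downloads=0 views=5, matching the corresponding nine-line fixture.
			System.out.println(s.id + " downloads=" + s.downloads + " views=" + s.views);
		}
	}
}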
View File

@ -1,12 +0,0 @@
{"result_id":"dedup_wf_001::53575dc69e9ace947e02d47ecd54a7a6","downloads":0,"views":4}
{"result_id":"dedup_wf_001::53575dc69e9ace947e02d47ecd54a7a6","downloads":0,"views":1}
{"result_id":"doi_________::17eda2ff77407538fbe5d3d719b9d1c0","downloads":0,"views":1}
{"result_id":"doi_________::1d4dc08605fd0a2be1105d30c63bfea1","downloads":1,"views":3}
{"result_id":"doi_________::2e3527822854ca9816f6dfea5bff61a8","downloads":1,"views":1}
{"result_id":"doi_________::3085e4c6e051378ca6157fe7f0430c1f","downloads":2,"views":3}
{"result_id":"doi_________::3085e4c6e051378ca6157fe7f0430c1f","downloads":0,"views":3}
{"result_id":"doi_________::33f710e6dd30cc5e67e35b371ddc33cf","downloads":0,"views":1}
{"result_id":"doi_________::39738ebf10654732dd3a7af9f24655f8","downloads":1,"views":3}
{"result_id":"doi_________::3c3b65f07c1a06c7894397eda1d11bbf","downloads":1,"views":8}
{"result_id":"doi_________::3c3b65f07c1a06c7894397eda1d11bbf","downloads":0,"views":2}
{"result_id":"doi_________::4938a71a884dd481d329657aa543b850","downloads":0,"views":3}

View File

@ -37,12 +37,24 @@ public class SubscriptionUtils {
}
public static boolean verifyDateRange(final long date, final String min, final String max) {
long from = 0;
long to = Long.MAX_VALUE;
try {
return date >= DateUtils.parseDate(min, "yyyy-MM-dd").getTime()
&& date < DateUtils.parseDate(max, "yyyy-MM-dd").getTime() + ONE_DAY;
from = min != null ? DateUtils.parseDate(min, "yyyy-MM-dd").getTime() : 0;
} catch (final ParseException e) {
return false;
from = 0;
}
try {
to = max != null ? DateUtils.parseDate(max, "yyyy-MM-dd").getTime() + ONE_DAY : Long.MAX_VALUE;
} catch (final ParseException e) {
to = Long.MAX_VALUE;
}
return date >= from && date < to;
}
public static boolean verifyExact(final String s1, final String s2) {

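The hunk above changes verifyDateRange so that a missing or unparseable bound no longer fails the whole check but is treated as an open end of the range. A minimal usage sketch of that behaviour follows; it is not part of the commit, and it assumes the refactored SubscriptionUtils and commons-lang3 are on the classpath (the package declaration and import for SubscriptionUtils are omitted because they are not shown in the hunk).

public class VerifyDateRangeSketch {
	public static void main(String[] args) throws java.text.ParseException {
		// An arbitrary timestamp inside 2010, built with the same DateUtils used by the method above.
		final long date = org.apache.commons.lang3.time.DateUtils
			.parseDate("2010-06-15", "yyyy-MM-dd").getTime();

		// Both bounds parse: behaves as before the refactoring.
		System.out.println(SubscriptionUtils.verifyDateRange(date, "2010-01-01", "2011-01-01")); // true

		// A null or unparseable bound (e.g. the literal "NULL") now means "open on that side".
		System.out.println(SubscriptionUtils.verifyDateRange(date, null, "2011-01-01")); // true
		System.out.println(SubscriptionUtils.verifyDateRange(date, "NULL", "NULL")); // true

		// Bounds that do parse are still enforced.
		System.out.println(SubscriptionUtils.verifyDateRange(date, "2020-01-01", null)); // false
	}
}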
View File

@ -41,6 +41,18 @@ public class SubscriptionUtilsTest {
assertTrue(SubscriptionUtils.verifyDateRange(date, "2010-01-01", "2011-01-01"));
assertFalse(SubscriptionUtils.verifyDateRange(date, "2020-01-01", "2021-01-01"));
assertTrue(SubscriptionUtils.verifyDateRange(date, "2010-01-01", "NULL"));
assertTrue(SubscriptionUtils.verifyDateRange(date, "2010-01-01", null));
assertTrue(SubscriptionUtils.verifyDateRange(date, "NULL", "2011-01-01"));
assertTrue(SubscriptionUtils.verifyDateRange(date, null, "2011-01-01"));
assertTrue(SubscriptionUtils.verifyDateRange(date, "NULL", "NULL"));
assertTrue(SubscriptionUtils.verifyDateRange(date, null, null));
assertFalse(SubscriptionUtils.verifyDateRange(date, "2020-01-01", null));
assertFalse(SubscriptionUtils.verifyDateRange(date, "2020-01-01", "NULL"));
assertFalse(SubscriptionUtils.verifyDateRange(date, null, "2005-01-01"));
assertFalse(SubscriptionUtils.verifyDateRange(date, "NULL", "2005-01-01"));
}
@Test

View File

@ -42,7 +42,7 @@ public class RelationAggregator extends Aggregator<Relation, Relation, Relation>
return b;
}
return MergeUtils.mergeRelation(b, a);
return MergeUtils.merge(b, a);
}
@Override

View File

@ -24,6 +24,8 @@ import eu.dnetlib.dhp.utils.DHPUtils;
import eu.dnetlib.doiboost.orcidnodoi.util.DumpToActionsUtility;
import eu.dnetlib.doiboost.orcidnodoi.util.Pair;
import javax.jws.WebParam;
/**
* This class converts an ORCID publication from JSON format to OAF
*/
@ -128,16 +130,15 @@ public class PublicationToOaf implements Serializable {
Publication publication = new Publication();
final DataInfo dataInfo = new DataInfo();
final EntityDataInfo dataInfo = new EntityDataInfo();
dataInfo.setDeletedbyinference(false);
dataInfo.setInferred(false);
dataInfo.setTrust("0.9");
dataInfo.setTrust(.9f);
dataInfo
.setProvenanceaction(
mapQualifier(
OafMapperUtils.qualifier(
ModelConstants.SYSIMPORT_ORCID_NO_DOI,
ModelConstants.SYSIMPORT_ORCID_NO_DOI,
ModelConstants.DNET_PROVENANCE_ACTIONS,
ModelConstants.DNET_PROVENANCE_ACTIONS));
publication.setDataInfo(dataInfo);
@ -159,20 +160,14 @@ public class PublicationToOaf implements Serializable {
.getExternalReference()
.add(
convertExtRef(
extId, classid, classname, ModelConstants.DNET_PID_TYPES,
ModelConstants.DNET_PID_TYPES));
extId, classid, classname, ModelConstants.DNET_PID_TYPES));
}
});
// Adding source
final String source = getStringValue(rootElement, "sourceName");
if (StringUtils.isNotBlank(source)) {
Field<String> sourceField = mapStringField(source, null);
if (sourceField == null) {
publication.setSource(null);
} else {
publication.setSource(Arrays.asList(sourceField));
}
publication.setSource(Arrays.asList(source));
}
// Adding titles
@ -193,7 +188,7 @@ public class PublicationToOaf implements Serializable {
.setTitle(
titles
.stream()
.map(t -> mapStructuredProperty(t, ModelConstants.MAIN_TITLE_QUALIFIER, null))
.map(t -> mapStructuredProperty(t, ModelConstants.MAIN_TITLE_QUALIFIER))
.filter(Objects::nonNull)
.collect(Collectors.toList()));
// Adding identifier
@ -220,8 +215,8 @@ public class PublicationToOaf implements Serializable {
if (StringUtils.isNotBlank(type)) {
publication
.setResourcetype(
mapQualifier(
type, type, ModelConstants.DNET_DATA_CITE_RESOURCE, ModelConstants.DNET_DATA_CITE_RESOURCE));
OafMapperUtils.qualifier(
type, type, ModelConstants.DNET_DATA_CITE_RESOURCE));
Map<String, String> publicationType = typologiesMapping.get(type);
if ((publicationType == null || publicationType.isEmpty()) && errorsInvalidType != null) {
@ -260,7 +255,7 @@ public class PublicationToOaf implements Serializable {
final String pubDate = getPublicationDate(rootElement, "publicationDates");
if (StringUtils.isNotBlank(pubDate)) {
instance.setDateofacceptance(mapStringField(pubDate, null));
instance.setDateofacceptance(pubDate);
}
instance.setCollectedfrom(createCollectedFrom());
@ -270,15 +265,13 @@ public class PublicationToOaf implements Serializable {
.setAccessright(
OafMapperUtils
.accessRight(
ModelConstants.UNKNOWN, "Unknown", ModelConstants.DNET_ACCESS_MODES,
ModelConstants.DNET_ACCESS_MODES));
ModelConstants.UNKNOWN, "Unknown", ModelConstants.DNET_ACCESS_MODES));
// Adding type
instance
.setInstancetype(
mapQualifier(
cobjValue, typeValue, ModelConstants.DNET_PUBLICATION_RESOURCE,
ModelConstants.DNET_PUBLICATION_RESOURCE));
OafMapperUtils.qualifier(
cobjValue, typeValue, ModelConstants.DNET_PUBLICATION_RESOURCE));
publication.setInstance(Arrays.asList(instance));
} else {
@ -313,12 +306,7 @@ public class PublicationToOaf implements Serializable {
return null;
}
}
String classValue = getDefaultResulttype(cobjValue);
publication
.setResulttype(
mapQualifier(
classValue, classValue, ModelConstants.DNET_RESULT_TYPOLOGIES,
ModelConstants.DNET_RESULT_TYPOLOGIES));
publication.setResulttype(getDefaultResulttype(cobjValue));
if (enrichedPublications != null) {
enrichedPublications.add(1);
}
@ -422,16 +410,15 @@ public class PublicationToOaf implements Serializable {
final String pubDate = getPublicationDate(rootElement, "publication_date");
if (StringUtils.isNotBlank(pubDate)) {
if (addToDateOfAcceptance) {
publication.setDateofacceptance(mapStringField(pubDate, null));
publication.setDateofacceptance(pubDate);
}
Qualifier q = mapQualifier(
dictionaryKey, dictionaryKey, ModelConstants.DNET_DATACITE_DATE, ModelConstants.DNET_DATACITE_DATE);
Qualifier q = OafMapperUtils.qualifier(dictionaryKey, dictionaryKey, ModelConstants.DNET_DATACITE_DATE);
publication
.setRelevantdate(
Arrays
.asList(pubDate)
.stream()
.map(r -> mapStructuredProperty(r, q, null))
.map(r -> mapStructuredProperty(r, q))
.filter(Objects::nonNull)
.collect(Collectors.toList()));
}
@ -511,44 +498,22 @@ public class PublicationToOaf implements Serializable {
return true;
}
private Qualifier mapQualifier(String classId, String className, String schemeId, String schemeName) {
final Qualifier qualifier = new Qualifier();
qualifier.setClassid(classId);
qualifier.setClassname(className);
qualifier.setSchemeid(schemeId);
qualifier.setSchemename(schemeName);
return qualifier;
}
private ExternalReference convertExtRef(String extId, String classId, String className, String schemeId,
String schemeName) {
private ExternalReference convertExtRef(String extId, String classId, String className, String schemeId) {
ExternalReference ex = new ExternalReference();
ex.setRefidentifier(extId);
ex.setQualifier(mapQualifier(classId, className, schemeId, schemeName));
ex.setQualifier(OafMapperUtils.qualifier(classId, className, schemeId));
return ex;
}
private StructuredProperty mapStructuredProperty(String value, Qualifier qualifier, DataInfo dataInfo) {
private StructuredProperty mapStructuredProperty(String value, Qualifier qualifier) {
if (value == null || StringUtils.isBlank(value)) {
return null;
}
final StructuredProperty structuredProperty = new StructuredProperty();
structuredProperty.setValue(value);
structuredProperty.setQualifier(qualifier);
structuredProperty.setDataInfo(dataInfo);
return structuredProperty;
}
private Field<String> mapStringField(String value, DataInfo dataInfo) {
if (value == null || StringUtils.isBlank(value)) {
return null;
}
final Field<String> stringField = new Field<>();
stringField.setValue(value);
stringField.setDataInfo(dataInfo);
return stringField;
final StructuredProperty sp = new StructuredProperty();
sp.setValue(value);
sp.setQualifier(qualifier);
return sp;
}
private KeyValue createCollectedFrom() {
@ -562,27 +527,19 @@ public class PublicationToOaf implements Serializable {
return ModelConstants.UNKNOWN_REPOSITORY;
}
private StructuredProperty mapAuthorId(String orcidId) {
final StructuredProperty sp = new StructuredProperty();
sp.setValue(orcidId);
final Qualifier q = new Qualifier();
q.setClassid(ModelConstants.ORCID);
q.setClassname(ModelConstants.ORCID_CLASSNAME);
q.setSchemeid(ModelConstants.DNET_PID_TYPES);
q.setSchemename(ModelConstants.DNET_PID_TYPES);
sp.setQualifier(q);
final DataInfo dataInfo = new DataInfo();
dataInfo.setDeletedbyinference(false);
dataInfo.setInferred(false);
dataInfo.setTrust("0.91");
dataInfo
.setProvenanceaction(
mapQualifier(
ModelConstants.SYSIMPORT_CROSSWALK_ENTITYREGISTRY,
ModelConstants.HARVESTED,
ModelConstants.DNET_PROVENANCE_ACTIONS,
ModelConstants.DNET_PROVENANCE_ACTIONS));
sp.setDataInfo(dataInfo);
return sp;
private AuthorPid mapAuthorId(String orcidId) {
return OafMapperUtils.authorPid(
orcidId,
OafMapperUtils.qualifier(
ModelConstants.ORCID,
ModelConstants.ORCID_CLASSNAME,
ModelConstants.DNET_PID_TYPES),
OafMapperUtils.dataInfo(.91f,
null,
false,
OafMapperUtils.qualifier(
ModelConstants.SYSIMPORT_CROSSWALK_ENTITYREGISTRY,
ModelConstants.HARVESTED,
ModelConstants.DNET_PROVENANCE_ACTIONS)));
}
}
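
The rewrite above drops the local mapQualifier/mapStringField helpers in favour of the OafMapperUtils factories, switches entity-level provenance to EntityDataInfo with a float trust, and models author PIDs as AuthorPid. A compact Java sketch of the same construction pattern, assuming only the factory signatures visible in this changeset (qualifier without a schemename argument, dataInfo(trust, inferenceprovenance, inferred, provenanceaction), authorPid(value, qualifier, dataInfo)); the import locations follow the packages referenced elsewhere in the diff and the method name is hypothetical:

// imports assume the packages used elsewhere in this changeset
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.AuthorPid;
import eu.dnetlib.dhp.schema.oaf.Qualifier;
import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;

class AuthorPidSketch {
	// Mirrors mapAuthorId above: an ORCID author PID with a harvested-crosswalk provenance.
	static AuthorPid orcidPid(String orcid) {
		Qualifier orcidQualifier = OafMapperUtils.qualifier(
			ModelConstants.ORCID,
			ModelConstants.ORCID_CLASSNAME,
			ModelConstants.DNET_PID_TYPES);
		return OafMapperUtils.authorPid(
			orcid,
			orcidQualifier,
			OafMapperUtils.dataInfo(.91f, null, false,
				OafMapperUtils.qualifier(
					ModelConstants.SYSIMPORT_CROSSWALK_ENTITYREGISTRY,
					ModelConstants.HARVESTED,
					ModelConstants.DNET_PROVENANCE_ACTIONS)));
	}
}
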

View File

@ -210,7 +210,6 @@ object DoiBoostMappingUtil {
OafMapperUtils.accessRight(
ModelConstants.ACCESS_RIGHT_OPEN,
"Open Access",
ModelConstants.DNET_ACCESS_MODES,
ModelConstants.DNET_ACCESS_MODES
)
}
@ -219,7 +218,6 @@ object DoiBoostMappingUtil {
OafMapperUtils.accessRight(
"RESTRICTED",
"Restricted",
ModelConstants.DNET_ACCESS_MODES,
ModelConstants.DNET_ACCESS_MODES
)
}
@ -228,7 +226,6 @@ object DoiBoostMappingUtil {
OafMapperUtils.accessRight(
ModelConstants.UNKNOWN,
ModelConstants.NOT_AVAILABLE,
ModelConstants.DNET_ACCESS_MODES,
ModelConstants.DNET_ACCESS_MODES
)
}
@ -237,7 +234,6 @@ object DoiBoostMappingUtil {
OafMapperUtils.accessRight(
"EMBARGO",
"Embargo",
ModelConstants.DNET_ACCESS_MODES,
ModelConstants.DNET_ACCESS_MODES
)
}
@ -246,11 +242,39 @@ object DoiBoostMappingUtil {
OafMapperUtils.accessRight(
"CLOSED",
"Closed Access",
ModelConstants.DNET_ACCESS_MODES,
ModelConstants.DNET_ACCESS_MODES
)
}
val entityDataInfo = generateEntityDataInfo()
def generateEntityDataInfo(): EntityDataInfo = {
OafMapperUtils.dataInfo(
false,
false,
.9f,
null,
false,
OafMapperUtils.qualifier(
ModelConstants.SYSIMPORT_ACTIONSET,
ModelConstants.SYSIMPORT_ACTIONSET,
ModelConstants.DNET_PROVENANCE_ACTIONS
))
}
val dataInfo = generateDataInfo()
def generateDataInfo(): DataInfo = {
OafMapperUtils.dataInfo(
.9f,
null,
false,
OafMapperUtils.qualifier(
ModelConstants.SYSIMPORT_ACTIONSET,
ModelConstants.SYSIMPORT_ACTIONSET,
ModelConstants.DNET_PROVENANCE_ACTIONS
))
}
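
The two singletons above make the split explicit: entityDataInfo (the six-argument factory) is what entities carry via setDataInfo, while the four-argument dataInfo feeds relation provenance together with a collectedfrom. A short Java sketch of that division of labour (the schema classes are Java), assuming the OafMapperUtils signatures used in this changeset; the relation target id is a made-up placeholder:

// schema class imports as used elsewhere in this changeset (assumption)
import java.util.Arrays;

import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.KeyValue;
import eu.dnetlib.dhp.schema.oaf.Publication;
import eu.dnetlib.dhp.schema.oaf.Qualifier;
import eu.dnetlib.dhp.schema.oaf.Relation;
import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;

class ProvenanceSplitSketch {
	static void attach(Publication pub, Relation rel, KeyValue collectedFrom) {
		Qualifier actionSet = OafMapperUtils.qualifier(
			ModelConstants.SYSIMPORT_ACTIONSET,
			ModelConstants.SYSIMPORT_ACTIONSET,
			ModelConstants.DNET_PROVENANCE_ACTIONS);

		// Entities keep a data info object attached directly (the six-argument factory above).
		pub.setDataInfo(OafMapperUtils.dataInfo(false, false, .9f, null, false, actionSet));

		// Relations no longer carry dataInfo/collectedfrom fields; they get a Provenance list instead.
		rel.setSource(pub.getId());
		rel.setTarget("unresolved::grid::example"); // hypothetical target id
		rel.setProvenance(
			OafMapperUtils.getProvenance(
				Arrays.asList(collectedFrom),
				OafMapperUtils.dataInfo(.9f, null, false, actionSet)));
	}
}
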
def extractInstance(r: Result): Option[Instance] = {
r.getInstance()
.asScala
@ -303,10 +327,6 @@ object DoiBoostMappingUtil {
s"10|${b}::${DHPUtils.md5(a)}"
}
def generateDataInfo(): DataInfo = {
generateDataInfo(0.9F)
}
def filterPublication(publication: Publication): Boolean = {
//Case empty publication
@ -373,23 +393,6 @@ object DoiBoostMappingUtil {
true
}
def generateDataInfo(trust: Float): DataInfo = {
val di = new EntityDataInfo
di.setDeletedbyinference(false)
di.setInferred(false)
di.setInvisible(false)
di.setTrust(trust)
di.setProvenanceaction(
OafMapperUtils.qualifier(
ModelConstants.SYSIMPORT_ACTIONSET,
ModelConstants.SYSIMPORT_ACTIONSET,
ModelConstants.DNET_PROVENANCE_ACTIONS
)
)
di
}
def createSubject(value: String, classId: String, schemeId: String): Subject = {
val s = new Subject
s.setQualifier(OafMapperUtils.qualifier(classId, classId, schemeId))
@ -433,7 +436,7 @@ object DoiBoostMappingUtil {
sp
}
val collectedFrom = createCrossrefCollectedFrom()
def createCrossrefCollectedFrom(): KeyValue = {
val cf = new KeyValue

View File

@ -3,17 +3,19 @@ package eu.dnetlib.doiboost
import eu.dnetlib.dhp.application.ArgumentApplicationParser
import eu.dnetlib.dhp.oa.merge.AuthorMerger
import eu.dnetlib.dhp.schema.common.ModelConstants
import eu.dnetlib.dhp.schema.oaf.utils.{MergeUtils, OafMapperUtils}
import eu.dnetlib.dhp.schema.oaf.{Organization, Publication, Relation, Dataset => OafDataset}
import eu.dnetlib.doiboost.mag.ConversionUtil
import eu.dnetlib.doiboost.DoiBoostMappingUtil._
import org.apache.commons.io.IOUtils
import org.apache.spark.SparkConf
import org.apache.spark.sql.expressions.Aggregator
import org.apache.spark.sql.functions.col
import org.apache.spark.sql._
import org.json4s.DefaultFormats
import org.json4s.JsonAST.{JField, JObject, JString}
import org.json4s.jackson.JsonMethods.parse
import org.slf4j.{Logger, LoggerFactory}
import scala.collection.JavaConverters._
object SparkGenerateDoiBoost {
@ -74,11 +76,11 @@ object SparkGenerateDoiBoost {
spark.read.load(s"$workingDirPath/uwPublication").as[Publication].map(p => (p.getId, p))
def applyMerge(item: ((String, Publication), (String, Publication))): Publication = {
val crossrefPub = item._1._2
var crossrefPub = item._1._2
if (item._2 != null) {
val otherPub = item._2._2
if (otherPub != null) {
crossrefPub.mergeFrom(otherPub)
crossrefPub = MergeUtils.merge(crossrefPub, otherPub)
crossrefPub.setAuthor(AuthorMerger.mergeAuthor(crossrefPub.getAuthor, otherPub.getAuthor))
}
}
@ -117,16 +119,16 @@ object SparkGenerateDoiBoost {
val doiBoostPublication: Dataset[(String, Publication)] = spark.read
.load(s"$workingDirPath/doiBoostPublication")
.as[Publication]
.filter(p => DoiBoostMappingUtil.filterPublication(p))
.map(DoiBoostMappingUtil.toISSNPair)(tupleForJoinEncoder)
.filter(p => filterPublication(p))
.map(toISSNPair)(tupleForJoinEncoder)
val hostedByDataset: Dataset[(String, HostedByItemType)] = spark.createDataset(
spark.sparkContext.textFile(hostedByMapPath).map(DoiBoostMappingUtil.toHostedByItem)
spark.sparkContext.textFile(hostedByMapPath).map(toHostedByItem)
)
doiBoostPublication
.joinWith(hostedByDataset, doiBoostPublication("_1").equalTo(hostedByDataset("_1")), "left")
.map(DoiBoostMappingUtil.fixPublication)
.map(fixPublication)
.map(p => (p.getId, p))
.groupByKey(_._1)
.reduceGroups((left, right) => {
@ -138,10 +140,9 @@ object SparkGenerateDoiBoost {
else {
// Here Left and Right are not null
// So we have to merge
val b1 = left._2
var b1 = left._2
val b2 = right._2
b1.mergeFrom(b2)
b1.mergeOAFDataInfo(b2)
b1 = MergeUtils.mergeProject(b1, b2)
val authors = AuthorMerger.mergeAuthor(b1.getAuthor, b2.getAuthor)
b1.setAuthor(authors)
if (b2.getId != null && b2.getId.nonEmpty)
@ -198,24 +199,16 @@ object SparkGenerateDoiBoost {
val affId: String =
if (affiliation.GridId.isDefined)
s"unresolved::grid::${affiliation.GridId.get.toLowerCase}"
else DoiBoostMappingUtil.generateMAGAffiliationId(affiliation.AffiliationId.toString)
else generateMAGAffiliationId(affiliation.AffiliationId.toString)
val r: Relation = new Relation
r.setSource(pub.getId)
r.setTarget(affId)
r.setRelType(ModelConstants.RESULT_ORGANIZATION)
r.setRelClass(ModelConstants.HAS_AUTHOR_INSTITUTION)
r.setSubRelType(ModelConstants.AFFILIATION)
r.setDataInfo(pub.getDataInfo)
r.setCollectedfrom(List(DoiBoostMappingUtil.createMAGCollectedFrom()).asJava)
val r1: Relation = new Relation
r1.setTarget(pub.getId)
r1.setSource(affId)
r1.setRelType(ModelConstants.RESULT_ORGANIZATION)
r1.setRelClass(ModelConstants.IS_AUTHOR_INSTITUTION_OF)
r1.setSubRelType(ModelConstants.AFFILIATION)
r1.setDataInfo(pub.getDataInfo)
r1.setCollectedfrom(List(DoiBoostMappingUtil.createMAGCollectedFrom()).asJava)
List(r, r1)
r.setProvenance(OafMapperUtils.getProvenance(pub.getCollectedfrom, dataInfo))
List(r)
})(mapEncoderRel)
.write
.mode(SaveMode.Overwrite)
@ -265,14 +258,14 @@ object SparkGenerateDoiBoost {
val affiliation = item._2
if (affiliation.GridId.isEmpty) {
val o = new Organization
o.setCollectedfrom(List(DoiBoostMappingUtil.createMAGCollectedFrom()).asJava)
o.setDataInfo(DoiBoostMappingUtil.generateDataInfo())
o.setId(DoiBoostMappingUtil.generateMAGAffiliationId(affiliation.AffiliationId.toString))
o.setCollectedfrom(List(createMAGCollectedFrom()).asJava)
o.setDataInfo(entityDataInfo)
o.setId(generateMAGAffiliationId(affiliation.AffiliationId.toString))
o.setOriginalId(List(affiliation.AffiliationId.toString).asJava)
if (affiliation.DisplayName.nonEmpty)
o.setLegalname(DoiBoostMappingUtil.asField(affiliation.DisplayName.get))
o.setLegalname(affiliation.DisplayName.get)
if (affiliation.OfficialPage.isDefined)
o.setWebsiteurl(DoiBoostMappingUtil.asField(affiliation.OfficialPage.get))
o.setWebsiteurl(affiliation.OfficialPage.get)
o.setCountry(ModelConstants.UNKNOWN_COUNTRY)
o
} else

View File

@ -1,8 +1,9 @@
package eu.dnetlib.doiboost.crossref
import com.google.common.collect.Lists
import eu.dnetlib.dhp.schema.common.ModelConstants
import eu.dnetlib.dhp.schema.oaf._
import eu.dnetlib.dhp.schema.oaf.utils.{GraphCleaningFunctions, IdentifierFactory, OafMapperUtils}
import eu.dnetlib.dhp.schema.oaf.utils.{GraphCleaningFunctions, IdentifierFactory, OafMapperUtils, PidType}
import eu.dnetlib.dhp.utils.DHPUtils
import eu.dnetlib.doiboost.DoiBoostMappingUtil
import eu.dnetlib.doiboost.DoiBoostMappingUtil._
@ -105,17 +106,17 @@ case object Crossref2Oaf {
result.setOriginalId(originalIds)
// Add DataInfo
result.setDataInfo(generateDataInfo())
result.setDataInfo(entityDataInfo)
result.setLastupdatetimestamp((json \ "indexed" \ "timestamp").extract[Long])
result.setDateofcollection((json \ "indexed" \ "date-time").extract[String])
result.setCollectedfrom(List(createCrossrefCollectedFrom()).asJava)
result.setCollectedfrom(List(collectedFrom).asJava)
// Publisher ( Name of work's publisher mapped into Result/Publisher)
val publisher = (json \ "publisher").extractOrElse[String](null)
if (publisher != null && publisher.nonEmpty)
result.setPublisher(asField(publisher))
result.setPublisher(OafMapperUtils.publisher(publisher))
// TITLE
val mainTitles =
@ -140,13 +141,13 @@ case object Crossref2Oaf {
// DESCRIPTION
val descriptionList =
for { JString(description) <- json \ "abstract" } yield asField(description)
for { JString(description) <- json \ "abstract" } yield description
result.setDescription(descriptionList.asJava)
// Source
val sourceList = for {
JString(source) <- json \ "source" if source != null && source.nonEmpty
} yield asField(source)
} yield source
result.setSource(sourceList.asJava)
//RELEVANT DATE Mapping
@ -186,9 +187,9 @@ case object Crossref2Oaf {
(json \ "issued" \ "date-parts").extract[List[List[Int]]]
)
if (StringUtils.isNotBlank(issuedDate)) {
result.setDateofacceptance(asField(issuedDate))
result.setDateofacceptance(issuedDate)
} else {
result.setDateofacceptance(asField(createdDate.getValue))
result.setDateofacceptance(createdDate.getValue)
}
result.setRelevantdate(
List(createdDate, postedDate, acceptedDate, publishedOnlineDate, publishedPrintDate)
@ -223,8 +224,8 @@ case object Crossref2Oaf {
JObject(license) <- json \ "license"
JField("URL", JString(lic)) <- license
JField("content-version", JString(content_version)) <- license
} yield (asField(lic), content_version)
val l = license.filter(d => StringUtils.isNotBlank(d._1.getValue))
} yield (OafMapperUtils.license(lic), content_version)
val l = license.filter(d => StringUtils.isNotBlank(d._1.getUrl))
if (l.nonEmpty) {
if (l exists (d => d._2.equals("vor"))) {
for (d <- l) {
@ -247,20 +248,18 @@ case object Crossref2Oaf {
OafMapperUtils.qualifier(
"0001",
"peerReviewed",
ModelConstants.DNET_REVIEW_LEVELS,
ModelConstants.DNET_REVIEW_LEVELS
)
)
}
instance.setAccessright(
decideAccessRight(instance.getLicense, result.getDateofacceptance.getValue)
decideAccessRight(instance.getLicense.getUrl, result.getDateofacceptance)
)
instance.setInstancetype(
OafMapperUtils.qualifier(
cobjCategory.substring(0, 4),
cobjCategory.substring(5),
ModelConstants.DNET_PUBLICATION_RESOURCE,
ModelConstants.DNET_PUBLICATION_RESOURCE
)
)
@ -268,16 +267,15 @@ case object Crossref2Oaf {
OafMapperUtils.qualifier(
cobjCategory.substring(0, 4),
cobjCategory.substring(5),
ModelConstants.DNET_PUBLICATION_RESOURCE,
ModelConstants.DNET_PUBLICATION_RESOURCE
)
)
instance.setCollectedfrom(createCrossrefCollectedFrom())
instance.setCollectedfrom(collectedFrom)
if (StringUtils.isNotBlank(issuedDate)) {
instance.setDateofacceptance(asField(issuedDate))
instance.setDateofacceptance(issuedDate)
} else {
instance.setDateofacceptance(asField(createdDate.getValue))
instance.setDateofacceptance(createdDate.getValue)
}
val s: List[String] = List("https://doi.org/" + doi)
// val links: List[String] = ((for {JString(url) <- json \ "link" \ "URL"} yield url) ::: List(s)).filter(p => p != null && p.toLowerCase().contains(doi.toLowerCase())).distinct
@ -318,11 +316,10 @@ case object Crossref2Oaf {
if (StringUtils.isNotBlank(orcid))
a.setPid(
List(
createSP(
OafMapperUtils.authorPid(
orcid,
ModelConstants.ORCID_PENDING,
ModelConstants.DNET_PID_TYPES,
generateDataInfo()
OafMapperUtils.qualifier(ModelConstants.ORCID_PENDING, ModelConstants.ORCID_PENDING, ModelConstants.DNET_PID_TYPES),
dataInfo
)
).asJava
)
@ -358,10 +355,7 @@ case object Crossref2Oaf {
if (funderList.nonEmpty) {
resultList = resultList ::: mappingFunderToRelations(
funderList,
result.getId,
createCrossrefCollectedFrom(),
result.getDataInfo,
result.getLastupdatetimestamp
result.getId
)
}
@ -370,16 +364,41 @@ case object Crossref2Oaf {
case dataset: Dataset => convertDataset(dataset)
}
val doisReference: List[String] = for {
JObject(reference_json) <- json \ "reference"
JField("DOI", JString(doi_json)) <- reference_json
} yield doi_json
if (doisReference != null && doisReference.nonEmpty) {
val citation_relations: List[Relation] = generateCitationRelations(doisReference, result)
resultList = resultList ::: citation_relations
}
resultList = resultList ::: List(result)
resultList
}
private def createCiteRelation(sourceId: String, targetPid: String, targetPidType: String): List[Relation] = {
val targetId = IdentifierFactory.idFromPid("50", targetPidType, targetPid, true)
val rel = new Relation
rel.setSource(sourceId)
rel.setTarget(targetId)
rel.setRelType(ModelConstants.RESULT_RESULT)
rel.setRelClass(ModelConstants.CITES)
rel.setSubRelType(ModelConstants.CITATION)
rel.setProvenance(Lists.newArrayList(OafMapperUtils.getProvenance(collectedFrom, dataInfo)))
List(rel)
}
def generateCitationRelations(dois: List[String], result: Result): List[Relation] = {
dois.flatMap(doi => createCiteRelation(result.getId, doi, PidType.doi.toString))
}
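
Reference DOIs are now turned into Cites relations whose target id is derived deterministically from the referenced PID. A small Java sketch of the relation shape produced by createCiteRelation above, using the same schema classes; the method name and the DOI argument are illustrative, and the provenance is attached exactly as in the Scala code:

import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.Relation;
import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory;
import eu.dnetlib.dhp.schema.oaf.utils.PidType;

class CiteRelationSketch {
	static Relation cites(String sourceResultId, String referencedDoi) {
		Relation rel = new Relation();
		rel.setSource(sourceResultId);
		// Same call used in createCiteRelation: build the target result id from the DOI ("50" is the result prefix).
		rel.setTarget(IdentifierFactory.idFromPid("50", PidType.doi.toString(), referencedDoi, true));
		rel.setRelType(ModelConstants.RESULT_RESULT);
		rel.setRelClass(ModelConstants.CITES);
		rel.setSubRelType(ModelConstants.CITATION);
		// Provenance (collectedfrom + dataInfo) is then set as in createCiteRelation above.
		return rel;
	}
}
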
def mappingFunderToRelations(
funders: List[mappingFunder],
sourceId: String,
cf: KeyValue,
di: DataInfo,
ts: Long
sourceId: String
): List[Relation] = {
val queue = new mutable.Queue[Relation]
@ -389,7 +408,6 @@ case object Crossref2Oaf {
val tmp2 = StringUtils.substringBefore(tmp1, "/")
logger.debug(s"From $award to $tmp2")
tmp2
}
def extractECAward(award: String): String = {
@ -407,11 +425,9 @@ case object Crossref2Oaf {
r.setRelType(ModelConstants.RESULT_PROJECT)
r.setRelClass(relClass)
r.setSubRelType(ModelConstants.OUTCOME)
r.setCollectedfrom(List(cf).asJava)
r.setDataInfo(di)
r.setLastupdatetimestamp(ts)
r
r.setProvenance(Lists.newArrayList(OafMapperUtils.getProvenance(collectedFrom, dataInfo)))
r
}
def generateSimpleRelationFromAward(
@ -446,6 +462,7 @@ case object Crossref2Oaf {
case "10.13039/501100000781" =>
generateSimpleRelationFromAward(funder, "corda_______", extractECAward)
generateSimpleRelationFromAward(funder, "corda__h2020", extractECAward)
generateSimpleRelationFromAward(funder, "corda_____he", extractECAward)
case "10.13039/100000001" => generateSimpleRelationFromAward(funder, "nsf_________", a => a)
case "10.13039/501100001665" => generateSimpleRelationFromAward(funder, "anr_________", a => a)
case "10.13039/501100002341" => generateSimpleRelationFromAward(funder, "aka_________", a => a)
@ -464,6 +481,13 @@ case object Crossref2Oaf {
val targetId = getProjectId("cihr________", "1e5e62235d094afd01cd56e65112fc63")
queue += generateRelation(sourceId, targetId, ModelConstants.IS_PRODUCED_BY)
queue += generateRelation(targetId, sourceId, ModelConstants.PRODUCES)
case "10.13039/100020031" =>
val targetId = getProjectId("tara________", "1e5e62235d094afd01cd56e65112fc63")
queue += generateRelation(sourceId, targetId, ModelConstants.IS_PRODUCED_BY)
queue += generateRelation(targetId, sourceId, ModelConstants.PRODUCES)
case "10.13039/501100005416" => generateSimpleRelationFromAward(funder, "rcn_________", a => a)
case "10.13039/501100002848" => generateSimpleRelationFromAward(funder, "conicytf____", a => a)
case "10.13039/501100003448" => generateSimpleRelationFromAward(funder, "gsrt________", extractECAward)
case "10.13039/501100010198" => generateSimpleRelationFromAward(funder, "sgov________", a => a)
@ -487,6 +511,34 @@ case object Crossref2Oaf {
val targetId = getProjectId("wt__________", "1e5e62235d094afd01cd56e65112fc63")
queue += generateRelation(sourceId, targetId, ModelConstants.IS_PRODUCED_BY)
queue += generateRelation(targetId, sourceId, ModelConstants.PRODUCES)
//ASAP
case "10.13039/100018231" => generateSimpleRelationFromAward(funder, "asap________", a => a)
//CHIST-ERA
case "10.13039/501100001942" =>
val targetId = getProjectId("chistera____", "1e5e62235d094afd01cd56e65112fc63")
queue += generateRelation(sourceId, targetId, ModelConstants.IS_PRODUCED_BY)
queue += generateRelation(targetId, sourceId, ModelConstants.PRODUCES)
//HE
case "10.13039/100018693" | "10.13039/100018694" | "10.13039/100019188" | "10.13039/100019180" |
"10.13039/100018695" | "10.13039/100019185" | "10.13039/100019186" | "10.13039/100019187" =>
generateSimpleRelationFromAward(funder, "corda_____he", extractECAward)
//FCT
case "10.13039/501100001871" =>
generateSimpleRelationFromAward(funder, "fct_________", a => a)
//NHMRC
case "10.13039/501100000925" =>
generateSimpleRelationFromAward(funder, "nhmrc_______", a => a)
//NIH
case "10.13039/100000002" =>
generateSimpleRelationFromAward(funder, "nih_________", a => a)
//NWO
case "10.13039/501100003246" =>
generateSimpleRelationFromAward(funder, "nwo_________", a => a)
//UKRI
case "10.13039/100014013" | "10.13039/501100000267" | "10.13039/501100000268" | "10.13039/501100000269" |
"10.13039/501100000266" | "10.13039/501100006041" | "10.13039/501100000265" | "10.13039/501100000270" |
"10.13039/501100013589" | "10.13039/501100000271" =>
generateSimpleRelationFromAward(funder, "ukri________", a => a)
case _ => logger.debug("no match for " + funder.DOI.get)
@ -499,10 +551,11 @@ case object Crossref2Oaf {
case "European Union's" =>
generateSimpleRelationFromAward(funder, "corda__h2020", extractECAward)
generateSimpleRelationFromAward(funder, "corda_______", extractECAward)
generateSimpleRelationFromAward(funder, "corda_____he", extractECAward)
case "The French National Research Agency (ANR)" | "The French National Research Agency" =>
generateSimpleRelationFromAward(funder, "anr_________", a => a)
case "CONICYT, Programa de Formación de Capital Humano Avanzado" =>
generateSimpleRelationFromAward(funder, "conicytf____", extractECAward)
generateSimpleRelationFromAward(funder, "conicytf____", a => a)
case "Wellcome Trust Masters Fellowship" =>
generateSimpleRelationFromAward(funder, "wt__________", a => a)
val targetId = getProjectId("wt__________", "1e5e62235d094afd01cd56e65112fc63")
@ -531,11 +584,11 @@ case object Crossref2Oaf {
if (ISBN.nonEmpty && containerTitles.nonEmpty) {
val source = s"${containerTitles.head} ISBN: ${ISBN.head}"
if (publication.getSource != null) {
val l: List[Field[String]] = publication.getSource.asScala.toList
val ll: List[Field[String]] = l ::: List(asField(source))
val l: List[String] = publication.getSource.asScala.toList
val ll: List[String] = l ::: List(source)
publication.setSource(ll.asJava)
} else
publication.setSource(List(asField(source)).asJava)
publication.setSource(List(source).asJava)
}
} else {
// Mapping Journal

View File

@ -1,7 +1,7 @@
package eu.dnetlib.doiboost.mag
import eu.dnetlib.dhp.schema.common.ModelConstants
import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory
import eu.dnetlib.dhp.schema.oaf.utils.{IdentifierFactory, MergeUtils, OafMapperUtils}
import eu.dnetlib.dhp.schema.oaf.{Instance, Journal, Publication, StructuredProperty, Subject}
import eu.dnetlib.doiboost.DoiBoostMappingUtil
import eu.dnetlib.doiboost.DoiBoostMappingUtil._
@ -142,8 +142,7 @@ case object ConversionUtil {
def mergePublication(a: Publication, b: Publication): Publication = {
if ((a != null) && (b != null)) {
a.mergeFrom(b)
a
MergeUtils.merge(a, b)
} else {
if (a == null) b else a
}
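
mergePublication now keeps only the null handling locally and delegates the field-level merge to MergeUtils.merge, the same entry point adopted in RelationAggregator and the graph-table jobs in this changeset. A Java sketch of the equivalent null-safe wrapper, assuming the generic signature implied by those call sites; the method name is hypothetical:

import eu.dnetlib.dhp.schema.oaf.Publication;
import eu.dnetlib.dhp.schema.oaf.utils.MergeUtils;

class MergeSketch {
	// Null-safe wrapper mirroring mergePublication above (hypothetical helper).
	static Publication mergeNullSafe(Publication a, Publication b) {
		if (a != null && b != null) {
			return MergeUtils.merge(a, b); // one generic merge instead of per-type mergeFrom/mergeRelation
		}
		return a == null ? b : a;
	}
}
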
@ -172,7 +171,7 @@ case object ConversionUtil {
val pub = inputItem._1._2
val abst = inputItem._2
if (abst != null) {
pub.setDescription(List(asField(abst.IndexedAbstract)).asJava)
pub.setDescription(List(abst.IndexedAbstract).asJava)
}
pub
@ -215,10 +214,8 @@ case object ConversionUtil {
s.DisplayName,
classid,
className,
ModelConstants.DNET_SUBJECT_TYPOLOGIES,
ModelConstants.DNET_SUBJECT_TYPOLOGIES
)
val di = DoiBoostMappingUtil.generateDataInfo(s.Score.toString)
var resList: List[Subject] = List(s1)
if (s.MainType.isDefined) {
val maintp = s.MainType.get
@ -226,20 +223,18 @@ case object ConversionUtil {
s.MainType.get,
classid,
className,
ModelConstants.DNET_SUBJECT_TYPOLOGIES,
ModelConstants.DNET_SUBJECT_TYPOLOGIES
)
s2.setDataInfo(di)
s2.setDataInfo(dataInfo)
resList = resList ::: List(s2)
if (maintp.contains(".")) {
val s3 = createSubject(
maintp.split("\\.").head,
classid,
className,
ModelConstants.DNET_SUBJECT_TYPOLOGIES,
ModelConstants.DNET_SUBJECT_TYPOLOGIES
)
s3.setDataInfo(di)
s3.setDataInfo(dataInfo)
resList = resList ::: List(s3)
}
}
@ -250,36 +245,6 @@ case object ConversionUtil {
publication
}
def addInstances(a: (Publication, MagUrl)): Publication = {
val pub = a._1
val urls = a._2
val i = new Instance
if (urls != null) {
val l: List[String] = urls.instances
.filter(k => k.SourceUrl.nonEmpty)
.map(k => k.SourceUrl) ::: List(
s"https://academic.microsoft.com/#/detail/${extractMagIdentifier(pub.getOriginalId.asScala)}"
)
i.setUrl(l.asJava)
} else
i.setUrl(
List(
s"https://academic.microsoft.com/#/detail/${extractMagIdentifier(pub.getOriginalId.asScala)}"
).asJava
)
// Ticket #6281 added pid to Instance
i.setPid(pub.getPid)
i.setCollectedfrom(createMAGCollectedFrom())
pub.setInstance(List(i).asJava)
pub
}
def transformPaperAbstract(input: MagPaperAbstract): MagPaperAbstract = {
MagPaperAbstract(input.PaperId, convertInvertedIndexString(input.IndexedAbstract))
}
@ -306,21 +271,23 @@ case object ConversionUtil {
createSP(paper.OriginalTitle, "alternative title", ModelConstants.DNET_DATACITE_TITLE)
pub.setTitle(List(mainTitles, originalTitles).asJava)
pub.setSource(List(asField(paper.BookTitle)).asJava)
pub.setSource(List(paper.BookTitle).asJava)
val authorsOAF = authors.authors.map { f: MagAuthorAffiliation =>
val a: eu.dnetlib.dhp.schema.oaf.Author = new eu.dnetlib.dhp.schema.oaf.Author
a.setRank(f.sequenceNumber)
if (f.author.DisplayName.isDefined)
a.setFullname(f.author.DisplayName.get)
if (f.affiliation != null)
a.setAffiliation(List(asField(f.affiliation)).asJava)
a.setPid(
List(
createSP(
OafMapperUtils.authorPid(
s"https://academic.microsoft.com/#/detail/${f.author.AuthorId}",
"URL",
ModelConstants.DNET_PID_TYPES
OafMapperUtils.qualifier(
"URL",
"URL",
ModelConstants.DNET_PID_TYPES
),
dataInfo
)
).asJava
)
@ -329,9 +296,10 @@ case object ConversionUtil {
pub.setAuthor(authorsOAF.asJava)
if (paper.Date != null && paper.Date.isDefined) {
pub.setDateofacceptance(asField(paper.Date.get.toString.substring(0, 10)))
pub.setDateofacceptance(paper.Date.get.toString.substring(0, 10))
}
pub.setPublisher(asField(paper.Publisher))
pub.setPublisher(OafMapperUtils.publisher(paper.Publisher))
if (journal != null && journal.DisplayName.isDefined) {
val j = new Journal
@ -340,7 +308,7 @@ case object ConversionUtil {
j.setSp(paper.FirstPage)
j.setEp(paper.LastPage)
if (journal.Publisher.isDefined)
pub.setPublisher(asField(journal.Publisher.get))
pub.setPublisher(OafMapperUtils.publisher(journal.Publisher.get))
if (journal.Issn.isDefined)
j.setIssnPrinted(journal.Issn.get)
j.setVol(paper.Volume)
@ -348,71 +316,10 @@ case object ConversionUtil {
pub.setJournal(j)
}
pub.setCollectedfrom(List(createMAGCollectedFrom()).asJava)
pub.setDataInfo(generateDataInfo())
pub.setDataInfo(generateEntityDataInfo())
pub
}
def createOAF(
inputParams: ((MagPapers, MagPaperWithAuthorList), MagPaperAbstract)
): Publication = {
val paper = inputParams._1._1
val authors = inputParams._1._2
val description = inputParams._2
val pub = new Publication
pub.setPid(List(createSP(paper.Doi, "doi", ModelConstants.DNET_PID_TYPES)).asJava)
pub.setOriginalId(List(paper.PaperId.toString, paper.Doi).asJava)
//IMPORTANT
//The old method result.setId(generateIdentifier(result, doi))
//will be replaced using IdentifierFactory
pub.setId(IdentifierFactory.createDOIBoostIdentifier(pub))
val mainTitles = createSP(paper.PaperTitle, "main title", ModelConstants.DNET_DATACITE_TITLE)
val originalTitles =
createSP(paper.OriginalTitle, "alternative title", ModelConstants.DNET_DATACITE_TITLE)
pub.setTitle(List(mainTitles, originalTitles).asJava)
pub.setSource(List(asField(paper.BookTitle)).asJava)
if (description != null) {
pub.setDescription(List(asField(description.IndexedAbstract)).asJava)
}
val authorsOAF = authors.authors.map { f: MagAuthorAffiliation =>
val a: eu.dnetlib.dhp.schema.oaf.Author = new eu.dnetlib.dhp.schema.oaf.Author
a.setFullname(f.author.DisplayName.get)
if (f.affiliation != null)
a.setAffiliation(List(asField(f.affiliation)).asJava)
a.setPid(
List(
createSP(
s"https://academic.microsoft.com/#/detail/${f.author.AuthorId}",
"URL",
ModelConstants.DNET_PID_TYPES
)
).asJava
)
a
}
if (paper.Date != null) {
pub.setDateofacceptance(asField(paper.Date.toString.substring(0, 10)))
}
pub.setAuthor(authorsOAF.asJava)
pub
}
def convertInvertedIndexString(json_input: String): String = {
implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats
lazy val json: json4s.JValue = parse(json_input)

View File

@ -2,10 +2,10 @@ package eu.dnetlib.doiboost.orcid
import com.fasterxml.jackson.databind.ObjectMapper
import eu.dnetlib.dhp.schema.common.ModelConstants
import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory
import eu.dnetlib.dhp.schema.oaf.{Author, DataInfo, Publication}
import eu.dnetlib.dhp.schema.oaf.utils.{IdentifierFactory, OafMapperUtils}
import eu.dnetlib.dhp.schema.oaf.{Author, DataInfo, EntityDataInfo, Publication}
import eu.dnetlib.doiboost.DoiBoostMappingUtil
import eu.dnetlib.doiboost.DoiBoostMappingUtil.{createSP, generateDataInfo}
import eu.dnetlib.doiboost.DoiBoostMappingUtil.{createSP, generateEntityDataInfo}
import org.apache.commons.lang.StringUtils
import org.json4s
import org.json4s.DefaultFormats
@ -104,7 +104,7 @@ object ORCIDToOAF {
val doi = input.doi
val pub: Publication = new Publication
pub.setPid(List(createSP(doi, "doi", ModelConstants.DNET_PID_TYPES)).asJava)
pub.setDataInfo(generateDataInfo())
pub.setDataInfo(generateEntityDataInfo())
pub.setId(IdentifierFactory.createDOIBoostIdentifier(pub))
if (pub.getId == null)
@ -118,7 +118,7 @@ object ORCIDToOAF {
pub.setAuthor(l.asJava)
pub.setCollectedfrom(List(DoiBoostMappingUtil.createORIDCollectedFrom()).asJava)
pub.setDataInfo(DoiBoostMappingUtil.generateDataInfo())
pub.setDataInfo(DoiBoostMappingUtil.generateEntityDataInfo())
pub
} catch {
case e: Throwable =>
@ -127,8 +127,10 @@ object ORCIDToOAF {
}
}
val orcidPidDataInfo = generateOricPIDDatainfo()
def generateOricPIDDatainfo(): DataInfo = {
val di = DoiBoostMappingUtil.generateDataInfo("0.91")
val di = DoiBoostMappingUtil.generateDataInfo()
di.setTrust(.91f)
di.getProvenanceaction.setClassid(ModelConstants.SYSIMPORT_CROSSWALK_ENTITYREGISTRY)
di.getProvenanceaction.setClassname(ModelConstants.HARVESTED)
di
@ -149,11 +151,11 @@ object ORCIDToOAF {
if (StringUtils.isNotBlank(o.oid))
a.setPid(
List(
createSP(
OafMapperUtils.authorPid(
o.oid,
ModelConstants.ORCID,
ModelConstants.DNET_PID_TYPES,
generateOricPIDDatainfo()
orcidPidDataInfo
)
).asJava
)

View File

@ -1,7 +1,7 @@
package eu.dnetlib.doiboost.uw
import eu.dnetlib.dhp.schema.common.ModelConstants
import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory
import eu.dnetlib.dhp.schema.oaf.utils.{IdentifierFactory, OafMapperUtils, PidType}
import eu.dnetlib.dhp.schema.oaf.{AccessRight, Instance, OpenAccessRoute, Publication}
import eu.dnetlib.doiboost.DoiBoostMappingUtil
import eu.dnetlib.doiboost.DoiBoostMappingUtil._
@ -90,7 +90,7 @@ object UnpayWallToOAF {
val colour = get_unpaywall_color((json \ "oa_status").extractOrElse[String](null))
pub.setCollectedfrom(List(createUnpayWallCollectedFrom()).asJava)
pub.setDataInfo(generateDataInfo())
pub.setDataInfo(generateEntityDataInfo())
if (!is_oa)
return null
@ -104,7 +104,7 @@ object UnpayWallToOAF {
i.setUrl(List(oaLocation.url.get).asJava)
if (oaLocation.license.isDefined)
i.setLicense(asField(oaLocation.license.get))
i.setLicense(OafMapperUtils.license(oaLocation.license.get))
pub.setPid(List(createSP(doi, "doi", ModelConstants.DNET_PID_TYPES)).asJava)
// Ticket #6282 Adding open Access Colour
@ -113,10 +113,9 @@ object UnpayWallToOAF {
a.setClassid(ModelConstants.ACCESS_RIGHT_OPEN)
a.setClassname(ModelConstants.ACCESS_RIGHT_OPEN)
a.setSchemeid(ModelConstants.DNET_ACCESS_MODES)
a.setSchemename(ModelConstants.DNET_ACCESS_MODES)
a.setOpenAccessRoute(colour.get)
i.setAccessright(a)
i.setPid(List(createSP(doi, "doi", ModelConstants.DNET_PID_TYPES)).asJava)
i.setPid(List(createSP(doi, PidType.doi.toString, ModelConstants.DNET_PID_TYPES)).asJava)
}
pub.setInstance(List(i).asJava)

View File

@ -60,7 +60,7 @@ class PublicationToOafTest {
});
assertNotNull(oafPublication.getCollectedfrom());
if (oafPublication.getSource() != null) {
logger.info((oafPublication.getSource().get(0).getValue()));
logger.info((oafPublication.getSource().get(0)));
}
if (oafPublication.getExternalReference() != null) {
oafPublication.getExternalReference().forEach(e -> {

View File

@ -1,9 +1,14 @@
package eu.dnetlib.dhp.doiboost.crossref
import eu.dnetlib.dhp.schema.common.ModelConstants
import eu.dnetlib.dhp.schema.oaf._
import eu.dnetlib.dhp.utils.DHPUtils
import eu.dnetlib.doiboost.crossref.Crossref2Oaf
import org.codehaus.jackson.map.{ObjectMapper, SerializationConfig}
import org.json4s
import org.json4s.JsonAST.{JField, JObject, JString}
import org.json4s.{DefaultFormats, JValue}
import org.json4s.jackson.JsonMethods
import org.junit.jupiter.api.Assertions._
import org.junit.jupiter.api.Test
import org.slf4j.{Logger, LoggerFactory}
@ -31,13 +36,13 @@ class CrossrefMappingTest {
.fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/doiboost/crossref/funder_doi"))
.mkString
for (line <- funder_doi.linesWithSeparators.map(l =>l.stripLineEnd)) {
for (line <- funder_doi.linesWithSeparators.map(l => l.stripLineEnd)) {
val json = template.replace("%s", line)
val resultList: List[Oaf] = Crossref2Oaf.convert(json)
assertTrue(resultList.nonEmpty)
checkRelation(resultList)
}
for (line <- funder_name.linesWithSeparators.map(l =>l.stripLineEnd)) {
for (line <- funder_name.linesWithSeparators.map(l => l.stripLineEnd)) {
val json = template.replace("%s", line)
val resultList: List[Oaf] = Crossref2Oaf.convert(json)
assertTrue(resultList.nonEmpty)
@ -109,6 +114,47 @@ class CrossrefMappingTest {
}
private def parseJson(input: String): JValue = {
implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats
lazy val json: json4s.JValue = JsonMethods.parse(input)
json
}
@Test
def testCitationRelations(): Unit = {
val json = Source
.fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/doiboost/crossref/publication_license_embargo.json"))
.mkString
assertNotNull(json)
assertFalse(json.isEmpty)
val result: List[Oaf] = Crossref2Oaf.convert(json)
assertTrue(result.nonEmpty)
val j = parseJson(json)
val doisReference: List[String] = for {
JObject(reference_json) <- j \ "reference"
JField("DOI", JString(doi_json)) <- reference_json
} yield doi_json
val relationList: List[Relation] = result
.filter(s => s.isInstanceOf[Relation])
.map(r => r.asInstanceOf[Relation])
.filter(r => r.getSubRelType.equalsIgnoreCase(ModelConstants.CITATION))
assertNotNull(relationList)
assertFalse(relationList.isEmpty)
assertEquals(doisReference.size * 2, relationList.size)
mapper.getSerializationConfig.enable(SerializationConfig.Feature.INDENT_OUTPUT)
relationList.foreach(p => println(mapper.writeValueAsString(p)))
}
@Test
def testEmptyTitle(): Unit = {
val json = Source
@ -227,10 +273,6 @@ class CrossrefMappingTest {
result.getDataInfo.getProvenanceaction.getSchemeid.isEmpty,
"DataInfo/Provenance/SchemeId test not null Failed"
);
assertFalse(
result.getDataInfo.getProvenanceaction.getSchemename.isEmpty,
"DataInfo/Provenance/SchemeName test not null Failed"
);
assertNotNull(result.getCollectedfrom, "CollectedFrom test not null Failed");
assertFalse(result.getCollectedfrom.isEmpty);
@ -303,10 +345,6 @@ class CrossrefMappingTest {
result.getDataInfo.getProvenanceaction.getSchemeid.isEmpty,
"DataInfo/Provenance/SchemeId test not null Failed"
);
assertFalse(
result.getDataInfo.getProvenanceaction.getSchemename.isEmpty,
"DataInfo/Provenance/SchemeName test not null Failed"
);
assertNotNull(result.getCollectedfrom, "CollectedFrom test not null Failed");
assertFalse(result.getCollectedfrom.isEmpty);
@ -387,10 +425,6 @@ class CrossrefMappingTest {
result.getDataInfo.getProvenanceaction.getSchemeid.isEmpty,
"DataInfo/Provenance/SchemeId test not null Failed"
);
assertFalse(
result.getDataInfo.getProvenanceaction.getSchemename.isEmpty,
"DataInfo/Provenance/SchemeName test not null Failed"
);
assertNotNull(result.getCollectedfrom, "CollectedFrom test not null Failed");
assertFalse(result.getCollectedfrom.isEmpty);
@ -435,10 +469,6 @@ class CrossrefMappingTest {
result.getDataInfo.getProvenanceaction.getSchemeid.isEmpty,
"DataInfo/Provenance/SchemeId test not null Failed"
);
assertFalse(
result.getDataInfo.getProvenanceaction.getSchemename.isEmpty,
"DataInfo/Provenance/SchemeName test not null Failed"
);
assertNotNull(result.getCollectedfrom, "CollectedFrom test not null Failed");
assertFalse(result.getCollectedfrom.isEmpty);
@ -586,7 +616,7 @@ class CrossrefMappingTest {
println(mapper.writeValueAsString(item))
assertTrue(
item.getInstance().asScala exists (i => i.getLicense.getValue.equals("https://www.springer.com/vor"))
item.getInstance().asScala exists (i => i.getLicense.getUrl.equals("https://www.springer.com/vor"))
)
assertTrue(
item.getInstance().asScala exists (i => i.getAccessright.getClassid.equals("CLOSED"))
@ -614,7 +644,7 @@ class CrossrefMappingTest {
assertTrue(
item.getInstance().asScala exists (i =>
i.getLicense.getValue.equals(
i.getLicense.getUrl.equals(
"http://pubs.acs.org/page/policy/authorchoice_ccby_termsofuse.html"
)
)
@ -649,7 +679,7 @@ class CrossrefMappingTest {
assertTrue(
item.getInstance().asScala exists (i =>
i.getLicense.getValue.equals(
i.getLicense.getUrl.equals(
"https://academic.oup.com/journals/pages/open_access/funder_policies/chorus/standard_publication_model"
)
)
@ -684,7 +714,7 @@ class CrossrefMappingTest {
assertTrue(
item.getInstance().asScala exists (i =>
i.getLicense.getValue.equals(
i.getLicense.getUrl.equals(
"https://academic.oup.com/journals/pages/open_access/funder_policies/chorus/standard_publication_model"
)
)
@ -719,7 +749,7 @@ class CrossrefMappingTest {
assertTrue(
item.getInstance().asScala exists (i =>
i.getLicense.getValue.equals(
i.getLicense.getUrl.equals(
"https://academic.oup.com/journals/pages/open_access/funder_policies/chorus/standard_publication_model"
)
)

View File

@ -25,9 +25,11 @@ class MappingORCIDToOAFTest {
.mkString
assertNotNull(json)
assertFalse(json.isEmpty)
json.linesWithSeparators.map(l =>l.stripLineEnd).foreach(s => {
assertNotNull(ORCIDToOAF.extractValueFromInputString(s))
})
json.linesWithSeparators
.map(l => l.stripLineEnd)
.foreach(s => {
assertNotNull(ORCIDToOAF.extractValueFromInputString(s))
})
}
@Test

View File

@ -22,7 +22,7 @@ class UnpayWallMappingTest {
.mkString
var i: Int = 0
for (line <- Ilist.linesWithSeparators.map(l =>l.stripLineEnd)) {
for (line <- Ilist.linesWithSeparators.map(l => l.stripLineEnd)) {
val p = UnpayWallToOAF.convertToOAF(line)
if (p != null) {
@ -43,7 +43,7 @@ class UnpayWallMappingTest {
i = i + 1
}
val l = Ilist.linesWithSeparators.map(l =>l.stripLineEnd).next()
val l = Ilist.linesWithSeparators.map(l => l.stripLineEnd).next()
val item = UnpayWallToOAF.convertToOAF(l)

View File

@ -288,19 +288,6 @@ public class CountryPropagationJobTest {
tmp
.foreach(
r -> r.getCountry().stream().forEach(c -> Assertions.assertEquals("dnet:countries", c.getSchemeid())));
tmp
.foreach(
r -> r
.getCountry()
.stream()
.forEach(c -> Assertions.assertEquals("dnet:countries", c.getSchemename())));
tmp
.foreach(
r -> r
.getCountry()
.stream()
.forEach(c -> Assertions.assertFalse(c.getDataInfo().getDeletedbyinference())));
tmp.foreach(r -> r.getCountry().stream().forEach(c -> Assertions.assertFalse(c.getDataInfo().getInvisible())));
tmp.foreach(r -> r.getCountry().stream().forEach(c -> Assertions.assertTrue(c.getDataInfo().getInferred())));
tmp
.foreach(
@ -328,16 +315,6 @@ public class CountryPropagationJobTest {
c -> Assertions
.assertEquals(
"dnet:provenanceActions", c.getDataInfo().getProvenanceaction().getSchemeid())));
tmp
.foreach(
r -> r
.getCountry()
.stream()
.forEach(
c -> Assertions
.assertEquals(
"dnet:provenanceActions", c.getDataInfo().getProvenanceaction().getSchemename())));
List<Country> countries = tmp
.filter(r -> r.getId().equals("50|06cdd3ff4700::49ec404cee4e1452808aabeaffbd3072"))
.collect()

View File

@ -125,25 +125,25 @@ public class SparkJobTest {
.foreach(
r -> Assertions
.assertEquals(
PropagationConstant.PROPAGATION_DATA_INFO_TYPE, r.getDataInfo().getInferenceprovenance()));
PropagationConstant.PROPAGATION_DATA_INFO_TYPE, r.getProvenance().get(0).getDataInfo().getInferenceprovenance()));
tmp
.foreach(
r -> Assertions
.assertEquals(
PropagationConstant.PROPAGATION_RELATION_RESULT_ORGANIZATION_SEM_REL_CLASS_ID,
r.getDataInfo().getProvenanceaction().getClassid()));
r.getProvenance().get(0).getDataInfo().getProvenanceaction().getClassid()));
tmp
.foreach(
r -> Assertions
.assertEquals(
PropagationConstant.PROPAGATION_RELATION_RESULT_ORGANIZATION_SEM_REL_CLASS_NAME,
r.getDataInfo().getProvenanceaction().getClassname()));
r.getProvenance().get(0).getDataInfo().getProvenanceaction().getClassname()));
tmp
.foreach(
r -> Assertions
.assertEquals(
"0.85",
r.getDataInfo().getTrust()));
r.getProvenance().get(0).getDataInfo().getTrust()));
Assertions.assertEquals(9, tmp.filter(r -> r.getSource().substring(0, 3).equals("50|")).count());
tmp

View File

@ -102,10 +102,11 @@ public class StepActionsTest {
verificationDs
.foreach(
(ForeachFunction<Relation>) r -> Assertions
.assertEquals("propagation", r.getDataInfo().getInferenceprovenance()));
.assertEquals("propagation", r.getProvenance().get(0).getDataInfo().getInferenceprovenance()));
verificationDs
.foreach((ForeachFunction<Relation>) r -> Assertions.assertEquals("0.85", r.getDataInfo().getTrust()));
.foreach((ForeachFunction<Relation>) r -> Assertions
.assertEquals("0.85", r.getProvenance().get(0).getDataInfo().getTrust()));
verificationDs
.foreach((ForeachFunction<Relation>) r -> Assertions.assertEquals("50|", r.getSource().substring(0, 3)));
@ -133,14 +134,14 @@ public class StepActionsTest {
(ForeachFunction<Relation>) r -> Assertions
.assertEquals(
PropagationConstant.PROPAGATION_RELATION_RESULT_ORGANIZATION_SEM_REL_CLASS_ID,
r.getDataInfo().getProvenanceaction().getClassid()));
r.getProvenance().get(0).getDataInfo().getProvenanceaction().getClassid()));
verificationDs
.foreach(
(ForeachFunction<Relation>) r -> Assertions
.assertEquals(
PropagationConstant.PROPAGATION_RELATION_RESULT_ORGANIZATION_SEM_REL_CLASS_NAME,
r.getDataInfo().getProvenanceaction().getClassname()));
r.getProvenance().get(0).getDataInfo().getProvenanceaction().getClassname()));
verificationDs
.filter(

View File

@ -8,14 +8,14 @@ import java.util.stream.Collectors;
import java.util.stream.Stream;
import eu.dnetlib.dhp.schema.oaf.common.ModelSupport;
import eu.dnetlib.dhp.schema.oaf.utils.MergeUtils;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.*;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.expressions.Aggregator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -25,6 +25,7 @@ import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.*;
import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
import scala.Tuple2;
/**
@ -107,11 +108,11 @@ public class MergeGraphTableSparkJob {
Class<B> b_clazz,
String outputPath) {
Dataset<Tuple2<String, B>> beta = readTableFromPath(spark, betaInputPath, b_clazz);
Dataset<Tuple2<String, P>> prod = readTableFromPath(spark, prodInputPath, p_clazz);
Dataset<Tuple2<String, B>> beta = readTableAndGroupById(spark, betaInputPath, b_clazz);
Dataset<Tuple2<String, P>> prod = readTableAndGroupById(spark, prodInputPath, p_clazz);
prod
.joinWith(beta, prod.col("_1").equalTo(beta.col("_1")), "full_outer")
.joinWith(beta, prod.col("value").equalTo(beta.col("value")), "full_outer")
.map((MapFunction<Tuple2<Tuple2<String, P>, Tuple2<String, B>>, P>) value -> {
Optional<P> p = Optional.ofNullable(value._1()).map(Tuple2::_2);
Optional<B> b = Optional.ofNullable(value._2()).map(Tuple2::_2);
@ -126,12 +127,13 @@ public class MergeGraphTableSparkJob {
case "PROD":
return mergeWithPriorityToPROD(p, b);
}
}, Encoders.bean(p_clazz))
}, Encoders.kryo(p_clazz))
.filter((FilterFunction<P>) Objects::nonNull)
.map((MapFunction<P, String>) OBJECT_MAPPER::writeValueAsString, Encoders.STRING())
.write()
.mode(SaveMode.Overwrite)
.option("compression", "gzip")
.json(outputPath);
.text(outputPath);
}
/**
@ -212,20 +214,65 @@ public class MergeGraphTableSparkJob {
return null;
}
private static <T extends Oaf> Dataset<Tuple2<String, T>> readTableFromPath(
private static <T extends Oaf> Dataset<Tuple2<String, T>> readTableAndGroupById(
SparkSession spark, String inputEntityPath, Class<T> clazz) {
final TypedColumn<T, T> aggregator = new GroupingAggregator(clazz).toColumn();
log.info("Reading Graph table from: {}", inputEntityPath);
return spark
.read()
.textFile(inputEntityPath)
.map(
(MapFunction<String, Tuple2<String, T>>) value -> {
final T t = OBJECT_MAPPER.readValue(value, clazz);
final String id = ModelSupport.idFn().apply(t);
return new Tuple2<>(id, t);
},
Encoders.tuple(Encoders.STRING(), Encoders.kryo(clazz)));
.map((MapFunction<String, T>) value -> OBJECT_MAPPER.readValue(value, clazz), Encoders.kryo(clazz))
.groupByKey((MapFunction<T, String>) oaf -> ModelSupport.idFn().apply(oaf), Encoders.STRING())
.agg(aggregator);
}
public static class GroupingAggregator<T extends Oaf> extends Aggregator<T, T, T> {
private Class<T> clazz;
public GroupingAggregator(Class<T> clazz) {
this.clazz = clazz;
}
@Override
public T zero() {
return null;
}
@Override
public T reduce(T b, T a) {
return mergeAndGet(b, a);
}
private T mergeAndGet(T b, T a) {
if (Objects.nonNull(a) && Objects.nonNull(b)) {
MergeUtils.merge(b, a);
}
return Objects.isNull(a) ? b : a;
}
@Override
public T merge(T b, T a) {
return mergeAndGet(b, a);
}
@Override
public T finish(T j) {
return j;
}
@Override
public Encoder<T> bufferEncoder() {
return Encoders.kryo(clazz);
}
@Override
public Encoder<T> outputEncoder() {
return Encoders.kryo(clazz);
}
}
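
The GroupingAggregator collapses all records sharing the same OpenAIRE identifier into one object via MergeUtils before the BETA/PROD join. A minimal Java sketch of how it is wired into a typed Dataset, assuming Spark's typed Aggregator API and that the sketch lives in the same package as MergeGraphTableSparkJob (so no extra import is needed for the aggregator); Publication and the input path are illustrative choices:

import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.TypedColumn;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.schema.oaf.Publication;
import eu.dnetlib.dhp.schema.oaf.common.ModelSupport;
import scala.Tuple2;

class GroupByIdSketch {
	// Publication is only an example entity type; any graph table class would do.
	static Dataset<Tuple2<String, Publication>> groupPublications(SparkSession spark, String inputPath) {
		ObjectMapper mapper = new ObjectMapper();
		TypedColumn<Publication, Publication> aggregator =
			new MergeGraphTableSparkJob.GroupingAggregator<>(Publication.class).toColumn();
		return spark
			.read()
			.textFile(inputPath)
			.map((MapFunction<String, Publication>) s -> mapper.readValue(s, Publication.class),
				Encoders.kryo(Publication.class))
			// one group per OpenAIRE id, reduced to a single merged record
			.groupByKey((MapFunction<Publication, String>) p -> ModelSupport.idFn().apply(p), Encoders.STRING())
			.agg(aggregator);
	}
}
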
private static void removeOutputDir(SparkSession spark, String path) {

View File

@ -122,7 +122,7 @@ public abstract class AbstractMdRecordToOafMapper {
final EntityDataInfo info = prepareDataInfo(doc, invisible);
final long lastUpdateTimestamp = new Date().getTime();
final List<Instance> instances = prepareInstances(doc, info, collectedFrom, hostedBy);
final List<Instance> instances = prepareInstances(doc, collectedFrom, hostedBy);
final String type = getResultType(doc, instances);
@ -311,14 +311,16 @@ public abstract class AbstractMdRecordToOafMapper {
final Document doc,
final List<Instance> instances,
final KeyValue collectedFrom,
final EntityDataInfo info,
final EntityDataInfo entityDataInfo,
final long lastUpdateTimestamp) {
r.setDataInfo(info);
final DataInfo info = OafMapperUtils.fromEntityDataInfo(entityDataInfo);
r.setDataInfo(entityDataInfo);
r.setLastupdatetimestamp(lastUpdateTimestamp);
r.setId(createOpenaireId(50, doc.valueOf("//dri:objIdentifier"), false));
r.setOriginalId(findOriginalId(doc));
r.setCollectedfrom(Arrays.asList(collectedFrom));
r.setPid(IdentifierFactory.getPids(prepareResultPids(doc, info), collectedFrom));
r.setPid(IdentifierFactory.getPids(prepareResultPids(doc), collectedFrom));
r.setDateofcollection(doc.valueOf("//dr:dateOfCollection/text()|//dri:dateOfCollection/text()"));
r.setDateoftransformation(doc.valueOf("//dr:dateOfTransformation/text()|//dri:dateOfTransformation/text()"));
r.setExtraInfo(new ArrayList<>()); // NOT PRESENT IN MDSTORES
@ -351,7 +353,7 @@ public abstract class AbstractMdRecordToOafMapper {
r.setEoscifguidelines(prepareEOSCIfGuidelines(doc, info));
}
protected abstract List<StructuredProperty> prepareResultPids(Document doc, DataInfo info);
protected abstract List<StructuredProperty> prepareResultPids(Document doc);
private List<Context> prepareContexts(final Document doc, final DataInfo info) {
final List<Context> list = new ArrayList<>();
@ -390,7 +392,6 @@ public abstract class AbstractMdRecordToOafMapper {
protected abstract List<Instance> prepareInstances(
Document doc,
DataInfo info,
KeyValue collectedfrom,
KeyValue hostedby);
@ -504,8 +505,7 @@ public abstract class AbstractMdRecordToOafMapper {
final Node node,
final String xpath,
final String xpathClassId,
final String schemeId,
final DataInfo info) {
final String schemeId) {
final List<StructuredProperty> res = new ArrayList<>();
for (final Object o : node.selectNodes(xpath)) {

View File

@ -1,16 +1,16 @@
package eu.dnetlib.dhp.oa.graph.raw;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup;
import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication;
import eu.dnetlib.dhp.schema.oaf.*;
import eu.dnetlib.dhp.schema.oaf.common.ModelSupport;
import eu.dnetlib.dhp.schema.oaf.utils.MergeUtils;
import eu.dnetlib.dhp.utils.ISLookupClientFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.io.Text;
@ -18,22 +18,19 @@ import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup;
import eu.dnetlib.dhp.schema.oaf.*;
import eu.dnetlib.dhp.utils.ISLookupClientFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
import scala.Tuple2;
public class GenerateEntitiesApplication {
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
public class GenerateEntitiesApplication extends AbstractMigrationApplication {
private static final Logger log = LoggerFactory.getLogger(GenerateEntitiesApplication.class);
@ -109,15 +106,12 @@ public class GenerateEntitiesApplication {
final boolean shouldHashId,
final Mode mode) {
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
final List<String> existingSourcePaths = Arrays
.stream(sourcePaths.split(","))
.filter(p -> HdfsSupport.exists(p, sc.hadoopConfiguration()))
.collect(Collectors.toList());
final List<String> existingSourcePaths = listEntityPaths(spark, sourcePaths);
log.info("Generate entities from files:");
existingSourcePaths.forEach(log::info);
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
JavaRDD<Oaf> inputRdd = sc.emptyRDD();
for (final String sp : existingSourcePaths) {
@ -136,7 +130,7 @@ public class GenerateEntitiesApplication {
save(
inputRdd
.mapToPair(oaf -> new Tuple2<>(ModelSupport.idFn().apply(oaf), oaf))
.reduceByKey((Function2<Oaf, Oaf, Oaf>) (v1, v2) -> MergeUtils.merge(v1, v2, true))
.reduceByKey(MergeUtils::merge)
.map(Tuple2::_2),
targetPath);
break;

View File

@ -47,7 +47,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication i
private static final List<KeyValue> COLLECTED_FROM_CLAIM = listKeyValues(
createOpenaireId(10, "infrastruct_::openaire", true), "OpenAIRE");
private final static List<Provenance> PROVENANCE_CLAIM = getProvenance(COLLECTED_FROM_CLAIM, ENTITY_DATA_INFO_CLAIM);
private final static List<Provenance> PROVENANCE_CLAIM = getProvenance(COLLECTED_FROM_CLAIM, REL_DATA_INFO_CLAIM);
public static final String SOURCE_TYPE = "source_type";
public static final String TARGET_TYPE = "target_type";
@ -457,7 +457,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication i
}
r.setId(createOpenaireId(50, rs.getString("target_id"), false));
r.setLastupdatetimestamp(lastUpdateTimestamp);
r.setContext(prepareContext(rs.getString("source_id"), ENTITY_DATA_INFO_CLAIM));
r.setContext(prepareContext(rs.getString("source_id"), REL_DATA_INFO_CLAIM));
r.setDataInfo(ENTITY_DATA_INFO_CLAIM);
r.setCollectedfrom(COLLECTED_FROM_CLAIM);
@ -632,7 +632,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication i
public List<Oaf> processOrgOrgMergeRels(final ResultSet rs) {
try {
final DataInfo info = prepareDataInfo(rs); // TODO
final DataInfo info = OafMapperUtils.fromEntityDataInfo(prepareDataInfo(rs)); // TODO
final String orgId1 = createOpenaireId(20, rs.getString("id1"), true);
final String orgId2 = createOpenaireId(20, rs.getString("id2"), true);
@ -649,7 +649,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication i
public List<Oaf> processOrgOrgParentChildRels(final ResultSet rs) {
try {
final DataInfo info = prepareDataInfo(rs); // TODO
final DataInfo info = OafMapperUtils.fromEntityDataInfo(prepareDataInfo(rs)); // TODO
final String orgId1 = createOpenaireId(20, rs.getString("source"), true);
final String orgId2 = createOpenaireId(20, rs.getString("target"), true);
@ -668,7 +668,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication i
public List<Oaf> processOrgOrgSimRels(final ResultSet rs) {
try {
final DataInfo info = prepareDataInfo(rs); // TODO
final DataInfo info = OafMapperUtils.fromEntityDataInfo(prepareDataInfo(rs)); // TODO
final String orgId1 = createOpenaireId(20, rs.getString("id1"), true);
final String orgId2 = createOpenaireId(20, rs.getString("id2"), true);

View File

@ -6,11 +6,7 @@ import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
import java.io.IOException;
import java.io.StringReader;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.*;
import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
@ -24,6 +20,7 @@ import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
@ -110,6 +107,7 @@ public class MigrateHdfsMdstoresApplication extends AbstractMigrationApplication
.read()
.parquet(validPaths)
.map((MapFunction<Row, String>) MigrateHdfsMdstoresApplication::enrichRecord, Encoders.STRING())
.filter((FilterFunction<String>) Objects::nonNull)
.toJavaRDD()
.mapToPair(xml -> new Tuple2<>(new Text(UUID.randomUUID() + ":" + type), new Text(xml)))
// .coalesce(1)
@ -135,13 +133,14 @@ public class MigrateHdfsMdstoresApplication extends AbstractMigrationApplication
reader.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
final Document doc = reader.read(new StringReader(xml));
final Element head = (Element) doc.selectSingleNode("//*[local-name() = 'header']");
head.addElement(new QName("objIdentifier", DRI_NS_PREFIX)).addText(r.getAs("id"));
head.addElement(new QName("dateOfCollection", DRI_NS_PREFIX)).addText(collDate);
head.addElement(new QName("dateOfTransformation", DRI_NS_PREFIX)).addText(tranDate);
return doc.asXML();
} catch (final Exception e) {
log.error("Error patching record: " + xml);
throw new RuntimeException("Error patching record: " + xml, e);
return null;
}
}
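A minimal, self-contained sketch (hypothetical class and data, not part of this commit) of the pattern introduced above: the mapper returns null instead of throwing on a bad record, and a FilterFunction based on Objects::nonNull silently drops those records downstream.

import java.util.Arrays;
import java.util.Objects;

import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

public class MapThenDropNullsSketch {

    // Stand-in for enrichRecord: swallow the failure and return null instead of throwing.
    private static String enrich(String xml) {
        try {
            if (xml == null || xml.isEmpty()) {
                throw new IllegalArgumentException("empty record");
            }
            return xml.trim();
        } catch (Exception e) {
            return null; // the bad record is filtered out downstream
        }
    }

    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().master("local[*]").appName("filter-sketch").getOrCreate();

        Dataset<String> records = spark.createDataset(
            Arrays.asList("<record>ok</record>", "", "<record>also ok</record>"), Encoders.STRING());

        long kept = records
            .map((MapFunction<String, String>) MapThenDropNullsSketch::enrich, Encoders.STRING())
            .filter((FilterFunction<String>) Objects::nonNull) // same idiom as the hunk above
            .count();

        System.out.println("kept " + kept + " of " + records.count() + " records");
        spark.stop();
    }
}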

View File

@ -1,31 +1,87 @@
package eu.dnetlib.dhp.oa.graph.raw;
import static eu.dnetlib.dhp.utils.DHPUtils.getHadoopConfiguration;
import java.io.Closeable;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.function.Consumer;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.MDStoreInfo;
import eu.dnetlib.dhp.common.MdstoreClient;
import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication;
public class MigrateMongoMdstoresApplication extends AbstractMigrationApplication implements Closeable {
private static final Logger log = LoggerFactory.getLogger(MigrateMongoMdstoresApplication.class);
private final MdstoreClient mdstoreClient;
private static List<MDStoreInfo> snapshotsMDStores(final MdstoreClient client,
final String format,
final String layout,
final String interpretation) {
return client.mdStoreWithTimestamp(format, layout, interpretation);
}
private static MDStoreInfo extractPath(final String path, final String basePath) {
int res = path.indexOf(basePath);
if (res > 0) {
String[] split = path.substring(res).split("/");
if (split.length > 2) {
final String ts = split[split.length - 1];
final String mdStore = split[split.length - 2];
return new MDStoreInfo(mdStore, null, Long.parseLong(ts));
}
}
return null;
}
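A rough illustration of the parsing above, assuming a hypothetical base path and a file laid out as basePath/<mdstore id>/<timestamp>: the store id and the snapshot timestamp are recovered from the last two path segments.

public class ExtractPathSketch {

    public static void main(String[] args) {
        // Hypothetical values: a base path and one file written as basePath/<mdstore id>/<timestamp>
        final String basePath = "/data/mdstore";
        final String filePath = "hdfs://nameservice1/data/mdstore/md-0001-abcd/1678406400000";

        final int res = filePath.indexOf(basePath);
        if (res > 0) {
            final String[] split = filePath.substring(res).split("/");
            final String timestamp = split[split.length - 1]; // "1678406400000"
            final String mdStoreId = split[split.length - 2]; // "md-0001-abcd"
            System.out.println(mdStoreId + " last synchronized at " + Long.parseLong(timestamp));
        }
    }
}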
private static Map<String, MDStoreInfo> hdfsMDStoreInfo(FileSystem fs, final String basePath) throws IOException {
final Map<String, MDStoreInfo> hdfs_store = new HashMap<>();
final Path p = new Path(basePath);
final RemoteIterator<LocatedFileStatus> ls = fs.listFiles(p, true);
while (ls.hasNext()) {
String current = ls.next().getPath().toString();
final MDStoreInfo info = extractPath(current, basePath);
if (info != null) {
hdfs_store.put(info.getMdstore(), info);
}
}
return hdfs_store;
}
private static String createMDStoreDir(final String basePath, final String mdStoreId) {
if (basePath.endsWith("/")) {
return basePath + mdStoreId;
} else {
return String.format("%s/%s", basePath, mdStoreId);
}
}
public static void main(final String[] args) throws Exception {
final ArgumentApplicationParser parser = new ArgumentApplicationParser(
IOUtils
.toString(
MigrateMongoMdstoresApplication.class
.getResourceAsStream("/eu/dnetlib/dhp/oa/graph/migrate_mongo_mstores_parameters.json")));
Objects
.requireNonNull(
MigrateMongoMdstoresApplication.class
.getResourceAsStream(
"/eu/dnetlib/dhp/oa/graph/migrate_mongo_mstores_parameters.json"))));
parser.parseArgument(args);
final String mongoBaseUrl = parser.get("mongoBaseUrl");
@ -36,30 +92,118 @@ public class MigrateMongoMdstoresApplication extends AbstractMigrationApplicatio
final String mdInterpretation = parser.get("mdInterpretation");
final String hdfsPath = parser.get("hdfsPath");
final String nameNode = parser.get("nameNode");
final FileSystem fileSystem = FileSystem.get(getHadoopConfiguration(nameNode));
final MdstoreClient mdstoreClient = new MdstoreClient(mongoBaseUrl, mongoDb);
final List<MDStoreInfo> mongoMDStores = snapshotsMDStores(mdstoreClient, mdFormat, mdLayout, mdInterpretation);
final Map<String, MDStoreInfo> hdfsMDStores = hdfsMDStoreInfo(fileSystem, hdfsPath);
mongoMDStores
.stream()
.filter(currentMDStore -> currentMDStore.getLatestTimestamp() != null)
.forEach(
consumeMDStore(
mdFormat, mdLayout, mdInterpretation, hdfsPath, fileSystem, mongoBaseUrl, mongoDb, hdfsMDStores));
// TODO: DELETE MDSTORE FOLDERS NOT PRESENT IN MONGO
try (MigrateMongoMdstoresApplication app = new MigrateMongoMdstoresApplication(hdfsPath, mongoBaseUrl,
mongoDb)) {
app.execute(mdFormat, mdLayout, mdInterpretation);
}
}
public MigrateMongoMdstoresApplication(
final String hdfsPath, final String mongoBaseUrl, final String mongoDb) throws Exception {
/**
* This method is responsible for syncing only the stores that have changed since the last run.
* @param mdFormat the MDStore's format
* @param mdLayout the MDStore's layout
* @param mdInterpretation the MDStore's interpretation
* @param hdfsPath the base path on HDFS where all the MDStores are stored
* @param fileSystem the Hadoop FileSystem client
* @param mongoBaseUrl the MongoDB connection URL
* @param mongoDb the MongoDB database name
* @param hdfsMDStores a Map keyed by MDStore ID whose values are the corresponding {@link MDStoreInfo}
* @return a consumer that synchronizes a single MDStore into HDFS when needed
*/
private static Consumer<MDStoreInfo> consumeMDStore(String mdFormat, String mdLayout, String mdInterpretation,
String hdfsPath, FileSystem fileSystem, final String mongoBaseUrl, final String mongoDb,
Map<String, MDStoreInfo> hdfsMDStores) {
return currentMDStore -> {
// If the key is missing, the MDStore is not present on HDFS,
// i.e. the path basePath/MDSTOREID/timestamp does not exist,
// so we have to sync it
if (!hdfsMDStores.containsKey(currentMDStore.getMdstore())) {
log.info("Adding store {}", currentMDStore.getMdstore());
try {
synchMDStoreIntoHDFS(
mdFormat, mdLayout, mdInterpretation, hdfsPath, fileSystem, mongoBaseUrl, mongoDb,
currentMDStore);
} catch (IOException e) {
throw new RuntimeException(e);
}
} else {
final MDStoreInfo current = hdfsMDStores.get(currentMDStore.getMdstore());
// If the key is present, HDFS already contains a path
// basePath/MDSTOREID/timestamp, but the timestamp on HDFS is older than the
// one in Mongo, so we have to sync the new MDStore and delete the old one
if (currentMDStore.getLatestTimestamp() > current.getLatestTimestamp()) {
log.info("Updating MDStore {}", currentMDStore.getMdstore());
final String mdstoreDir = createMDStoreDir(hdfsPath, currentMDStore.getMdstore());
final String rmPath = createMDStoreDir(mdstoreDir, current.getLatestTimestamp().toString());
try {
synchMDStoreIntoHDFS(
mdFormat, mdLayout, mdInterpretation, hdfsPath, fileSystem, mongoBaseUrl, mongoDb,
currentMDStore);
log.info("deleting {}", rmPath);
// DELETE THE OLD MDSTORE
fileSystem.delete(new Path(rmPath), true);
} catch (IOException e) {
throw new RuntimeException("Unable to synch and remove path " + rmPath, e);
}
}
}
};
}
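A minimal sketch of the decision logic applied by the consumer above, using hypothetical store ids and timestamps: a store missing from HDFS is synchronized for the first time, a store with a newer timestamp in Mongo is re-synchronized and its old snapshot removed, and an up-to-date store is skipped.

import java.util.HashMap;
import java.util.Map;

import eu.dnetlib.dhp.common.MDStoreInfo;

public class SyncDecisionSketch {

    public static void main(String[] args) {
        // Hypothetical snapshot of what is already on HDFS, keyed by MDStore id
        final Map<String, MDStoreInfo> hdfsMDStores = new HashMap<>();
        hdfsMDStores.put("md-0001", new MDStoreInfo("md-0001", null, 1_000L));
        hdfsMDStores.put("md-0002", new MDStoreInfo("md-0002", null, 5_000L));

        // Hypothetical view of the same stores as reported by Mongo
        final MDStoreInfo[] mongoMDStores = {
            new MDStoreInfo("md-0001", "md-0001_body", 2_000L), // newer in Mongo -> re-sync, delete old snapshot
            new MDStoreInfo("md-0002", "md-0002_body", 5_000L), // same timestamp  -> nothing to do
            new MDStoreInfo("md-0003", "md-0003_body", 7_000L)  // unknown on HDFS -> first sync
        };

        for (MDStoreInfo store : mongoMDStores) {
            final MDStoreInfo onHdfs = hdfsMDStores.get(store.getMdstore());
            if (onHdfs == null) {
                System.out.println(store.getMdstore() + ": not on HDFS, sync it");
            } else if (store.getLatestTimestamp() > onHdfs.getLatestTimestamp()) {
                System.out.println(store.getMdstore() + ": stale on HDFS, sync and remove snapshot " + onHdfs.getLatestTimestamp());
            } else {
                System.out.println(store.getMdstore() + ": up to date, skip");
            }
        }
    }
}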
/**
* This method stores all the Mongo records of a single MDStore into a file on HDFS.
*
* @param mdFormat the MDStore's format
* @param mdLayout the MDStore's layout
* @param mdInterpretation the MDStore's interpretation
* @param hdfsPath the base path on HDFS where all the MDStores are stored
* @param fileSystem the Hadoop FileSystem client
* @param mongoBaseUrl the MongoDB connection URL
* @param mongoDb the MongoDB database name
* @param currentMDStore the {@link MDStoreInfo} describing the Mongo MDStore to synchronize
* @throws IOException when the target HDFS directory cannot be created
*/
private static void synchMDStoreIntoHDFS(String mdFormat, String mdLayout, String mdInterpretation, String hdfsPath,
FileSystem fileSystem, final String mongoBaseUrl, final String mongoDb, MDStoreInfo currentMDStore)
throws IOException {
// FIRST CREATE the directory basePath/MDSTOREID
final String mdstoreDir = createMDStoreDir(hdfsPath, currentMDStore.getMdstore());
fileSystem.mkdirs(new Path(mdstoreDir));
// Then sync all the records into basePath/MDSTOREID/timestamp
final String currentIdDir = createMDStoreDir(mdstoreDir, currentMDStore.getLatestTimestamp().toString());
try (MigrateMongoMdstoresApplication app = new MigrateMongoMdstoresApplication(mongoBaseUrl, mongoDb,
currentIdDir)) {
app.execute(currentMDStore.getCurrentId(), mdFormat, mdLayout, mdInterpretation);
} catch (Exception e) {
throw new RuntimeException(
String
.format("Error on sync mdstore with ID %s into path %s", currentMDStore.getMdstore(), currentIdDir),
e);
}
log.info(String.format("Synchronized mdStore id : %s into path %s", currentMDStore.getMdstore(), currentIdDir));
}
public MigrateMongoMdstoresApplication(final String mongoBaseUrl, final String mongoDb, final String hdfsPath)
throws Exception {
super(hdfsPath);
this.mdstoreClient = new MdstoreClient(mongoBaseUrl, mongoDb);
}
public void execute(final String format, final String layout, final String interpretation) {
final Map<String, String> colls = mdstoreClient.validCollections(format, layout, interpretation);
log.info("Found {} mdstores", colls.size());
for (final Entry<String, String> entry : colls.entrySet()) {
log.info("Processing mdstore {} (collection: {})", entry.getKey(), entry.getValue());
final String currentColl = entry.getValue();
for (final String xml : mdstoreClient.listRecords(currentColl)) {
emit(xml, String.format("%s-%s-%s", format, layout, interpretation));
}
public void execute(final String currentColl, final String format, final String layout,
final String interpretation) {
for (final String xml : mdstoreClient.listRecords(currentColl)) {
emit(xml, String.format("%s-%s-%s", format, layout, interpretation));
}
}
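A hedged usage sketch of the reworked constructor and execute(): one application instance per snapshot directory, fed a single Mongo collection, mirroring what synchMDStoreIntoHDFS does for one store. The connection details, paths and collection name below are placeholders.

import eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication;

public class SingleStoreSyncSketch {

    public static void main(String[] args) throws Exception {
        // Hypothetical connection details and store coordinates
        final String mongoBaseUrl = "mongodb://localhost:27017";
        final String mongoDb = "mdstore";
        final String targetDir = "/data/mdstore/md-0001-abcd/1678406400000"; // basePath/<mdstore id>/<timestamp>

        // One instance per snapshot: every record of the given Mongo collection
        // is emitted under targetDir, as in synchMDStoreIntoHDFS above.
        try (MigrateMongoMdstoresApplication app = new MigrateMongoMdstoresApplication(mongoBaseUrl, mongoDb, targetDir)) {
            app.execute("md-0001-abcd_body", "ODF", "store", "cleaned");
        }
    }
}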

View File

@ -124,7 +124,6 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
@Override
protected List<Instance> prepareInstances(
final Document doc,
final DataInfo info,
final KeyValue collectedfrom,
final KeyValue hostedby) {
@ -134,10 +133,10 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
instance.setCollectedfrom(collectedfrom);
instance.setHostedby(hostedby);
final List<StructuredProperty> alternateIdentifier = prepareResultPids(doc, info);
final List<StructuredProperty> alternateIdentifier = prepareResultPids(doc);
final List<StructuredProperty> pid = IdentifierFactory.getPids(alternateIdentifier, collectedfrom);
final Set<StructuredProperty> pids = pid.stream().collect(Collectors.toCollection(HashSet::new));
final Set<StructuredProperty> pids = new HashSet<>(pid);
instance
.setAlternateIdentifier(
@ -289,9 +288,9 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
}
@Override
protected List<StructuredProperty> prepareResultPids(final Document doc, final DataInfo info) {
protected List<StructuredProperty> prepareResultPids(final Document doc) {
return prepareListStructPropsWithValidQualifier(
doc, "//oaf:identifier", "@identifierType", DNET_PID_TYPES, info)
doc, "//oaf:identifier", "@identifierType", DNET_PID_TYPES)
.stream()
.map(CleaningFunctions::normalizePidValue)
.collect(Collectors.toList());

View File

@ -127,7 +127,6 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
@Override
protected List<Instance> prepareInstances(
final Document doc,
final DataInfo info,
final KeyValue collectedfrom,
final KeyValue hostedby) {
@ -137,7 +136,7 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
instance.setCollectedfrom(collectedfrom);
instance.setHostedby(hostedby);
final List<StructuredProperty> alternateIdentifier = prepareResultPids(doc, info);
final List<StructuredProperty> alternateIdentifier = prepareResultPids(doc);
final List<StructuredProperty> pid = IdentifierFactory.getPids(alternateIdentifier, collectedfrom);
final Set<StructuredProperty> pids = pid.stream().collect(Collectors.toCollection(HashSet::new));
@ -419,24 +418,24 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {
}
@Override
protected List<StructuredProperty> prepareResultPids(final Document doc, final DataInfo info) {
protected List<StructuredProperty> prepareResultPids(final Document doc) {
final Set<StructuredProperty> res = new HashSet<>();
res
.addAll(
prepareListStructPropsWithValidQualifier(
doc, "//oaf:identifier", "@identifierType", DNET_PID_TYPES, info));
doc, "//oaf:identifier", "@identifierType", DNET_PID_TYPES));
res
.addAll(
prepareListStructPropsWithValidQualifier(
doc,
"//*[local-name()='identifier' and ./@identifierType != 'URL' and ./@identifierType != 'landingPage']",
"@identifierType", DNET_PID_TYPES, info));
"@identifierType", DNET_PID_TYPES));
res
.addAll(
prepareListStructPropsWithValidQualifier(
doc,
"//*[local-name()='alternateIdentifier' and ./@alternateIdentifierType != 'URL' and ./@alternateIdentifierType != 'landingPage']",
"@alternateIdentifierType", DNET_PID_TYPES, info));
"@alternateIdentifierType", DNET_PID_TYPES));
return res
.stream()

View File

@ -23,12 +23,13 @@ import org.slf4j.LoggerFactory;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup;
import eu.dnetlib.dhp.oa.graph.raw.common.AbstractMigrationApplication;
import eu.dnetlib.dhp.schema.oaf.Oaf;
import eu.dnetlib.dhp.utils.ISLookupClientFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
import scala.Tuple2;
public class VerifyRecordsApplication {
public class VerifyRecordsApplication extends AbstractMigrationApplication {
private static final Logger log = LoggerFactory.getLogger(VerifyRecordsApplication.class);
@ -69,15 +70,13 @@ public class VerifyRecordsApplication {
private static void validateRecords(SparkSession spark, String sourcePaths, String invalidPath,
VocabularyGroup vocs) {
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
final List<String> existingSourcePaths = Arrays
.stream(sourcePaths.split(","))
.filter(p -> HdfsSupport.exists(p, sc.hadoopConfiguration()))
.collect(Collectors.toList());
final List<String> existingSourcePaths = listEntityPaths(spark, sourcePaths);
log.info("Verify records in files:");
existingSourcePaths.forEach(log::info);
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
for (final String sp : existingSourcePaths) {
RDD<String> invalidRecords = sc
.sequenceFile(sp, Text.class, Text.class)

View File

@ -9,7 +9,6 @@ import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -17,19 +16,14 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup;
import eu.dnetlib.dhp.oa.graph.raw.OafToOafMapper;
import eu.dnetlib.dhp.oa.graph.raw.OdfToOafMapper;
import eu.dnetlib.dhp.schema.mdstore.MDStoreWithInfo;
import eu.dnetlib.dhp.schema.oaf.*;
import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.schema.oaf.Oaf;
import eu.dnetlib.dhp.utils.DHPUtils;
public class AbstractMigrationApplication implements Closeable {
@ -107,6 +101,15 @@ public class AbstractMigrationApplication implements Closeable {
}
}
protected static List<String> listEntityPaths(final SparkSession spark, final String paths) {
final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
return Arrays
.stream(paths.split(","))
.filter(StringUtils::isNotBlank)
.filter(p -> HdfsSupport.exists(p, sc.hadoopConfiguration()) || p.contains("/*"))
.collect(Collectors.toList());
}
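A small, self-contained sketch (hypothetical paths, with a plain Set standing in for HdfsSupport.exists) of how the helper above filters the comma-separated source paths: blank entries are dropped, and a path survives if it exists on HDFS or contains a /* glob.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;

public class ListEntityPathsSketch {

    public static void main(String[] args) {
        // Hypothetical stand-in for HdfsSupport.exists(): pretend only these concrete paths exist
        final Set<String> existing = new HashSet<>(
            Arrays.asList("/content/db_openaire", "/content/db_openorgs"));

        final String sourcePaths = "/content/db_openaire,/content/db_openorgs, ,/content/missing,/content/mdstore/*/*";

        final List<String> kept = Arrays
            .stream(sourcePaths.split(","))
            .filter(StringUtils::isNotBlank)                       // drop empty or blank entries
            .filter(p -> existing.contains(p) || p.contains("/*")) // keep existing paths and glob patterns as-is
            .collect(Collectors.toList());

        kept.forEach(System.out::println); // db_openaire, db_openorgs and the mdstore glob survive
    }
}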
public ObjectMapper getObjectMapper() {
return objectMapper;
}

View File

@ -275,7 +275,7 @@
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
--conf spark.sql.shuffle.partitions=7680
--conf spark.sql.shuffle.partitions=10000
</spark-opts>
<arg>--betaInputPath</arg><arg>${betaInputGraphPath}/relation</arg>
<arg>--prodInputPath</arg><arg>${prodInputGraphPath}/relation</arg>

View File

@ -5,6 +5,12 @@
"paramDescription": "the path where storing the sequential file",
"paramRequired": true
},
{
"paramName": "n",
"paramLongName": "nameNode",
"paramDescription": "the hdfs Name node url",
"paramRequired": true
},
{
"paramName": "mongourl",
"paramLongName": "mongoBaseUrl",

View File

@ -214,16 +214,14 @@
<action name="ImportODF_claims">
<java>
<prepare>
<delete path="${contentPath}/odf_claims"/>
</prepare>
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
<arg>-p</arg><arg>${contentPath}/odf_claims</arg>
<arg>-mongourl</arg><arg>${mongoURL}</arg>
<arg>-mongodb</arg><arg>${mongoDb}</arg>
<arg>-f</arg><arg>ODF</arg>
<arg>-l</arg><arg>store</arg>
<arg>-i</arg><arg>claim</arg>
<arg>--hdfsPath</arg><arg>${contentPath}/odf_claims</arg>
<arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
<arg>--mongoDb</arg><arg>${mongoDb}</arg>
<arg>--mdFormat</arg><arg>ODF</arg>
<arg>--mdLayout</arg><arg>store</arg>
<arg>--mdInterpretation</arg><arg>claim</arg>
<arg>--nameNode</arg><arg>${nameNode}</arg>
</java>
<ok to="reuse_oaf_claims"/>
<error to="Kill"/>
@ -239,16 +237,14 @@
<action name="ImportOAF_claims">
<java>
<prepare>
<delete path="${contentPath}/oaf_claims"/>
</prepare>
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
<arg>-p</arg><arg>${contentPath}/oaf_claims</arg>
<arg>-mongourl</arg><arg>${mongoURL}</arg>
<arg>-mongodb</arg><arg>${mongoDb}</arg>
<arg>-f</arg><arg>OAF</arg>
<arg>-l</arg><arg>store</arg>
<arg>-i</arg><arg>claim</arg>
<arg>--hdfsPath</arg><arg>${contentPath}/oaf_claims</arg>
<arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
<arg>--mongoDb</arg><arg>${mongoDb}</arg>
<arg>--mdFormat</arg><arg>OAF</arg>
<arg>--mdLayout</arg><arg>store</arg>
<arg>--mdInterpretation</arg><arg>claim</arg>
<arg>--nameNode</arg><arg>${nameNode}</arg>
</java>
<ok to="wait_import"/>
<error to="Kill"/>
@ -291,16 +287,14 @@
<action name="ImportODF">
<java>
<prepare>
<delete path="${contentPath}/odf_records"/>
</prepare>
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
<arg>--hdfsPath</arg><arg>${contentPath}/odf_records</arg>
<arg>--hdfsPath</arg><arg>${contentPath}/mdstore</arg>
<arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
<arg>--mongoDb</arg><arg>${mongoDb}</arg>
<arg>--mdFormat</arg><arg>ODF</arg>
<arg>--mdLayout</arg><arg>store</arg>
<arg>--mdInterpretation</arg><arg>cleaned</arg>
<arg>--nameNode</arg><arg>${nameNode}</arg>
</java>
<ok to="reuse_oaf"/>
<error to="Kill"/>
@ -316,16 +310,14 @@
<action name="ImportOAF">
<java>
<prepare>
<delete path="${contentPath}/oaf_records"/>
</prepare>
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
<arg>--hdfsPath</arg><arg>${contentPath}/oaf_records</arg>
<arg>--hdfsPath</arg><arg>${contentPath}/mdstore</arg>
<arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
<arg>--mongoDb</arg><arg>${mongoDb}</arg>
<arg>--mdFormat</arg><arg>OAF</arg>
<arg>--mdLayout</arg><arg>store</arg>
<arg>--mdInterpretation</arg><arg>cleaned</arg>
<arg>--nameNode</arg><arg>${nameNode}</arg>
</java>
<ok to="ImportOAF_invisible"/>
<error to="Kill"/>
@ -333,16 +325,14 @@
<action name="ImportOAF_invisible">
<java>
<prepare>
<delete path="${contentPath}/oaf_records_invisible"/>
</prepare>
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
<arg>--hdfsPath</arg><arg>${contentPath}/oaf_records_invisible</arg>
<arg>--hdfsPath</arg><arg>${contentPath}/mdstore</arg>
<arg>--mongoBaseUrl</arg><arg>${mongoURL}</arg>
<arg>--mongoDb</arg><arg>${mongoDb}</arg>
<arg>--mdFormat</arg><arg>OAF</arg>
<arg>--mdLayout</arg><arg>store</arg>
<arg>--mdInterpretation</arg><arg>intersection</arg>
<arg>--nameNode</arg><arg>${nameNode}</arg>
</java>
<ok to="ImportODF_hdfs"/>
<error to="Kill"/>
@ -372,7 +362,7 @@
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts>
<arg>--hdfsPath</arg><arg>${contentPath}/odf_records_hdfs</arg>
<arg>--hdfsPath</arg><arg>${contentPath}/odf_mdstore_hdfs</arg>
<arg>--mdstoreManagerUrl</arg><arg>${mdstoreManagerUrl}</arg>
<arg>--mdFormat</arg><arg>ODF</arg>
<arg>--mdLayout</arg><arg>store</arg>
@ -406,7 +396,7 @@
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts>
<arg>--hdfsPath</arg><arg>${contentPath}/oaf_records_hdfs</arg>
<arg>--hdfsPath</arg><arg>${contentPath}/oaf_mdstore_hdfs</arg>
<arg>--mdstoreManagerUrl</arg><arg>${mdstoreManagerUrl}</arg>
<arg>--mdFormat</arg><arg>OAF</arg>
<arg>--mdLayout</arg><arg>store</arg>
@ -466,7 +456,7 @@
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts>
<arg>--sourcePaths</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims</arg>
<arg>--sourcePaths</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims/*/*,${contentPath}/odf_claims/*/*</arg>
<arg>--invalidPath</arg><arg>${workingDir}/invalid_records_claim</arg>
<arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
</spark>
@ -490,7 +480,7 @@
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts>
<arg>--sourcePaths</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims,${contentPath}/odf_claims</arg>
<arg>--sourcePaths</arg><arg>${contentPath}/db_claims,${contentPath}/oaf_claims/*/*,${contentPath}/odf_claims/*/*</arg>
<arg>--targetPath</arg><arg>${workingDir}/entities_claim</arg>
<arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
<arg>--shouldHashId</arg><arg>${shouldHashId}</arg>
@ -539,7 +529,7 @@
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts>
<arg>--sourcePaths</arg><arg>${contentPath}/db_openaire,${contentPath}/db_openorgs,${contentPath}/oaf_records,${contentPath}/odf_records,${contentPath}/oaf_records_hdfs,${contentPath}/odf_records_hdfs,${contentPath}/oaf_records_invisible</arg>
<arg>--sourcePaths</arg><arg>${contentPath}/db_openaire,${contentPath}/db_openorgs,${contentPath}/oaf_mdstore_hdfs,${contentPath}/odf_mdstore_hdfs,${contentPath}/mdstore/*/*</arg>
<arg>--invalidPath</arg><arg>${workingDir}/invalid_records</arg>
<arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
</spark>
@ -563,7 +553,7 @@
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
</spark-opts>
<arg>--sourcePaths</arg><arg>${contentPath}/db_openaire,${contentPath}/db_openorgs,${contentPath}/oaf_records,${contentPath}/odf_records,${contentPath}/oaf_records_hdfs,${contentPath}/odf_records_hdfs,${contentPath}/oaf_records_invisible</arg>
<arg>--sourcePaths</arg><arg>${contentPath}/db_openaire,${contentPath}/db_openorgs,${contentPath}/oaf_mdstore_hdfs,${contentPath}/odf_mdstore_hdfs,${contentPath}/mdstore/*/*</arg>
<arg>--targetPath</arg><arg>${workingDir}/entities</arg>
<arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
<arg>--shouldHashId</arg><arg>${shouldHashId}</arg>

View File

@ -1,18 +0,0 @@
<configuration>
<property>
<name>jobTracker</name>
<value>yarnRM</value>
</property>
<property>
<name>nameNode</name>
<value>hdfs://nameservice1</value>
</property>
<property>
<name>oozie.use.system.libpath</name>
<value>true</value>
</property>
<property>
<name>oozie.action.sharelib.for.spark</name>
<value>spark2</value>
</property>
</configuration>

View File

@ -1,162 +0,0 @@
<workflow-app name="import Claims as Graph" xmlns="uri:oozie:workflow:0.5">
<parameters>
<property>
<name>reuseContent</name>
<value>false</value>
<description>should import content from the aggregator or reuse a previous version</description>
</property>
<property>
<name>contentPath</name>
<description>path location to store (or reuse) content from the aggregator</description>
</property>
<property>
<name>postgresURL</name>
<description>the postgres URL to access to the database</description>
</property>
<property>
<name>postgresUser</name>
<description>the user postgres</description>
</property>
<property>
<name>postgresPassword</name>
<description>the password postgres</description>
</property>
<property>
<name>dbSchema</name>
<value>beta</value>
<description>the database schema according to the D-Net infrastructure (beta or production)</description>
</property>
<property>
<name>mongoURL</name>
<description>mongoDB url, example: mongodb://[username:password@]host[:port]</description>
</property>
<property>
<name>mongoDb</name>
<description>mongo database</description>
</property>
<property>
<name>isLookupUrl</name>
<description>the address of the lookUp service</description>
</property>
<property>
<name>nsPrefixBlacklist</name>
<value></value>
<description>a blacklist of nsprefixes (comma separated)</description>
</property>
<property>
<name>sparkDriverMemory</name>
<description>memory for driver process</description>
</property>
<property>
<name>sparkExecutorMemory</name>
<description>memory for individual executor</description>
</property>
<property>
<name>sparkExecutorCores</name>
<description>number of cores used by single executor</description>
</property>
<property>
<name>oozieActionShareLibForSpark2</name>
<description>oozie action sharelib for spark 2.*</description>
</property>
<property>
<name>spark2ExtraListeners</name>
<value>com.cloudera.spark.lineage.NavigatorAppListener</value>
<description>spark 2.* extra listeners classname</description>
</property>
<property>
<name>spark2SqlQueryExecutionListeners</name>
<value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
<description>spark 2.* sql query execution listeners classname</description>
</property>
<property>
<name>spark2YarnHistoryServerAddress</name>
<description>spark 2.* yarn history server address</description>
</property>
<property>
<name>spark2EventLogDir</name>
<description>spark 2.* event log dir location</description>
</property>
</parameters>
<global>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<configuration>
<property>
<name>mapreduce.job.queuename</name>
<value>${queueName}</value>
</property>
<property>
<name>oozie.launcher.mapred.job.queue.name</name>
<value>${oozieLauncherQueueName}</value>
</property>
<property>
<name>oozie.action.sharelib.for.spark</name>
<value>${oozieActionShareLibForSpark2}</value>
</property>
</configuration>
</global>
<start to="ImportDB_claims"/>
<kill name="Kill">
<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<action name="ImportDB_claims">
<java>
<prepare>
<delete path="${contentPath}/db_claims"/>
</prepare>
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateDbEntitiesApplication</main-class>
<arg>--hdfsPath</arg><arg>${contentPath}/db_claims</arg>
<arg>--postgresUrl</arg><arg>${postgresURL}</arg>
<arg>--postgresUser</arg><arg>${postgresUser}</arg>
<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
<arg>--isLookupUrl</arg><arg>${isLookupUrl}</arg>
<arg>--action</arg><arg>claims</arg>
<arg>--dbschema</arg><arg>${dbSchema}</arg>
<arg>--nsPrefixBlacklist</arg><arg>${nsPrefixBlacklist}</arg>
</java>
<ok to="ImportODF_claims"/>
<error to="Kill"/>
</action>
<action name="ImportODF_claims">
<java>
<prepare>
<delete path="${contentPath}/odf_claims"/>
</prepare>
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
<arg>-p</arg><arg>${contentPath}/odf_claims</arg>
<arg>-mongourl</arg><arg>${mongoURL}</arg>
<arg>-mongodb</arg><arg>${mongoDb}</arg>
<arg>-f</arg><arg>ODF</arg>
<arg>-l</arg><arg>store</arg>
<arg>-i</arg><arg>claim</arg>
</java>
<ok to="ImportOAF_claims"/>
<error to="Kill"/>
</action>
<action name="ImportOAF_claims">
<java>
<prepare>
<delete path="${contentPath}/oaf_claims"/>
</prepare>
<main-class>eu.dnetlib.dhp.oa.graph.raw.MigrateMongoMdstoresApplication</main-class>
<arg>-p</arg><arg>${contentPath}/oaf_claims</arg>
<arg>-mongourl</arg><arg>${mongoURL}</arg>
<arg>-mongodb</arg><arg>${mongoDb}</arg>
<arg>-f</arg><arg>OAF</arg>
<arg>-l</arg><arg>store</arg>
<arg>-i</arg><arg>claim</arg>
</java>
<ok to="End"/>
<error to="Kill"/>
</action>
<end name="End"/>
</workflow-app>

View File

@ -1,18 +0,0 @@
<configuration>
<property>
<name>jobTracker</name>
<value>yarnRM</value>
</property>
<property>
<name>nameNode</name>
<value>hdfs://nameservice1</value>
</property>
<property>
<name>oozie.use.system.libpath</name>
<value>true</value>
</property>
<property>
<name>oozie.action.sharelib.for.spark</name>
<value>spark2</value>
</property>
</configuration>

Some files were not shown because too many files have changed in this diff.