Compare commits

...

56 Commits

Author SHA1 Message Date
George Kalampokis c84b0ab593 Merge remote-tracking branch 'origin/update-configs' into update-configs
# Conflicts:
#	dmp-backend/web/src/main/java/eu/eudat/logic/mapper/prefilling/PrefillingMapper.java
2022-05-17 09:55:15 +03:00
George Kalampokis 1f2084578d Minor Refactoring 2022-05-17 09:53:19 +03:00
George Kalampokis 6b6d63c128 Refactor fieldData mapping 2022-05-17 09:53:19 +03:00
George Kalampokis b012f415a7 Add background scheduled tasks in order to delete unused files (when and how frequently they run is configurable) 2022-05-17 09:53:19 +03:00
George Kalampokis 85b37dd16d Replace CrossOrigin annotation with global configuration 2022-05-17 09:53:19 +03:00
George Kalampokis 1d1226dc5f Resolve some additional maven dependency deprecations and improve performance 2022-05-17 09:53:18 +03:00
George Kalampokis 93deb96f99 Update and clean maven dependencies (possible breaking changes) 2022-05-17 09:53:09 +03:00
George Kalampokis 2c94cfa130 Improve Oauth2 Dialog 2022-05-17 09:51:46 +03:00
George Kalampokis 6542841c6c Update conf files 2022-05-17 09:51:46 +03:00
George Kalampokis 5916226a6f Remove unused maven dependencies 2022-05-17 09:51:46 +03:00
George Kalampokis af91a009db Remove http logger configurations (unused) 2022-05-17 09:51:46 +03:00
George Kalampokis 4434699f93 Remove unreliable plugin update on logstash 2022-05-17 09:51:46 +03:00
George Kalampokis a3da159564 Minor improvement over logstash pipeline 2022-05-17 09:51:46 +03:00
George Kalampokis dea57b1cf8 Replace the old application.properties with application.yml 2022-05-17 09:51:45 +03:00
George Kalampokis 4373d6db61 Remove unused module on the frontend 2022-05-17 09:51:45 +03:00
George Kalampokis 53d38088cd Minor Refactoring 2022-04-29 13:32:22 +03:00
George Kalampokis d7653f374c Refactor fieldData mapping 2022-04-18 18:38:58 +03:00
George Kalampokis 198acac6b2 Add background scheduled tasks in order to delete unused files (when and how frequently they run is configurable) 2022-04-18 18:38:22 +03:00
George Kalampokis f63d6b33ed Replace CrossOrigin annotation with global configuration 2022-04-18 18:32:25 +03:00
George Kalampokis f5873ab9b0 Resolve some additional maven dependency deprecations and improve performance 2022-04-18 18:30:15 +03:00
George Kalampokis a9d6ab89f5 Update and clean maven dependencies (possible breaking changes) 2022-04-18 18:26:20 +03:00
George Kalampokis 39cd57e4a9 Improve Oauth2 Dialog 2022-04-15 14:01:46 +03:00
George Kalampokis 9eec5daea1 Update conf files 2022-04-15 13:08:36 +03:00
George Kalampokis 65575048fe Remove unused maven dependencies 2022-04-12 16:39:05 +03:00
George Kalampokis df061051a3 Remove http logger configurations (unused) 2022-04-12 12:06:14 +03:00
George Kalampokis ee356c047d Refactor Zenodo deposit creation. Add model for mapping instead of relying on stringBuilder 2022-04-07 17:30:40 +03:00
George Kalampokis 8a8993b235 Fix critical issue when importing from xml with researchers 2022-04-07 17:28:39 +03:00
George Kalampokis 734dc94f26 Change Dataset sorting on DMP overview and editor from last modified to last created 2022-04-05 15:27:58 +03:00
George Kalampokis 7741a82b28 Merge remote-tracking branch 'origin/update-configs' into update-configs 2022-04-05 15:19:11 +03:00
George Kalampokis 1dd971c4aa Remove unreliable plugin update on logstash 2022-04-05 15:18:53 +03:00
George Kalampokis 4317feacd7 Minor improvement over logstash pipeline 2022-04-05 15:18:53 +03:00
George Kalampokis 88568f6dcf Replace the old application.properties with application.yml 2022-04-05 15:18:53 +03:00
George Kalampokis c6fea04fb4 Remove unused module on the frontend 2022-04-05 15:18:53 +03:00
George Kalampokis 41e8b40c40 Merge remote-tracking branch 'origin/update-configs' into update-configs 2022-04-01 18:15:42 +03:00
George Kalampokis f9027c8cc3 Remove unreliable plugin update on logstash 2022-04-01 18:15:24 +03:00
George Kalampokis 1c275c4d2b Minor improvement over logstash pipeline 2022-04-01 18:15:24 +03:00
George Kalampokis c0aac45f39 Replace the old application.properties with application.yml 2022-04-01 18:15:24 +03:00
George Kalampokis a51aa3aeb5 Remove unused module on the frontend 2022-04-01 18:15:24 +03:00
George Kalampokis 2c00dab450 Make the DMP export more consistent with the DMP shown on the front-end UI 2022-03-29 15:12:56 +03:00
George Kalampokis 2666568fab Make the Dataset Validation checker no longer exception-dependent 2022-03-29 15:11:55 +03:00
George Kalampokis cf1b2489a3 Fix critical issue with xml import 2022-03-29 11:30:56 +03:00
George Kalampokis 5938e4d6f3 Merge remote-tracking branch 'origin/update-configs' into update-configs 2022-03-29 10:58:35 +03:00
George Kalampokis 3fff5e1d2e Remove unreliable plugin update on logstash 2022-03-29 10:58:11 +03:00
George Kalampokis 9c9919fd85 Minor improvement over logstash pipeline 2022-03-29 10:58:11 +03:00
George Kalampokis 063491ef21 Replace the old application.properties with application.yml 2022-03-29 10:58:11 +03:00
George Kalampokis 57420c1403 Remove unused module on the frontend 2022-03-29 10:58:11 +03:00
George Kalampokis ed7cd0f852 Fix issue with DMP json export if it has been imported from json 2022-03-29 10:55:49 +03:00
George Kalampokis 4941a9fa95 Merge remote-tracking branch 'origin/update-configs' into update-configs 2022-03-23 11:31:21 +02:00
George Kalampokis 765ddf752a Remove unreliable plugin update on logstash 2022-03-23 11:31:10 +02:00
George Kalampokis 6ddf8103f2 Minor improvement over logstash pipeline 2022-03-23 11:31:10 +02:00
George Kalampokis 54173dc5b9 Replace the old application.properties with application.yml 2022-03-23 11:31:10 +02:00
George Kalampokis f20cacc312 Remove unused module on the frontend 2022-03-23 11:31:10 +02:00
George Kalampokis ac4886b477 Remove unreliable plugin update on logstash 2022-03-21 17:20:39 +02:00
George Kalampokis 6e8794af1d Minor improvement over logstash pipeline 2022-03-21 15:59:15 +02:00
George Kalampokis 4c651c6200 Replace the old application.properties with application.yml 2022-03-21 15:55:44 +02:00
George Kalampokis 674cbbf384 Remove unused module on the frontend 2022-03-21 15:10:28 +02:00
119 changed files with 1404 additions and 2578 deletions

View File

@ -16,5 +16,5 @@ FROM docker.elastic.co/logstash/logstash:${ELK_VERSION}
# Add your logstash plugins setup here
# Example: RUN logstash-plugin install logstash-filter-json
RUN logstash-plugin update logstash-input-beats
RUN logstash-plugin update logstash-filter-grok
#RUN logstash-plugin update logstash-input-beats
#RUN logstash-plugin update logstash-filter-grok

View File

@ -2,18 +2,11 @@ input {
pipeline { address => open_dmp_send_to_elastic }
}
filter {
}
output {
elasticsearch {
hosts => "elasticsearch:9200"
index => "opendmp.logs"
user => elastic
password =>
index =>"opendmp.logs"
#manage_template => true
#template => "/usr/share/logstash/templates/audit/openDMP.json"
#template_name => "cite.elas.openDMP-audit*"
#template_overwrite => true
password => ""
}
}

View File

@ -11,6 +11,8 @@ import eu.eudat.queryable.QueryableList;
import eu.eudat.queryable.types.FieldSelectionType;
import eu.eudat.queryable.types.SelectionField;
import eu.eudat.types.grant.GrantStateType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;
@ -25,6 +27,7 @@ import java.util.concurrent.CompletableFuture;
@Component("datasetDao")
public class DatasetDaoImpl extends DatabaseAccess<Dataset> implements DatasetDao {
private static final Logger logger = LoggerFactory.getLogger(DatasetDaoImpl.class);
@Autowired
public DatasetDaoImpl(DatabaseService<Dataset> databaseService) { super(databaseService); }
@ -99,6 +102,7 @@ public class DatasetDaoImpl extends DatabaseAccess<Dataset> implements DatasetDa
@Override
public Dataset createOrUpdate(Dataset item) {
logger.info("I'm Here " + item.getLabel());
return getDatabaseService().createOrUpdate(item, Dataset.class);
}

View File

@ -11,7 +11,6 @@ import eu.eudat.types.grant.GrantStateType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;
import schemasMicrosoftComOfficeOffice.LeftDocument;
import javax.persistence.criteria.JoinType;
import java.util.Date;

View File

@ -17,6 +17,7 @@ import java.io.IOException;
*/
public abstract class ElasticRepository<T extends ElasticEntity,C extends Criteria> implements Repository<T,C> {
private static final Logger logger = LoggerFactory.getLogger(ElasticRepository.class);
private final ObjectMapper mapper;
private RestHighLevelClient client;
public RestHighLevelClient getClient() {
@ -32,10 +33,10 @@ public abstract class ElasticRepository<T extends ElasticEntity,C extends Criter
logger.warn("Unable to connect to Elastic Services");
this.client = null;
}
this.mapper = new ObjectMapper();
}
public <T> T transformFromString(String value, Class<T> tClass) {
ObjectMapper mapper = new ObjectMapper();
T item = null;
try {
item = mapper.readValue(value, tClass);

View File

@ -10,7 +10,7 @@
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.5.2</version>
<version>2.6.6</version>
</parent>
<modules>
@ -27,166 +27,168 @@
<java.version>1.8</java.version>
<dmp-backend-commons.version>0.0.1-SNAPSHOT</dmp-backend-commons.version>
<org.springframework.version>5.3.8</org.springframework.version>
<!--<org.springframework.version>5.3.8</org.springframework.version>-->
<!-- <org.springframeweu.eudat.logic.securityrity.version>3.2.10.RELEASE</org.springframeweu.eudat.logic.securityrity.version> -->
<org.springframework.security.version>5.3.10.RELEASE</org.springframework.security.version>
<!-- <org.springframework.security.version>5.3.10.RELEASE</org.springframework.security.version>-->
<!--<com.sun.jersey.version>1.19.1</com.sun.jersey.version>-->
<!--
<!--
<org.apache.tomcat.tomcat-jdbc.version>7.0.35</org.apache.tomcat.tomcat-jdbc.version>
-->
-->
<!--<com.fasterxml.jackson>2.9.0</com.fasterxml.jackson>-->
<hibernate.version>5.5.3.Final</hibernate.version>
<!-- <hibernate.version>5.5.3.Final</hibernate.version>-->
<commons-codec.version>1.9</commons-codec.version>
<org.junit.version>4.11</org.junit.version>
<log4j.version>1.2.17</log4j.version>
<!-- <commons-codec.version>1.9</commons-codec.version>-->
<!-- <org.junit.version>4.11</org.junit.version>-->
<!-- <log4j.version>1.2.17</log4j.version>-->
<log4j2.version>2.15.0</log4j2.version>
<slf4j.version>1.7.12</slf4j.version>
<!-- <slf4j.version>1.7.12</slf4j.version>-->
<!--<jetty.version>11.0.5
</jetty.version>--> <!-- Adapt this to a version found on http://repo.maven.apache.org/maven2/org/eclipse/jetty/jetty-maven-plugin/ -->
<logback.version>1.2.3</logback.version>
<!-- <logback.version>1.2.3</logback.version>-->
<!-- <javax.inject.version>1</javax.inject.version>-->
<!--<javax.servlet.servlet-api.version>3.0.1</javax.servlet.servlet-api.version>-->
<apache.poi.version>5.2.2</apache.poi.version>
<jackson.version>2.12.3</jackson.version>
<elasticsearch.version>7.6.2</elasticsearch.version>
<jaxb.version>2.3.0</jaxb.version>
<caffeine.version>2.9.0</caffeine.version>
<google.api.version>1.33.4</google.api.version>
<json.path.version>2.4.0</json.path.version>
<postgresql.version>42.2.22</postgresql.version>
<social.facebook.version>2.0.3.RELEASE</social.facebook.version>
<social.linkedin.version>1.0.2.RELEASE</social.linkedin.version>
<social.twitter.version>1.1.2.RELEASE</social.twitter.version>
<apache.commons.lang3.version>3.12.0</apache.commons.lang3.version>
</properties>
<dependencies>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20160810</version>
</dependency>
<dependency>
<!-- Core Spring Boot Dependencies -->
<!--<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context-support</artifactId>
<version>${org.springframework.version}</version>
</dependency>
</dependency>-->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/org.elasticsearch/elasticsearch -->
<dependency>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
<version>7.7.0</version>
<exclusions>
<exclusion>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore-nio</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.5.12</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore-nio</artifactId>
<version>4.4.13</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>4.4.13</version>
</dependency>
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-high-level-client</artifactId>
<version>7.6.0</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core</artifactId>
<version>${hibernate.version}</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-c3p0</artifactId>
<version>${hibernate.version}</version>
</dependency>
<!-- jdbc Driver -->
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>${postgresql.version}</version>
</dependency>
<!-- Elasticsearch -->
<!-- https://mvnrepository.com/artifact/org.elasticsearch/elasticsearch -->
<dependency>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
<version>${elasticsearch.version}</version>
</dependency>
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-high-level-client</artifactId>
<version>${elasticsearch.version}</version>
</dependency>
<!-- Autocomplete cache -->
<dependency>
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
<version>${caffeine.version}</version>
</dependency>
<!-- Used for Google login ONLY -->
<!-- https://mvnrepository.com/artifact/com.google.api-client/google-api-client -->
<dependency>
<groupId>com.google.api-client</groupId>
<artifactId>google-api-client</artifactId>
<version>1.23.0</version>
<version>${google.api.version}</version>
</dependency>
<!-- facebook Login -->
<dependency>
<groupId>org.springframework.social</groupId>
<artifactId>spring-social-facebook</artifactId>
<version>${social.facebook.version}</version>
</dependency>
<!-- linkedin Login -->
<dependency>
<groupId>org.springframework.social</groupId>
<artifactId>spring-social-linkedin</artifactId>
<version>${social.linkedin.version}</version>
</dependency>
<!-- twitter login-->
<dependency>
<groupId>org.springframework.social</groupId>
<artifactId>spring-social-twitter</artifactId>
<version>${social.twitter.version}</version>
</dependency>
<!-- Used for Remote Fetcher -->
<!-- https://mvnrepository.com/artifact/com.jayway.jsonpath/json-path -->
<dependency>
<groupId>com.jayway.jsonpath</groupId>
<artifactId>json-path</artifactId>
<version>2.4.0</version>
<version>${json.path.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml -->
<dependency>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>jackson-dataformat-xml</artifactId>
<version>2.12.3</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.fasterxml.jackeu.eudat.corecore/jackeu.eudat.corecore -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>2.12.3</version>
<version>${jackson.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.fasterxml.jackeu.eudat.corecore/jackson-databind -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.12.3</version>
<version>${jackson.version}</version>
</dependency>
<!-- g/a spring -->
<!-- https://mvnrepository.com/artifact/com.fasterxml.jackeu.eudat.corecore/jackson-databind -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<!-- Various libs -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.5</version>
<version>${apache.commons.lang3.version}</version>
</dependency>
<!-- Docx export -->
<!-- https://mvnrepository.com/artifact/org.apache.poi/poi -->
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
<version>4.0.0</version>
<version>${apache.poi.version}</version>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>4.0.0</version>
<version>${apache.poi.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.xmlgraphics/fop -->
<!-- Used for the HTML to DocX export -->
<!-- https://mvnrepository.com/artifact/fr.opensagres.xdocreport/org.apache.poi.xwpf.convereu.eudat.corecore -->
<dependency>
<groupId>org.apache.xmlgraphics</groupId>
<artifactId>fop</artifactId>
<version>2.3</version>
<groupId>fr.opensagres.xdocreport</groupId>
<artifactId>org.apache.poi.xwpf.converter.core</artifactId>
<version>1.0.6</version>
</dependency>
<dependency>
@ -194,77 +196,38 @@
<artifactId>jsoup</artifactId>
<version>1.14.3</version>
</dependency>
<!-- https://mvnrepository.com/artifact/fr.opensagres.xdocreport/org.apache.poi.xwpf.converter.pdf -->
<dependency>
<groupId>fr.opensagres.xdocreport</groupId>
<artifactId>org.apache.poi.xwpf.converter.pdf</artifactId>
<version>1.0.6</version>
</dependency>
<!-- https://mvnrepository.com/artifact/fr.opensagres.xdocreport/org.apache.poi.xwpf.convereu.eudat.corecore -->
<dependency>
<groupId>fr.opensagres.xdocreport</groupId>
<artifactId>org.apache.poi.xwpf.converter.core</artifactId>
<version>1.0.6</version>
</dependency>
<!-- https://mvnrepository.com/artifact/fr.opensagres.xdocreport/fr.opensagres.xdocreport.itext.extension -->
<dependency>
<groupId>fr.opensagres.xdocreport</groupId>
<artifactId>fr.opensagres.xdocreport.itext.extension</artifactId>
<version>2.0.1</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.1</version>
</dependency>
<!--<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-jpamodelgen</artifactId>
</dependency>-->
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>transport</artifactId>
<version>7.6.0</version>
</dependency>
<!--<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-to-slf4j</artifactId>
<version>2.8.2</version>
</dependency>-->
<!-- XML Binding for (mainly) the templates -->
<!-- https://mvnrepository.com/artifact/javax.xml.bind/jaxb-api -->
<dependency>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
<version>2.3.1</version>
<version>${jaxb.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.glassfish.jaxb/jaxb-core -->
<dependency>
<groupId>org.glassfish.jaxb</groupId>
<artifactId>jaxb-core</artifactId>
<version>2.3.0</version>
<version>${jaxb.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.glassfish.jaxb/jaxb-runtime -->
<dependency>
<groupId>org.glassfish.jaxb</groupId>
<artifactId>jaxb-runtime</artifactId>
<version>2.3.1</version>
<version>${jaxb.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/javax.annotation/javax.annotation-api -->
<dependency>
<!--<dependency>
<groupId>javax.annotation</groupId>
<artifactId>javax.annotation-api</artifactId>
<version>1.3.1</version>
</dependency>
</dependency>-->
<!-- Model Validation -->
<!-- https://mvnrepository.com/artifact/javax.validation/validation-api -->
<dependency>
<groupId>javax.validation</groupId>
@ -272,37 +235,11 @@
<version>2.0.1.Final</version>
</dependency>
<!-- The client -->
<dependency>
<groupId>io.prometheus</groupId>
<artifactId>simpleclient</artifactId>
<version>0.11.0</version>
</dependency>
<!-- Hotspot JVM metrics-->
<dependency>
<groupId>io.prometheus</groupId>
<artifactId>simpleclient_hotspot</artifactId>
<version>0.11.0</version>
</dependency>
<!-- Exposition HTTPServer-->
<dependency>
<groupId>io.prometheus</groupId>
<artifactId>simpleclient_httpserver</artifactId>
<version>0.11.0</version>
</dependency>
<!-- Pushgateway exposition-->
<dependency>
<groupId>io.prometheus</groupId>
<artifactId>simpleclient_pushgateway</artifactId>
<version>0.11.0</version>
</dependency>
<!-- Prometheus Analytics -->
<!-- https://mvnrepository.com/artifact/io.micrometer/micrometer-registry-prometheus -->
<dependency>
<groupId>io.micrometer</groupId>
<artifactId>micrometer-registry-prometheus</artifactId>
<version>1.7.1</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>

View File

@ -1,7 +1,5 @@
package eu.eudat.queryable.collector;
import com.google.common.collect.Lists;
import javax.persistence.Tuple;
import java.util.*;
import java.util.stream.Collectors;
@ -68,7 +66,8 @@ public class ProjectionField {
list.add(current.parent.key);
current = current.parent;
}
return String.join(".", Lists.reverse(list));
Collections.reverse(list);
return String.join(".", list);
}
private Object createObject(List<Tuple> tuples, String field) {

View File

@ -42,9 +42,12 @@ public class QueryableHibernateList<T extends DataEntity> implements QueryableLi
private String hint;
private Map<String, Join> joinsMap = new HashMap<>();
private final ObjectMapper mapper;
public QueryableHibernateList(EntityManager manager, Class<T> tClass) {
this.manager = manager;
this.tClass = tClass;
this.mapper = new ObjectMapper();
}
public QueryableHibernateList<T> setManager(EntityManager manager) {
@ -246,7 +249,6 @@ public class QueryableHibernateList<T extends DataEntity> implements QueryableLi
if (!this.fields.isEmpty()) this.selectFields();
if (distinct) this.query.distinct(true);
//if (!this.fields.isEmpty()) this.query.multiselect(this.parseFields(this.fields));
ObjectMapper mapper = new ObjectMapper();
if (!this.fields.isEmpty()) return this.toListWithFields().stream().map(m -> mapper.convertValue(m, this.tClass)).collect(Collectors.toList());
return this.toListWithOutFields();
}

View File

@ -50,7 +50,6 @@
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>2.12.3</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.springframework/spring-context -->
@ -71,55 +70,45 @@
<dependency>
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
<version>2.9.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/junit/junit -->
<dependency>
<!--<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>5.7.2</version>
<scope>test</scope>
</dependency>
</dependency>-->
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>42.2.22</version>
</dependency>
<dependency>
<!--<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger-ui</artifactId>
<version>3.0.0</version>
</dependency>
</dependency>-->
<!-- facebook Login -->
<dependency>
<!--<dependency>
<groupId>org.springframework.social</groupId>
<artifactId>spring-social-facebook</artifactId>
<version>2.0.3.RELEASE</version>
</dependency>
<!-- linkedin Login -->
&lt;!&ndash; linkedin Login &ndash;&gt;
<dependency>
<groupId>org.springframework.social</groupId>
<artifactId>spring-social-linkedin</artifactId>
<version>1.0.2.RELEASE</version>
</dependency>
<!-- tweeter login-->
&lt;!&ndash; tweeter login&ndash;&gt;
<dependency>
<groupId>org.springframework.social</groupId>
<artifactId>spring-social-twitter</artifactId>
<version>1.1.2.RELEASE</version>
</dependency>
<dependency>
</dependency>-->
<!--<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-tomcat</artifactId>
</dependency>
</dependency>-->
</dependencies>
<build>

View File

@ -1,30 +0,0 @@
package eu.eudat.configurations;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.module.SimpleModule;
import eu.eudat.criteria.entities.Criteria;
import eu.eudat.criteria.serialzier.CriteriaSerializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.util.ArrayList;
@Configuration
public class JacksonConfiguration {
@Bean
public ObjectMapper buildObjectMapper() {
ArrayList<Module> modules = new ArrayList<>();
SimpleModule criteriaSerializerModule = new SimpleModule();
criteriaSerializerModule.addDeserializer(Criteria.class, new CriteriaSerializer());
modules.add(criteriaSerializerModule);
return new ObjectMapper()
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
.registerModules(modules);
}
}

View File

@ -1,37 +1,43 @@
package eu.eudat.configurations;
import eu.eudat.controllers.interceptors.RequestInterceptor;
import eu.eudat.logic.handlers.PrincipalArgumentResolver;
import eu.eudat.logic.services.ApiContext;
import eu.eudat.logic.services.operations.authentication.AuthenticationService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.web.method.support.HandlerMethodArgumentResolver;
import org.springframework.web.servlet.config.annotation.CorsRegistry;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
import java.util.List;
@EnableAsync
@Configuration
@EnableScheduling
public class WebMVCConfiguration extends WebMvcConfigurerAdapter {
public class WebMVCConfiguration implements WebMvcConfigurer {
private ApiContext apiContext;
private AuthenticationService verifiedUserAuthenticationService;
private AuthenticationService nonVerifiedUserAuthenticationService;
private final AuthenticationService verifiedUserAuthenticationService;
private final AuthenticationService nonVerifiedUserAuthenticationService;
@Autowired
public WebMVCConfiguration(ApiContext apiContext, AuthenticationService verifiedUserAuthenticationService, AuthenticationService nonVerifiedUserAuthenticationService) {
private final Environment environment;
public WebMVCConfiguration(ApiContext apiContext, AuthenticationService verifiedUserAuthenticationService, AuthenticationService nonVerifiedUserAuthenticationService, Environment environment) {
this.apiContext = apiContext;
this.verifiedUserAuthenticationService = verifiedUserAuthenticationService;
this.nonVerifiedUserAuthenticationService = nonVerifiedUserAuthenticationService;
this.environment = environment;
}
@Override
public void addCorsMappings(CorsRegistry registry) {
registry.addMapping("/api/**").allowedOrigins(this.environment.getProperty("dmp.domain"));
}
@Autowired
@Override
public void addArgumentResolvers(List<HandlerMethodArgumentResolver> argumentResolvers) {
argumentResolvers.add(new PrincipalArgumentResolver(verifiedUserAuthenticationService, nonVerifiedUserAuthenticationService));

View File

@ -0,0 +1,9 @@
package eu.eudat.configurations.file;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Configuration;
@Configuration
@EnableConfigurationProperties({TmpProperties.class, PermProperties.class})
public class FileTimeConfiguration {
}

View File

@ -0,0 +1,17 @@
package eu.eudat.configurations.file;
import org.springframework.boot.context.properties.ConfigurationProperties;
@ConfigurationProperties(prefix = "perm")
public class PermProperties {
private TimeProperties time;
public TimeProperties getTime() {
return time;
}
public void setTime(TimeProperties time) {
this.time = time;
}
}

View File

@ -0,0 +1,59 @@
package eu.eudat.configurations.file;
public class StartTimeProperties {
private Integer month;
private String dayOfWeekName;
private Integer dayOfWeek;
private Integer dayOfMonth;
private Integer hour;
private Integer minute;
public Integer getMonth() {
return month;
}
public void setMonth(Integer month) {
this.month = month;
}
public String getDayOfWeekName() {
return dayOfWeekName;
}
public void setDayOfWeekName(String dayOfWeekName) {
this.dayOfWeekName = dayOfWeekName;
}
public Integer getDayOfWeek() {
return dayOfWeek;
}
public void setDayOfWeek(Integer dayOfWeek) {
this.dayOfWeek = dayOfWeek;
}
public Integer getDayOfMonth() {
return dayOfMonth;
}
public void setDayOfMonth(Integer dayOfMonth) {
this.dayOfMonth = dayOfMonth;
}
public Integer getHour() {
return hour;
}
public void setHour(Integer hour) {
this.hour = hour;
}
public Integer getMinute() {
return minute;
}
public void setMinute(Integer minute) {
this.minute = minute;
}
}

View File

@ -0,0 +1,33 @@
package eu.eudat.configurations.file;
import java.time.temporal.ChronoUnit;
public class TimeProperties {
private Integer interval;
private ChronoUnit timeUnit;
private StartTimeProperties startTime;
public Integer getInterval() {
return interval;
}
public void setInterval(Integer interval) {
this.interval = interval;
}
public ChronoUnit getTimeUnit() {
return timeUnit;
}
public void setTimeUnit(ChronoUnit timeUnit) {
this.timeUnit = timeUnit;
}
public StartTimeProperties getStartTime() {
return startTime;
}
public void setStartTime(StartTimeProperties startTime) {
this.startTime = startTime;
}
}

View File

@ -0,0 +1,17 @@
package eu.eudat.configurations.file;
import org.springframework.boot.context.properties.ConfigurationProperties;
@ConfigurationProperties(prefix = "tmp")
public class TmpProperties {
private TimeProperties time;
public TimeProperties getTime() {
return time;
}
public void setTime(TimeProperties time) {
this.time = time;
}
}
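
The scheduled task that consumes these new tmp/perm time bindings is not part of the files shown in this diff, so the following is only a rough sketch of how such a periodic cleanup job could be wired; the class name TmpFileCleanupTask, the injected TaskScheduler, and the target directory are assumptions for illustration, not code from this change set.

package eu.eudat.configurations.file;

import org.springframework.scheduling.TaskScheduler;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;
import java.io.File;
import java.time.Duration;
import java.time.Instant;

// Hypothetical sketch (not part of the change set above): uses the TmpProperties
// binding to schedule periodic deletion of stale files.
@Component
public class TmpFileCleanupTask {

    private final TmpProperties tmpProperties;
    private final TaskScheduler taskScheduler;

    public TmpFileCleanupTask(TmpProperties tmpProperties, TaskScheduler taskScheduler) {
        this.tmpProperties = tmpProperties;
        this.taskScheduler = taskScheduler;
    }

    @PostConstruct
    public void schedule() {
        // e.g. an interval of 1 with a time unit of DAYS runs the task once per day
        Duration period = Duration.of(
                tmpProperties.getTime().getInterval(),
                tmpProperties.getTime().getTimeUnit());
        taskScheduler.scheduleAtFixedRate(this::deleteUnusedFiles, Instant.now().plus(period), period);
    }

    private void deleteUnusedFiles() {
        // The real task would resolve its target directory from configuration;
        // the system temp directory is used here purely for illustration.
        File[] files = new File(System.getProperty("java.io.tmpdir")).listFiles();
        if (files == null) return;
        long cutoff = System.currentTimeMillis() - Duration.of(
                tmpProperties.getTime().getInterval(),
                tmpProperties.getTime().getTimeUnit()).toMillis();
        for (File file : files) {
            if (file.isFile() && file.lastModified() < cutoff) {
                file.delete();
            }
        }
    }
}

With wiring like this, the interval and time unit under the tmp (or perm) prefix in application.yml control when and how often the deletion runs, which is what the "when and how frequently they run is configurable" commit message refers to.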

View File

@ -38,7 +38,6 @@ import static eu.eudat.types.Authorities.ADMIN;
import static eu.eudat.types.Authorities.DATASET_PROFILE_MANAGER;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/admin/"})
public class Admin extends BaseController {

View File

@ -18,7 +18,6 @@ import java.util.List;
* Created by ikalyvas on 3/28/2018.
*/
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/common"})
public class CommonController {

View File

@ -15,7 +15,6 @@ import org.springframework.web.bind.annotation.*;
import javax.transaction.Transactional;
@RestController
@CrossOrigin
@RequestMapping(value = "api/contactEmail")
public class ContactEmail {
private static final Logger logger = LoggerFactory.getLogger(ContactEmail.class);

View File

@ -12,7 +12,6 @@ import org.springframework.web.bind.annotation.*;
import java.util.List;
@RestController
@CrossOrigin
@RequestMapping(value = "api/currency")
public class CurrencyController {

View File

@ -31,7 +31,6 @@ import static eu.eudat.types.Authorities.ADMIN;
* Created by ikalyvas on 3/21/2018.
*/
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/dmpprofile"})
public class DMPProfileController extends BaseController {

View File

@ -62,7 +62,6 @@ import static org.springframework.http.MediaType.*;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/dmps/"})
public class DMPs extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(DMPs.class);

View File

@ -22,11 +22,10 @@ import java.io.IOException;
import java.util.List;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api"})
public class DashBoardController extends BaseController {
private DashBoardManager dashBoardManager;
private final DashBoardManager dashBoardManager;
@Autowired
public DashBoardController(ApiContext apiContext, DashBoardManager dashBoardManager) {
super(apiContext);

View File

@ -19,7 +19,6 @@ import java.util.List;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/external/datarepos"})
public class DataRepositories extends BaseController {

View File

@ -9,9 +9,7 @@ import eu.eudat.models.data.components.commons.datafield.AutoCompleteData;
import eu.eudat.models.data.externaldataset.ExternalAutocompleteFieldModel;
import eu.eudat.models.data.helpers.common.AutoCompleteLookupItem;
import eu.eudat.models.data.helpers.responses.ResponseItem;
import eu.eudat.models.data.properties.PropertiesModel;
import eu.eudat.models.data.security.Principal;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
@ -19,15 +17,12 @@ import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.*;
import javax.xml.xpath.XPathExpressionException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import static eu.eudat.types.Authorities.ADMIN;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api"})
public class DatasetProfileController extends BaseController {

View File

@ -17,7 +17,6 @@ import java.util.List;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api"})
public class DatasetProfiles extends BaseController {

View File

@ -59,7 +59,6 @@ import static eu.eudat.types.Authorities.ANONYMOUS;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/datasets/"})
public class Datasets extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(Datasets.class);

View File

@ -16,7 +16,6 @@ import org.springframework.web.bind.annotation.*;
import javax.transaction.Transactional;
@RestController
@CrossOrigin
@RequestMapping(value = "api/emailConfirmation")
public class EmailConfirmation {

View File

@ -18,7 +18,6 @@ import org.springframework.web.bind.annotation.*;
import javax.transaction.Transactional;
@RestController
@CrossOrigin
@RequestMapping(value = "api/emailMergeConfirmation")
public class EmailMergeConfirmation {

View File

@ -24,7 +24,6 @@ import java.util.UUID;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api"})
public class ExternalDatasets extends BaseController {

View File

@ -1,6 +1,6 @@
package eu.eudat.controllers;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.data.entities.FileUpload;
@ -12,14 +12,15 @@ import eu.eudat.logic.services.operations.DatabaseRepository;
import eu.eudat.logic.utilities.documents.helpers.FileEnvelope;
import eu.eudat.logic.utilities.json.JsonSearcher;
import eu.eudat.models.HintedModelFactory;
import eu.eudat.models.data.components.commons.datafield.UploadData;
import eu.eudat.models.data.datasetwizard.DatasetWizardModel;
import eu.eudat.models.data.helpers.responses.ResponseItem;
import eu.eudat.models.data.security.Principal;
import eu.eudat.types.ApiMessageCode;
import eu.eudat.types.Authorities;
import org.apache.poi.util.IOUtils;
import org.json.JSONArray;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.http.HttpHeaders;
@ -32,25 +33,28 @@ import org.springframework.web.multipart.MultipartFile;
import javax.transaction.Transactional;
import java.io.*;
import java.nio.file.Files;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.HashSet;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/file/"})
public class FileController {
private static final Logger logger = LoggerFactory.getLogger(FileController.class);
private DatasetProfileManager datasetProfileManager;
private final Environment environment;
private DatabaseRepository databaseRepository;
private final ObjectMapper objectMapper;
@Autowired
public FileController(DatasetProfileManager datasetProfileManager, Environment environment, ApiContext apiContext) {
this.datasetProfileManager = datasetProfileManager;
this.environment = environment;
this.databaseRepository = apiContext.getOperationsContext().getDatabaseRepository();
this.objectMapper = apiContext.getUtilitiesService().getGenericObjectMapper();
}
@RequestMapping(method = RequestMethod.POST, value = {"/upload"})
@ -62,57 +66,48 @@ public class FileController {
eu.eudat.models.data.admin.composite.DatasetProfile datasetprofile = this.datasetProfileManager.getDatasetProfile(datasetProfileId);
ObjectMapper mapper = new ObjectMapper();
mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
String json = mapper.writeValueAsString(datasetprofile.getSections());;
JsonNode propertiesJson = mapper.readTree(json);
String json = objectMapper.writeValueAsString(datasetprofile.getSections());
JsonNode propertiesJson = objectMapper.readTree(json);
Set<JsonNode> fieldNodes = new HashSet<>();
fieldNodes.addAll(JsonSearcher.findNodes(propertiesJson, "id", fieldId, false));
// AtomicReference<String> exceptionMessage = null;
AtomicBoolean acceptedFile = new AtomicBoolean(false);
fieldNodes.forEach(node -> {
Boolean acceptedFile = false;
for (JsonNode node : fieldNodes) {
JsonNode data = node.get("data");
if (data != null && !data.toString().equals("\"\"") && !data.toString().equals("null")) {
String stringValue = data.toString().replaceAll("=", ":");
JSONObject dataObj = new JSONObject(stringValue);
Map<String, Object> dataMap = ((JSONObject) dataObj).toMap();
if(dataMap.get("maxFileSizeInMB") != null && !dataMap.get("maxFileSizeInMB").toString().equals("\"\"") && !dataMap.get("maxFileSizeInMB").toString().equals("null")) {
if (file.getSize() <= Integer.parseInt(dataMap.get("maxFileSizeInMB").toString())*1048576) {
acceptedFile.set(true);
}
// else {
// exceptionMessage.set("The file is too large. Max file upload is " + dataMap.get("maxFileSizeInMB").toString() + " MB.");
// }
}
if(acceptedFile.get() && data.get("types") != null && !data.get("types").toString().equals("\"\"") && !data.get("types").toString().equals("null")) {
acceptedFile.set(false);
JSONArray types = new JSONArray(data.get("types").toString());
types.iterator().forEachRemaining(element -> {
Map<String, Object> typesMap = ((JSONObject) element).toMap();
if(typesMap.get("value") != null && !typesMap.get("value").toString().equals("\"\"") && !typesMap.get("value").toString().equals("null")) {
if(file.getContentType().equals(typesMap.get("value").toString())) {
acceptedFile.set(true);
try {
UploadData uploadData = objectMapper.treeToValue(data, UploadData.class);
if (uploadData != null) {
if (uploadData.getMaxFileSizeInMB() != null) {
if (file.getSize() <= (uploadData.getMaxFileSizeInMB() * Math.pow(1024, 2))) {
acceptedFile = true;
}
}
});
if (acceptedFile && uploadData.getTypes() != null && !uploadData.getTypes().isEmpty()) {
acceptedFile = false;
for (UploadData.Option option : uploadData.getTypes()) {
if (option.getValue() != null) {
if (file.getContentType().equals(option.getValue())) {
acceptedFile = true;
}
}
}
}
}
} catch (JsonProcessingException e) {
logger.error(e.getLocalizedMessage(), e);
}
}
// if(!acceptedFile.get()) {
// exceptionMessage.set("The file type is not accepted.");
// }
}
});
if(!acceptedFile.get()) {
if (!acceptedFile) {
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(new ResponseItem<String>().status(ApiMessageCode.ERROR_MESSAGE).message("The uploaded file is too large or has an unaccepted type"));
}
File convFile = new File(this.environment.getProperty("temp.temp") + uuid);
convFile.createNewFile();
FileOutputStream fos = new FileOutputStream(convFile);
fos.write(file.getBytes());
fos.close();
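
The UploadData model that replaces the JSONObject parsing above is imported from eu.eudat.models.data.components.commons.datafield, but its source is not included in this diff. Judging from the accessors used in FileController (getMaxFileSizeInMB(), getTypes(), Option.getValue()), its shape is roughly the sketch below; any field beyond those accessors is an assumption.

package eu.eudat.models.data.components.commons.datafield;

import java.util.List;

// Approximate shape inferred from its usage in FileController above; the actual
// class in the repository may carry additional fields.
public class UploadData {

    private Integer maxFileSizeInMB;   // upload size limit, in megabytes
    private List<Option> types;        // accepted content types

    public Integer getMaxFileSizeInMB() { return maxFileSizeInMB; }
    public void setMaxFileSizeInMB(Integer maxFileSizeInMB) { this.maxFileSizeInMB = maxFileSizeInMB; }

    public List<Option> getTypes() { return types; }
    public void setTypes(List<Option> types) { this.types = types; }

    public static class Option {
        private String label;          // assumed display name
        private String value;          // MIME type compared against file.getContentType()

        public String getLabel() { return label; }
        public void setLabel(String label) { this.label = label; }

        public String getValue() { return value; }
        public void setValue(String value) { this.value = value; }
    }
}

Binding the field's data node to this model via objectMapper.treeToValue is what lets the controller drop the string-replace and org.json handling shown in the removed lines.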

View File

@ -16,7 +16,6 @@ import org.springframework.web.bind.annotation.*;
import java.util.List;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/funders/"})
public class Funders extends BaseController {
private FunderManager funderManager;

View File

@ -28,7 +28,6 @@ import static eu.eudat.types.Authorities.ANONYMOUS;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/grants/"})
public class Grants extends BaseController {
private GrantManager grantManager;

View File

@ -19,7 +19,6 @@ import java.util.List;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/external/journals"})
public class JournalsController extends BaseController {

View File

@ -15,7 +15,6 @@ import java.io.*;
import java.util.UUID;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/language/"})
public class LanguageController {

View File

@ -17,7 +17,6 @@ import java.util.List;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/external/licenses"})
public class Licenses extends BaseController {

View File

@ -14,7 +14,6 @@ import org.springframework.web.bind.annotation.*;
import java.util.UUID;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/lock/"})
public class LockController {

View File

@ -50,7 +50,6 @@ import java.security.GeneralSecurityException;
@RestController
@CrossOrigin
@RequestMapping(value = "api/auth")
public class Login {
private static final Logger logger = LoggerFactory.getLogger(Login.class);

View File

@ -20,7 +20,6 @@ import java.util.List;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api"})
public class Organisations extends BaseController {

View File

@ -14,7 +14,6 @@ import java.util.List;
import java.util.UUID;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api"})
public class Prefillings {

View File

@ -17,7 +17,6 @@ import org.springframework.web.bind.annotation.*;
import java.util.List;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/projects/"})
public class Projects extends BaseController {
private ProjectManager projectManager;

View File

@ -17,7 +17,6 @@ import java.util.List;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/external/pubrepos"})
public class PubRepositoriesController extends BaseController {

View File

@ -17,7 +17,6 @@ import java.util.List;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/external/publications"})
public class PublicationsController extends BaseController {

View File

@ -26,7 +26,6 @@ import javax.validation.Valid;
import java.util.UUID;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/quick-wizard/"})
public class QuickWizardController extends BaseController {

View File

@ -19,7 +19,6 @@ import java.util.List;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api"})
public class Registries extends BaseController {

View File

@ -21,7 +21,6 @@ import java.util.Map;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/researchers"})
public class Researchers extends BaseController {

View File

@ -19,7 +19,6 @@ import java.util.List;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api"})
public class Services extends BaseController {

View File

@ -25,7 +25,6 @@ import java.util.stream.Collectors;
* Created by ikalyvas on 7/5/2018.
*/
@RestController
@CrossOrigin
@RequestMapping(value = {"/api"})
public class TagController extends BaseController {

View File

@ -17,7 +17,6 @@ import java.util.List;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/external/taxonomies"})
public class TaxonomiesController extends BaseController {

View File

@ -26,7 +26,6 @@ import java.util.stream.Stream;
import static eu.eudat.types.Authorities.ADMIN;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/userguide/"})
public class UserGuideController {

View File

@ -22,7 +22,6 @@ import java.util.UUID;
@RequestMapping("api/invite/")
@RestController
@CrossOrigin
public class UserInvitationController extends BaseController {
private InvitationsManager invitationsManager;

View File

@ -31,7 +31,6 @@ import static eu.eudat.types.Authorities.ADMIN;
@RestController
@CrossOrigin
@RequestMapping(value = "api/user")
public class Users extends BaseController {

View File

@ -20,7 +20,6 @@ import java.util.List;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api"})
public class Validation extends BaseController {

View File

@ -6,6 +6,7 @@ import eu.eudat.models.data.security.Principal;
import eu.eudat.types.ApiMessageCode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.Order;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;
@ -18,7 +19,7 @@ import javax.annotation.Priority;
* Created by ikalyvas on 6/12/2018.
*/
@ControllerAdvice
@Priority(5)
@Order(5)
public class ControllerErrorHandler {
private static final Logger logger = LoggerFactory.getLogger(ControllerErrorHandler.class);

View File

@ -4,6 +4,7 @@ import eu.eudat.exceptions.security.UnauthorisedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.annotation.Order;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;
@ -16,7 +17,7 @@ import javax.annotation.Priority;
* Created by ikalyvas on 6/12/2018.
*/
@ControllerAdvice
@Priority(4)
@Order(4)
public class ControllerUnauthorisedHandler {
private static final Logger logger = LoggerFactory.getLogger(ControllerUnauthorisedHandler.class);

View File

@ -47,17 +47,17 @@ import java.util.stream.Stream;
public class DashBoardManager {
private static final Logger logger = LoggerFactory.getLogger(DashBoardManager.class);
private final Map<String, Comparator<RecentActivityModel>> comparators = Stream.of(new Object[][] {
{ "modified", Comparator.comparing(o -> ((RecentActivityModel)o).getModified()).reversed()},
{ "created", Comparator.comparing(o -> ((RecentActivityModel)o).getCreated()).reversed()},
{ "label", Comparator.comparing(o -> ((RecentActivityModel)o).getTitle())},
{ "status", Comparator.comparing(o -> ((RecentActivityModel)o).getStatus()).reversed()},
{ "finalizedAt", Comparator.comparing(o -> ((RecentActivityModel)o).getFinalizedAt(), Comparator.nullsLast(Comparator.naturalOrder())).reversed()},
{ "publishedAt", Comparator.comparing(o -> ((RecentActivityModel)o).getPublishedAt(), Comparator.nullsLast(Comparator.naturalOrder())).reversed()}
}).collect(Collectors.toMap(data -> (String) data[0], data -> (Comparator<RecentActivityModel>)data[1]));
private final Map<String, Comparator<RecentActivityModel>> comparators = Stream.of(new Object[][]{
{"modified", Comparator.comparing(o -> ((RecentActivityModel) o).getModified()).reversed()},
{"created", Comparator.comparing(o -> ((RecentActivityModel) o).getCreated()).reversed()},
{"label", Comparator.comparing(o -> ((RecentActivityModel) o).getTitle())},
{"status", Comparator.comparing(o -> ((RecentActivityModel) o).getStatus()).reversed()},
{"finalizedAt", Comparator.comparing(o -> ((RecentActivityModel) o).getFinalizedAt(), Comparator.nullsLast(Comparator.naturalOrder())).reversed()},
{"publishedAt", Comparator.comparing(o -> ((RecentActivityModel) o).getPublishedAt(), Comparator.nullsLast(Comparator.naturalOrder())).reversed()}
}).collect(Collectors.toMap(data -> (String) data[0], data -> (Comparator<RecentActivityModel>) data[1]));
private ApiContext apiContext;
private DatabaseRepository databaseRepository;
private final ApiContext apiContext;
private final DatabaseRepository databaseRepository;
private final DataManagementPlanManager dataManagementPlanManager;
private final DatasetManager datasetManager;
@ -152,21 +152,14 @@ public class DashBoardManager {
List<Integer> roles = new LinkedList<>();
if ((dmps == null || dmps == 0L) && (datasets == null || datasets == 0L)) {
CompletableFuture dmpFuture = dataManagementPlanRepository.getAuthenticated(dataManagementPlanRepository.getWithCriteria(dataManagementPlanCriteria), principal.getId(), roles).distinct().countAsync()
.whenComplete((dmpsStats, throwable) -> statistics.setTotalDataManagementPlanCount(dmpsStats));
CompletableFuture datasetFuture = datasetRepository.getAuthenticated( datasetRepository.getWithCriteria(datasetCriteria), user, roles).distinct().countAsync()
.whenComplete((datasetsStats, throwable) -> statistics.setTotalDataSetCount(datasetsStats));
CompletableFuture.allOf(dmpFuture, datasetFuture).join();
statistics.setTotalDataManagementPlanCount(dataManagementPlanRepository.getAuthenticated(dataManagementPlanRepository.getWithCriteria(dataManagementPlanCriteria), principal.getId(), roles).distinct().count());
statistics.setTotalDataSetCount(datasetRepository.getAuthenticated(datasetRepository.getWithCriteria(datasetCriteria), user, roles).distinct().count());
} else {
statistics.setTotalDataManagementPlanCount(dmps);
statistics.setTotalDataSetCount(datasets);
}
CompletableFuture grantFuture = grantRepository.getAuthenticated(grantRepository.getWithCriteria(grantCriteria), user).countAsync()
.whenComplete((grantsStats, throwable) -> statistics.setTotalGrantCount(grantsStats));
CompletableFuture orgnanisationFuture = organisationRepository.getAuthenticated(organisationRepository.getWithCriteria(organisationCriteria).withHint("organisationRecentActivity"), user).countAsync()
.whenComplete((organisationStats, throwable) -> statistics.setTotalOrganisationCount(organisationStats));
CompletableFuture.allOf( grantFuture, orgnanisationFuture).join();
statistics.setTotalGrantCount(grantRepository.getAuthenticated(grantRepository.getWithCriteria(grantCriteria), user).count());
statistics.setTotalOrganisationCount(organisationRepository.getAuthenticated(organisationRepository.getWithCriteria(organisationCriteria).withHint("organisationRecentActivity"), user).count());
return statistics;
}

View File

@ -65,7 +65,6 @@ import eu.eudat.types.MetricNames;
import org.apache.poi.xwpf.usermodel.XWPFDocument;
import org.apache.poi.xwpf.usermodel.XWPFParagraph;
import org.apache.poi.xwpf.usermodel.XWPFRun;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -97,21 +96,20 @@ import java.util.stream.Stream;
@Component
public class DataManagementPlanManager {
private static final Logger logger = LoggerFactory.getLogger(DataManagementPlanManager.class);
private static final ObjectMapper objectMapper = new ObjectMapper();
private final Map<NotificationType, String> notificationPaths = Stream.of(new Object[][] {
private final Map<NotificationType, String> notificationPaths = Stream.of(new Object[][]{
{NotificationType.DMP_MODIFIED, "/plans/edit"},
{NotificationType.DMP_PUBLISH, "/plans/publicEdit"},
{NotificationType.DMP_FINALISED, "/plans/edit"},
{NotificationType.DMP_MODIFIED_FINALISED, "/plans/edit"}
}).collect(Collectors.toMap(data -> (NotificationType) data[0], data -> (String) data[1]));
private ApiContext apiContext;
private DatasetManager datasetManager;
private DatabaseRepository databaseRepository;
private Environment environment;
private RDAManager rdaManager;
private UserManager userManager;
private final ApiContext apiContext;
private final DatasetManager datasetManager;
private final DatabaseRepository databaseRepository;
private final Environment environment;
private final RDAManager rdaManager;
private final UserManager userManager;
private final MetricsManager metricsManager;
private final ConfigLoader configLoader;
@ -125,6 +123,7 @@ public class DataManagementPlanManager {
this.userManager = userManager;
this.metricsManager = metricsManager;
this.configLoader = configLoader;
}
/*
@ -318,7 +317,7 @@ public class DataManagementPlanManager {
} else {
dataManagementPlan.fromDataModelNoDatasets(dataManagementPlanEntity);
}
Map dmpProperties = dataManagementPlanEntity.getDmpProperties() != null ? new org.json.JSONObject(dataManagementPlanEntity.getDmpProperties()).toMap() : null;
Map dmpProperties = dataManagementPlanEntity.getDmpProperties() != null ? apiContext.getUtilitiesService().getGenericObjectMapper().readValue(dataManagementPlanEntity.getDmpProperties(), LinkedHashMap.class) : null;
if (dmpProperties != null && dataManagementPlan.getDynamicFields() != null)
dataManagementPlan.getDynamicFields().forEach(item -> {
@ -1474,8 +1473,8 @@ public class DataManagementPlanManager {
DatasetWizardModel datasetWizardModel = new DatasetWizardModel();
Map<String, Object> properties = new HashMap<>();
if (dataset.getProperties() != null) {
JSONObject jobject = new JSONObject(dataset.getProperties());
properties = jobject.toMap();
properties = apiContext.getUtilitiesService().getGenericObjectMapper().readValue(dataset.getProperties(), LinkedHashMap.class);
;
}
PagedDatasetProfile pagedDatasetProfile = datasetManager.getPagedProfile(datasetWizardModel, dataset);
visibilityRuleService.setProperties(properties);
@ -1763,9 +1762,9 @@ public class DataManagementPlanManager {
UserInfo me = apiContext.getOperationsContext().getDatabaseRepository().getUserInfoDao().find(principal.getId());
dmp.setModified(new Date());
dmp.setCreator(me);
Map<String, String> extraProperties = objectMapper.readValue(dmp.getExtraProperties(), HashMap.class);
Map<String, String> extraProperties = apiContext.getUtilitiesService().getGenericObjectMapper().readValue(dmp.getExtraProperties(), HashMap.class);
extraProperties.put("contact", me.getId().toString());
dmp.setExtraProperties(objectMapper.writeValueAsString(extraProperties));
dmp.setExtraProperties(apiContext.getUtilitiesService().getGenericObjectMapper().writeValueAsString(extraProperties));
dmp.setVersion(0);
dmp.setStatus((short)0);
dmp.setGroupId(UUID.randomUUID());
@ -2043,7 +2042,7 @@ public class DataManagementPlanManager {
headers.setContentType(MediaType.APPLICATION_JSON);
ZenodoDeposit deposit = DMPToZenodoMapper.fromDMP(dmp, environment, configLoader);
//if (Objects.requireNonNull(environment.getProperty("spring.profiles.active")).contains("devel")) {
String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(deposit);
String json = apiContext.getUtilitiesService().getGenericObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(deposit);
logger.info(json);
//}
HttpEntity<ZenodoDeposit> request = new HttpEntity<>(deposit, headers);

View File

@ -4,6 +4,8 @@ import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.type.CollectionType;
import com.fasterxml.jackson.databind.type.TypeFactory;
import eu.eudat.data.dao.criteria.*;
import eu.eudat.data.dao.entities.DataRepositoryDao;
import eu.eudat.data.dao.entities.DatasetDao;
@ -56,8 +58,6 @@ import org.apache.poi.xwpf.extractor.XWPFWordExtractor;
import org.apache.poi.xwpf.usermodel.XWPFDocument;
import org.apache.poi.xwpf.usermodel.XWPFParagraph;
import org.apache.poi.xwpf.usermodel.XWPFRun;
import org.json.JSONArray;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -98,18 +98,18 @@ import java.util.stream.Stream;
public class DatasetManager {
private static final Logger logger = LoggerFactory.getLogger(DatasetManager.class);
private final Map<NotificationType, String> notificationPaths = Stream.of(new Object[][] {
private final Map<NotificationType, String> notificationPaths = Stream.of(new Object[][]{
{NotificationType.DATASET_MODIFIED, "/datasets/edit"},
{NotificationType.DATASET_MODIFIED_FINALISED, "/datasets/edit"}
}).collect(Collectors.toMap(data -> (NotificationType) data[0], data -> (String) data[1]));
private ApiContext apiContext;
private DatabaseRepository databaseRepository;
private DatasetRepository datasetRepository;
private BuilderFactory builderFactory;
private UserManager userManager;
private ConfigLoader configLoader;
private Environment environment;
private final ApiContext apiContext;
private final DatabaseRepository databaseRepository;
private final DatasetRepository datasetRepository;
private final BuilderFactory builderFactory;
private final UserManager userManager;
private final ConfigLoader configLoader;
private final Environment environment;
private final MetricsManager metricsManager;
private final FileManager fileManager;
@ -168,20 +168,14 @@ public class DatasetManager {
datasets = null;
}
UserInfo userInfo = builderFactory.getBuilder(UserInfoBuilder.class).id(principal.getId()).build();
// QueryableList<eu.eudat.data.entities.Dataset> items = databaseRepository.getDatasetDao().getWithCriteria(datasetTableRequest.getCriteria()).withHint(HintedModelFactory.getHint(DatasetListingModel.class));
QueryableList<eu.eudat.data.entities.Dataset> items;
if (datasets != null) {
if (!datasets.isEmpty()) {
if (datasets != null && !datasets.isEmpty()) {
//items = databaseRepository.getDatasetDao().asQueryable().withHint(HintedModelFactory.getHint(DatasetListingModel.class));
final List<UUID> datasetIds = datasets.stream().map(datasetE -> UUID.fromString(datasetE.getId())).distinct().collect(Collectors.toList());
items = databaseRepository.getDatasetDao().filterFromElastic(datasetTableRequest.getCriteria(), datasetIds).withHint(HintedModelFactory.getHint(DatasetListingModel.class));//.withFields(Collections.singletonList("id"));
//items.where((builder, root) -> root.get("id").in(datasetIds));
} else {
items = databaseRepository.getDatasetDao().getWithCriteria(datasetTableRequest.getCriteria()).withHint(HintedModelFactory.getHint(DatasetListingModel.class));//.withFields(Collections.singletonList("id"));
//items.where((builder, root) -> root.get("id").in(new UUID[]{UUID.randomUUID()}));
}
} else {
items = databaseRepository.getDatasetDao().getWithCriteria(datasetTableRequest.getCriteria()).withHint(HintedModelFactory.getHint(DatasetListingModel.class));//.withFields(Collections.singletonList("id"));
}
@ -192,11 +186,11 @@ public class DatasetManager {
if (principal.getId() == null) {
throw new UnauthorisedException("You are not allowed to access those datasets");
}
UserInfo userInfo = apiContext.getOperationsContext().getDatabaseRepository().getUserInfoDao().find(principal.getId());//builderFactory.getBuilder(UserInfoBuilder.class).id(principal.getId()).build();
if (datasetTableRequest.getCriteria().getRole() != null) {
roles.add(datasetTableRequest.getCriteria().getRole());
}
authItems = databaseRepository.getDatasetDao().getAuthenticated(items, userInfo, roles).distinct();
pagedItems = PaginationManager.applyPaging(authItems, datasetTableRequest);
} else {
if (principal.getId() != null && datasetTableRequest.getCriteria().getRole() != null) {
items.where((builder, root) -> {
@ -204,12 +198,12 @@ public class DatasetManager {
return builder.and(builder.equal(userJoin.join("user", JoinType.LEFT).get("id"), principal.getId()), builder.equal(userJoin.get("role"), datasetTableRequest.getCriteria().getRole()));
});
}
String[] strings = new String[1];
//String[] strings = new String[1];
//strings[0] = "-dmp:publishedAt|join|";
//datasetTableRequest.getOrderings().setFields(strings);
authItems = items;
pagedItems = PaginationManager.applyPaging(items, datasetTableRequest);
}
pagedItems = PaginationManager.applyPaging(authItems, datasetTableRequest);
DataTableData<DatasetListingModel> dataTable = new DataTableData<>();
@ -387,8 +381,12 @@ public class DatasetManager {
eu.eudat.models.data.user.composite.DatasetProfile datasetprofile = userManager.generateDatasetProfileModel(datasetEntity.getProfile());
datasetprofile.setStatus(dataset.getStatus());
if (datasetEntity.getProperties() != null) {
JSONObject jObject = new JSONObject(datasetEntity.getProperties());
Map<String, Object> properties = jObject.toMap();
Map<String, Object> properties = null;
try {
properties = apiContext.getUtilitiesService().getGenericObjectMapper().readValue(datasetEntity.getProperties(), LinkedHashMap.class);
} catch (JsonProcessingException e) {
logger.error(e.getLocalizedMessage(), e);
}
datasetprofile.fromJsonObject(properties);
}
PagedDatasetProfile pagedDatasetProfile = new PagedDatasetProfile();
@ -456,8 +454,7 @@ public class DatasetManager {
Map<String, Object> properties = new HashMap<>();
if (datasetEntity.getProperties() != null) {
JSONObject jObject = new JSONObject(datasetEntity.getProperties());
properties = jObject.toMap();
properties = apiContext.getUtilitiesService().getGenericObjectMapper().readValue(datasetEntity.getProperties(), LinkedHashMap.class);
}
wordBuilder.addParagraphContent("Dataset Description", document, ParagraphStyle.HEADER2, BigInteger.ZERO);
@ -489,8 +486,7 @@ public class DatasetManager {
Map<String, Object> properties = new HashMap<>();
if (dataset.getDatasetProfileDefinition() != null) {
JSONObject jObject = new JSONObject(propertiesModelToString(dataset.getDatasetProfileDefinition()));
properties = jObject.toMap();
properties = apiContext.getUtilitiesService().getGenericObjectMapper().readValue(propertiesModelToString(dataset.getDatasetProfileDefinition()), LinkedHashMap.class);
}
wordBuilder.addParagraphContent("Dataset Description", document, ParagraphStyle.HEADER2, BigInteger.ZERO);
@ -547,8 +543,7 @@ public class DatasetManager {
throw new UnauthorisedException();
Map<String, Object> properties = new HashMap<>();
if (datasetEntity.getProperties() != null) {
JSONObject jobject = new JSONObject(datasetEntity.getProperties());
properties = jobject.toMap();
properties = apiContext.getUtilitiesService().getGenericObjectMapper().readValue(datasetEntity.getProperties(), LinkedHashMap.class);
}
PagedDatasetProfile pagedDatasetProfile = getPagedProfile(dataset, datasetEntity);
visibilityRuleService.setProperties(properties);
@ -609,6 +604,7 @@ public class DatasetManager {
// datasetWizardModel.setDatasetProfileDefinition(getPagedProfile(datasetWizardModel, dataset1));
UUID dmpId = dataset1.getDmp().getId();
dataset1.getDmp().setUsers(new HashSet<>(apiContext.getOperationsContext().getDatabaseRepository().getUserDmpDao().asQueryable().where((builder, root) -> builder.equal(root.get("dmp").get("id"), dmpId)).toList()));
this.deleteOldFilesAndAddNew(datasetWizardModel, userInfo);
updateTags(dataset1, datasetWizardModel.getTags());
if (sendNotification) {
if (dataset1.getStatus() != Dataset.Status.FINALISED.getValue()) {
@ -618,9 +614,6 @@ public class DatasetManager {
}
}
this.deleteOldFilesAndAddNew(datasetWizardModel, userInfo);
return dataset1;
}
@ -631,7 +624,7 @@ public class DatasetManager {
ObjectMapper mapper = new ObjectMapper();
mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
String json = mapper.writeValueAsString(datasetWizardModel.getDatasetProfileDefinition());;
String json = mapper.writeValueAsString(datasetWizardModel.getDatasetProfileDefinition());
JsonNode propertiesJson = mapper.readTree(json);
Set<JsonNode> uploadNodes = new HashSet<>();
@ -641,11 +634,15 @@ public class DatasetManager {
JsonNode value = node.get("value");
if (value != null && !value.toString().equals("\"\"") && !value.toString().equals("null")) {
String stringValue = value.toString().replaceAll("=", ":");
JSONObject values = new JSONObject(stringValue);
Map<String, Object> data = ((JSONObject) values).toMap();
Map<String, Object> data = null;
try {
data = apiContext.getUtilitiesService().getGenericObjectMapper().readValue(stringValue, LinkedHashMap.class);
} catch (JsonProcessingException e) {
logger.error(e.getLocalizedMessage(), e);
}
int index = fileUploadIds.indexOf(data.get("id").toString());
if(index != -1) {
if (index != -1) {
// file in DB is the same as file in the Dataset
fileUploadIds.remove(index);
fileUploads.remove(index);
@ -657,9 +654,7 @@ public class DatasetManager {
});
// old files in DB that are not contained anymore in the Dataset -> mark them as Deleted
fileUploads.forEach(fileUpload -> {
fileManager.markOldFileAsDeleted(fileUpload);
});
fileUploads.forEach(fileManager::markOldFileAsDeleted);
}
private void sendNotification(Dataset dataset, DMP dmp, UserInfo user, NotificationType notificationType) {
@ -707,9 +702,9 @@ public class DatasetManager {
nodeList = (NodeList) xPath.compile(expression).evaluate(xmlDocument, XPathConstants.NODESET);
JSONObject obj = new JSONObject(dataset.getProperties());
Map<String, Object> obj = apiContext.getUtilitiesService().getGenericObjectMapper().readValue(dataset.getProperties(), LinkedHashMap.class);
VisibilityRuleService visibilityRuleService = new VisibilityRuleServiceImpl();
visibilityRuleService.setProperties(obj.toMap());
visibilityRuleService.setProperties(obj);
dataset.setProfile(profile);
PagedDatasetProfile pagedDatasetProfile = this.getPagedProfile(new DatasetWizardModel(), dataset);
@ -719,7 +714,7 @@ public class DatasetManager {
String failedField = null;
for (String validator : datasetProfileValidators) {
if (obj.has(validator) && isNullOrEmpty(obj.getString(validator)) && isElementVisible(nodeList, validator, visibilityRuleService)) {
if (obj.containsKey(validator) && isNullOrEmpty(obj.get(validator).toString()) && isElementVisible(nodeList, validator, visibilityRuleService)) {
//throw new Exception("Field value of " + validator + " must be filled.");
failedField = validator;
break;
@ -759,8 +754,12 @@ public class DatasetManager {
private String propertiesModelToString(PagedDatasetProfile pagedDatasetProfile) {
Map<String, Object> values = new LinkedHashMap<>();
pagedDatasetProfile.toMap(values);
JSONObject jobject = new JSONObject(values);
return jobject.toString();
try {
return apiContext.getUtilitiesService().getGenericObjectMapper().writeValueAsString(values);
} catch (JsonProcessingException e) {
logger.error(e.getLocalizedMessage(), e);
}
return null;
}
public void updateTags(Dataset datasetEntity, List<Tag> tags) throws Exception {
@ -969,12 +968,9 @@ public class DatasetManager {
)))))
.collect(Collectors.toMap(DatasetImportField::getId, DatasetImportField::getValue));
// Transforms map into json file.
JSONObject jsonDatasetProperties = new JSONObject(importMap);
// Creates the entity data set to save.
eu.eudat.data.entities.Dataset entity = new Dataset();
entity.setProperties(jsonDatasetProperties.toString());
entity.setProperties(apiContext.getUtilitiesService().getGenericObjectMapper().writeValueAsString(importMap));
entity.setLabel(importFile.getOriginalFilename());
DMP dmp = new DMP();
dmp.setId(UUID.fromString(dmpId));
@ -1047,8 +1043,12 @@ public class DatasetManager {
eu.eudat.models.data.user.composite.DatasetProfile datasetprofile = userManager.generateDatasetProfileModel(profile);
datasetprofile.setStatus(datasetEntity.getStatus());
if (datasetEntity.getProperties() != null) {
JSONObject jobject = new JSONObject(datasetEntity.getProperties());
Map<String, Object> properties = jobject.toMap();
Map<String, Object> properties = null;
try {
properties = apiContext.getUtilitiesService().getGenericObjectMapper().readValue(datasetEntity.getProperties(), LinkedHashMap.class);
} catch (JsonProcessingException e) {
logger.error(e.getLocalizedMessage(), e);
}
datasetprofile.fromJsonObject(properties);
}
PagedDatasetProfile pagedDatasetProfile = new PagedDatasetProfile();
@ -1117,11 +1117,15 @@ public class DatasetManager {
JsonNode value = node.get("value");
if (!value.toString().equals("\"\"") && !value.toString().equals("null") && value.toString().startsWith("[")) {
String stringValue = value.toString().replaceAll("=", ":");
JSONArray values = new JSONArray(stringValue);
values.iterator().forEachRemaining(element -> {
Map<String, Object> data = ((JSONObject) element).toMap();
this.addTag(tags, wizardModel.getTags(), data.get("id").toString(), data.get("name").toString());
});
List<Tag> values = null;
try {
CollectionType tagCollection = TypeFactory.defaultInstance().constructCollectionType(LinkedList.class, Tag.class);
values = apiContext.getUtilitiesService().getGenericObjectMapper().readValue(stringValue, tagCollection);
values.iterator().forEachRemaining(element -> this.addTag(tags, wizardModel.getTags(), element.getId(), element.getName()));
} catch (JsonProcessingException e) {
logger.error(e.getLocalizedMessage(), e);
}
} else {
List<String> values = Arrays.asList(value.textValue().split(", "));
List<Tag> tagValues = values.stream().map(stringValue -> new Tag(stringValue, stringValue)).collect(Collectors.toList());
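For the tags branch above, the JSONArray iteration is replaced by Jackson's CollectionType so the JSON array is bound directly to List<Tag>. A minimal sketch of that pattern; the nested Tag class here is only an assumed stand-in for the project's Tag model:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.type.CollectionType;
import com.fasterxml.jackson.databind.type.TypeFactory;

import java.util.LinkedList;
import java.util.List;

class TagListSketch {
    static List<Tag> readTags(ObjectMapper mapper, String json) throws Exception {
        // A List<Tag> type token lets Jackson bind each array element to a Tag instance
        CollectionType tagCollection = TypeFactory.defaultInstance()
                .constructCollectionType(LinkedList.class, Tag.class);
        return mapper.readValue(json, tagCollection);
    }

    // Assumed shape of the Tag model (id/name with a default constructor and setters)
    static class Tag {
        private String id;
        private String name;
        public String getId() { return id; }
        public void setId(String id) { this.id = id; }
        public String getName() { return name; }
        public void setName(String name) { this.name = name; }
    }
}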

View File

@ -60,6 +60,10 @@ public class FileManager {
databaseRepository.getFileUploadDao().createOrUpdate(fileUpload);
}
public List<FileUpload> getDeletedFileUploads() {
return databaseRepository.getFileUploadDao().asQueryable().where(((builder, root) -> builder.equal(root.get("isDeleted"), true))).toList();
}
public List<FileUpload> getFileUploadsForEntityId(String entityId) {
return databaseRepository.getFileUploadDao().asQueryable()
.where((builder, root) -> builder.equal(root.get("entityId"), entityId)).toList();
@ -74,9 +78,7 @@ public class FileManager {
public void markAllFilesOfEntityIdAsDeleted(UUID entityId) {
List<FileUpload> fileUploads = this.getCurrentFileUploadsForEntityId(entityId);
fileUploads.forEach(fileUpload -> {
this.markOldFileAsDeleted(fileUpload);
});
fileUploads.forEach(this::markOldFileAsDeleted);
}
public void createFile(String id, String fileName, String fileType, String entityId, FileUpload.EntityType entityType, UserInfo userInfo) {

View File

@ -33,7 +33,6 @@ import eu.eudat.models.data.userinfo.UserProfile;
import eu.eudat.queryable.QueryableList;
import eu.eudat.types.Authorities;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -129,11 +128,11 @@ public class UserManager {
eu.eudat.data.entities.UserInfo userInfo = apiContext.getOperationsContext().getDatabaseRepository().getUserInfoDao().find(principal.getId());
apiContext.getOperationsContext().getDatabaseRepository().detachEntity(userInfo);
HashMap<String, Object> result =
new ObjectMapper().readValue(userInfo.getAdditionalinfo(), HashMap.class);
apiContext.getUtilitiesService().getGenericObjectMapper().readValue(userInfo.getAdditionalinfo(), HashMap.class);
userInfo.setName(settings.entrySet().stream().filter(entry -> entry.getKey().equals("name")).filter(Objects::nonNull).map(entry -> entry.getValue().toString()).findFirst().orElse(userInfo.getName()));
settings.remove("name");
result.putAll(settings);
userInfo.setAdditionalinfo(new JSONObject(result).toString());
userInfo.setAdditionalinfo(apiContext.getUtilitiesService().getGenericObjectMapper().writeValueAsString(result));
apiContext.getOperationsContext().getDatabaseRepository().getUserInfoDao()
.createOrUpdate(userInfo);
}

View File

@ -24,7 +24,7 @@ import eu.eudat.models.data.datasetprofile.RenderStyle;
import eu.eudat.models.data.datasetwizard.DatasetWizardModel;
import eu.eudat.models.data.externaldataset.ExternalAutocompleteFieldModel;
import eu.eudat.models.data.license.LicenseModel;
import org.json.JSONObject;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
@ -35,6 +35,7 @@ import java.util.stream.Collectors;
public class PrefillingMapper {
private static final ObjectMapper mapper = new ObjectMapper().configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true);
private static final List<String> tagDelimiters = Arrays.asList(", ", "; ");
public static DatasetWizardModel mapPrefilledEntityToDatasetWizard(Map<String, Object> prefilledEntity, PrefillingGet prefillingGet, String type,
DatasetProfile profile, DatasetManager datasetManager, LicenseManager licenseManager) throws Exception {
@ -155,10 +156,10 @@ public class PrefillingMapper {
properties.put(id, mapper.valueToTree(parseTags(parsedValue)).toString());
break;
case DATASET_IDENTIFIER:
JSONObject datasetID = new JSONObject();
datasetID.put("identifier", parsedValue);
if(type.equals("zenodo")){
datasetID.put("type", "doi");
Map<String, String> datasetId = new LinkedHashMap<>();
datasetId.put("identifier", parsedValue);
if (type.equals("zenodo")) {
datasetId.put("type", "doi");
}
properties.put(id, mapper.valueToTree(datasetId).toString());
break;
@ -225,13 +226,23 @@ public class PrefillingMapper {
}
private static List<Tag> parseTags(String value) throws JsonProcessingException {
if (value == null || value.isEmpty())
return new LinkedList<>();
String[] rawTags = value.split(", ");
String[] rawTags = null;
for (String tagDelim : tagDelimiters) {
rawTags = value.split(tagDelim);
if (rawTags.length > 1) {
break;
}
}
if (rawTags != null) {
List<Tag> parsedTags = new LinkedList<>();
for (String rawTag : rawTags) {
parsedTags.add(new Tag(rawTag, rawTag));
}
return parsedTags;
}
return null;
}
}
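parseTags now tries each entry of tagDelimiters in order and keeps the first split that yields more than one token; if none does, the whole value ends up as a single tag. A small standalone sketch of that fallback (hypothetical inputs):

import java.util.Arrays;
import java.util.List;

class TagSplitSketch {
    private static final List<String> tagDelimiters = Arrays.asList(", ", "; ");

    // Mirrors the delimiter fallback in parseTags
    static String[] split(String value) {
        String[] rawTags = null;
        for (String delim : tagDelimiters) {
            rawTags = value.split(delim);
            if (rawTags.length > 1) {
                break;
            }
        }
        return rawTags;
    }

    public static void main(String[] args) {
        System.out.println(Arrays.toString(split("alpha, beta, gamma"))); // [alpha, beta, gamma]
        System.out.println(Arrays.toString(split("alpha; beta; gamma"))); // [alpha, beta, gamma]
        System.out.println(Arrays.toString(split("alpha")));              // [alpha]
    }
}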

View File

@ -11,7 +11,6 @@ import java.net.*;
@RestController
@CrossOrigin
public class Proxy {
private String allowedHost;

View File

@ -1,6 +1,5 @@
package eu.eudat.logic.security.customproviders.B2Access;
import com.google.api.client.repackaged.org.apache.commons.codec.binary.Base64;
import eu.eudat.logic.security.validators.b2access.helpers.B2AccessResponseToken;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
@ -14,6 +13,7 @@ import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;
import java.nio.charset.Charset;
import java.util.Base64;
import java.util.Map;
@Component("b2AccessCustomProvider")
@ -60,8 +60,7 @@ public class B2AccessCustomProviderImpl implements B2AccessCustomProvider {
private HttpHeaders createBasicAuthHeaders(String username, String password) {
return new HttpHeaders() {{
String auth = username + ":" + password;
byte[] encodedAuth = Base64.encodeBase64(
auth.getBytes(Charset.forName("US-ASCII")));
byte[] encodedAuth = Base64.getEncoder().encode(auth.getBytes(Charset.forName("US-ASCII")));
String authHeader = "Basic " + new String(encodedAuth);
set("Authorization", authHeader);
}};
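The commons-codec dependency is dropped here in favour of the JDK's java.util.Base64. A minimal sketch of the same Basic-auth header construction outside the provider class (helper name is illustrative):

import java.nio.charset.StandardCharsets;
import java.util.Base64;

class BasicAuthSketch {
    static String basicAuthHeader(String username, String password) {
        String auth = username + ":" + password;
        // JDK replacement for org.apache.commons.codec.binary.Base64.encodeBase64(...)
        String encoded = Base64.getEncoder().encodeToString(auth.getBytes(StandardCharsets.US_ASCII));
        return "Basic " + encoded;
    }
}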

View File

@ -5,7 +5,7 @@ import com.google.api.client.googleapis.auth.oauth2.GoogleIdToken.Payload;
import com.google.api.client.googleapis.auth.oauth2.GoogleIdTokenVerifier;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.javanet.NetHttpTransport;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.client.json.gson.GsonFactory;
import eu.eudat.logic.security.validators.TokenValidator;
import eu.eudat.logic.security.validators.TokenValidatorFactoryImpl;
import eu.eudat.logic.services.operations.authentication.AuthenticationService;
@ -29,7 +29,7 @@ public class GoogleTokenValidator implements TokenValidator {
@Autowired
public GoogleTokenValidator(Environment environment, AuthenticationService nonVerifiedUserAuthenticationService) {
this.nonVerifiedUserAuthenticationService = nonVerifiedUserAuthenticationService;
verifier = new GoogleIdTokenVerifier.Builder(transport, JacksonFactory.getDefaultInstance())
verifier = new GoogleIdTokenVerifier.Builder(transport, GsonFactory.getDefaultInstance())
.setAudience(Collections.singletonList(environment.getProperty("google.login.clientId")))
.build();
}

View File

@ -1,5 +1,6 @@
package eu.eudat.logic.services.operations.authentication;
import com.fasterxml.jackson.core.JsonProcessingException;
import eu.eudat.data.entities.Credential;
import eu.eudat.data.entities.UserInfo;
import eu.eudat.data.entities.UserRole;
@ -14,7 +15,6 @@ import eu.eudat.models.data.login.Credentials;
import eu.eudat.models.data.loginprovider.LoginProviderUser;
import eu.eudat.models.data.security.Principal;
import eu.eudat.types.Authorities;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.env.Environment;
@ -164,8 +164,13 @@ public abstract class AbstractAuthenticationService implements AuthenticationSer
apiContext.getOperationsContext().getDatabaseRepository().getUserRoleDao().createOrUpdate(role);
} else {
Map<String, Object> additionalInfo = userInfo.getAdditionalinfo() != null ?
new JSONObject(userInfo.getAdditionalinfo()).toMap() : new HashMap<>();
Map<String, Object> additionalInfo = null;
try {
additionalInfo = userInfo.getAdditionalinfo() != null ?
apiContext.getUtilitiesService().getGenericObjectMapper().readValue(userInfo.getAdditionalinfo(), LinkedHashMap.class) : new HashMap<>();
} catch (JsonProcessingException e) {
logger.error(e.getLocalizedMessage(), e);
}
if (profile.getAvatarUrl() != null && !profile.getAvatarUrl().isEmpty() && !profile.getAvatarUrl().equals("null")) {
additionalInfo.put("avatarUrl", profile.getAvatarUrl());
}
@ -182,7 +187,11 @@ public abstract class AbstractAuthenticationService implements AuthenticationSer
additionalInfo.put("zenodoEmail", profile.getEmail());
}
userInfo.setLastloggedin(new Date());
userInfo.setAdditionalinfo(new JSONObject(additionalInfo).toString());
try {
userInfo.setAdditionalinfo(apiContext.getUtilitiesService().getGenericObjectMapper().writeValueAsString(additionalInfo));
} catch (JsonProcessingException e) {
logger.error(e.getLocalizedMessage(), e);
}
Set<Credential> credentials = userInfo.getCredentials();
if (credentials.contains(credential)) {
Credential oldCredential = credentials.stream().filter(item -> credential.getProvider().equals(item.getProvider())).findFirst().get();

View File

@ -1,27 +1,21 @@
package eu.eudat.logic.services.operations.authentication;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.data.entities.Credential;
import eu.eudat.data.entities.UserInfo;
import eu.eudat.data.entities.UserRole;
import eu.eudat.data.entities.UserToken;
import eu.eudat.exceptions.security.NullEmailException;
import eu.eudat.logic.builders.entity.CredentialBuilder;
import eu.eudat.logic.builders.entity.UserInfoBuilder;
import eu.eudat.logic.builders.entity.UserTokenBuilder;
import eu.eudat.logic.builders.model.models.PrincipalBuilder;
import eu.eudat.logic.security.validators.TokenValidatorFactoryImpl;
import eu.eudat.logic.services.ApiContext;
import eu.eudat.models.data.login.Credentials;
import eu.eudat.models.data.loginprovider.LoginProviderUser;
import eu.eudat.models.data.security.Principal;
import eu.eudat.types.Authorities;
import org.json.JSONObject;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Service;
import java.time.Instant;
import java.util.*;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
@Service("verifiedUserAuthenticationService")

View File

@ -1,7 +1,6 @@
package eu.eudat.logic.services.utilities;
import eu.eudat.models.data.mail.SimpleMail;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -17,10 +16,15 @@ import javax.mail.MessagingException;
import javax.mail.internet.MimeBodyPart;
import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMultipart;
import java.io.*;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
@Service("mailService")
@ -84,10 +88,11 @@ public class MailServiceImpl implements MailService {
Resource resource = applicationContext.getResource(resourceTemplate);
try {
InputStream inputStream = resource.getInputStream();
StringWriter writer = new StringWriter();
IOUtils.copy(inputStream, writer, "UTF-8");
String template = new BufferedReader(
new InputStreamReader(inputStream, StandardCharsets.UTF_8)
).lines().collect(Collectors.joining("\n"));
inputStream.close();
return writer.toString();
return template;
} catch (IOException e) {
logger.error(e.getMessage(), e);
}
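The template loader above swaps commons-io's IOUtils for plain JDK stream reading. A minimal sketch of the same read-to-String step, using try-with-resources so the stream is always closed (hypothetical helper):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.stream.Collectors;

class TemplateReadSketch {
    static String readTemplate(InputStream inputStream) throws IOException {
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(inputStream, StandardCharsets.UTF_8))) {
            // Equivalent of IOUtils.copy(inputStream, writer, "UTF-8") + writer.toString()
            return reader.lines().collect(Collectors.joining("\n"));
        }
    }
}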

View File

@ -1,5 +1,6 @@
package eu.eudat.logic.services.utilities;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.logic.services.forms.VisibilityRuleService;
/**
@ -12,4 +13,6 @@ public interface UtilitiesService {
MailService getMailService();
ConfirmationEmailService getConfirmationEmailService();
ObjectMapper getGenericObjectMapper();
}

View File

@ -1,5 +1,6 @@
package eu.eudat.logic.services.utilities;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.logic.services.forms.VisibilityRuleService;
import eu.eudat.logic.services.forms.VisibilityRuleServiceImpl;
import org.springframework.beans.factory.annotation.Autowired;
@ -14,12 +15,14 @@ public class UtilitiesServiceImpl implements UtilitiesService {
private InvitationService invitationService;
private MailService mailService;
private ConfirmationEmailService confirmationEmailService;
private final ObjectMapper objectMapper;
@Autowired
public UtilitiesServiceImpl(InvitationService invitationService, MailService mailService, ConfirmationEmailService confirmationEmailService) {
this.invitationService = invitationService;
this.mailService = mailService;
this.confirmationEmailService = confirmationEmailService;
this.objectMapper = new ObjectMapper();
}
@Override
@ -27,6 +30,11 @@ public class UtilitiesServiceImpl implements UtilitiesService {
return confirmationEmailService;
}
@Override
public ObjectMapper getGenericObjectMapper() {
return this.objectMapper;
}
@Override
public InvitationService getInvitationService() {
return invitationService;

View File

@ -1,5 +1,6 @@
package eu.eudat.logic.utilities.builders;
import eu.eudat.logic.utilities.helpers.FieldFactory;
import eu.eudat.models.data.entities.xmlmodels.datasetprofiledefinition.DatabaseViewStyleDefinition;
import eu.eudat.models.data.components.commons.datafield.*;
import eu.eudat.models.data.entities.xmlmodels.modeldefinition.DatabaseModelDefinition;
@ -20,9 +21,7 @@ public class ModelBuilder {
for (U item : items) {
try {
list.add(item.toDatabaseDefinition(clazz.newInstance()));
} catch (InstantiationException e) {
logger.error(e.getMessage(), e);
} catch (IllegalAccessException e) {
} catch (InstantiationException | IllegalAccessException e) {
logger.error(e.getMessage(), e);
}
}
@ -34,9 +33,7 @@ public class ModelBuilder {
for (U item : items) {
try {
list.add(item.toDatabaseDefinition(clazz.newInstance()));
} catch (InstantiationException e) {
logger.error(e.getMessage(), e);
} catch (IllegalAccessException e) {
} catch (InstantiationException | IllegalAccessException e) {
logger.error(e.getMessage(), e);
}
}
@ -50,64 +47,71 @@ public class ModelBuilder {
U modelItem = clazz.newInstance();
modelItem.fromDatabaseDefinition(item);
list.add(modelItem);
} catch (InstantiationException e) {
logger.error(e.getMessage(), e);
} catch (IllegalAccessException e) {
} catch (InstantiationException | IllegalAccessException e) {
logger.error(e.getMessage(), e);
}
}
return list;
}
public <U> FieldData<U> toFieldData(Object data, String type, Element dataElement) {
if (type.equals("combobox")) {
public FieldData toFieldData(Object data, String type, Element dataElement) {
if (dataElement != null && dataElement.hasAttribute("type")) {
logger.info("Type " + type + " SubType " + dataElement.getAttribute("type"));
return FieldFactory.fromData(type, data, dataElement.getAttribute("type"));
}
return FieldFactory.fromData(type, data);
/*if (type.equals("combobox")) {
if (dataElement != null) {
if (dataElement.getAttribute("type").equals("autocomplete")) {
return (FieldData<U>) new AutoCompleteData().fromData(data);
return new AutoCompleteData().fromData(data);
} else if (dataElement.getAttribute("type").equals("wordlist"))
return (FieldData<U>) new WordListData().fromData(data);
return new WordListData().fromData(data);
}
}
if (type.equals("internalDmpEntities")) {
if (dataElement != null) {
if (dataElement.getAttribute("type").equals("researchers")) {
return (FieldData<U>) new ResearchersAutoCompleteData().fromData(data);
return new ResearchersAutoCompleteData().fromData(data);
}
else if (dataElement.getAttribute("type").equals("datasets"))
return (FieldData<U>) new DatasetsAutoCompleteData().fromData(data);
return new DatasetsAutoCompleteData().fromData(data);
else if (dataElement.getAttribute("type").equals("dmps"))
return (FieldData<U>) new DMPsAutoCompleteData().fromData(data);
return new DMPsAutoCompleteData().fromData(data);
}
}
if (type.equals("booleanDecision")) return (FieldData<U>) new BooleanDecisionData().fromData(data);
if (type.equals("radiobox")) return (FieldData<U>) new RadioBoxData().fromData(data);
if (type.equals("checkBox")) return (FieldData<U>) new CheckBoxData().fromData(data);
if (type.equals("freetext")) return (FieldData<U>) new FreeTextData().fromData(data);
if (type.equals("textarea")) return (FieldData<U>) new TextAreaData().fromData(data);
if (type.equals("richTextarea")) return (FieldData<U>) new RichTextAreaData().fromData(data);
if (type.equals("upload")) return (FieldData<U>) new UploadData().fromData(data);
if (type.equals("booleanDecision")) return new BooleanDecisionData().fromData(data);
if (type.equals("radiobox")) return new RadioBoxData().fromData(data);
if (type.equals("checkBox")) return new CheckBoxData().fromData(data);
if (type.equals("freetext")) return new FreeTextData().fromData(data);
if (type.equals("textarea")) return new TextAreaData().fromData(data);
if (type.equals("richTextarea")) return new RichTextAreaData().fromData(data);
if (type.equals("upload")) return new UploadData().fromData(data);
// if (type.equals("table")) return (FieldData<U>) new TableData().fromData(data);
if (type.equals("datePicker")) return (FieldData<U>) new DatePickerData().fromData(data);
if (type.equals("externalDatasets")) return (FieldData<U>) new ExternalDatasetsData().fromData(data);
if (type.equals("dataRepositories")) return (FieldData<U>) new DataRepositoriesData().fromData(data);
if (type.equals("pubRepositories")) return (FieldData<U>) new DataRepositoriesData().fromData(data);
if (type.equals("journalRepositories")) return (FieldData<U>) new DataRepositoriesData().fromData(data);
if (type.equals("taxonomies")) return (FieldData<U>) new TaxonomiesData().fromData(data);
if (type.equals("licenses")) return (FieldData<U>) new LicensesData().fromData(data);
if (type.equals("publications")) return (FieldData<U>) new PublicationsData().fromData(data);
if (type.equals("registries")) return (FieldData<U>) new RegistriesData().fromData(data);
if (type.equals("services")) return (FieldData<U>) new ServicesData().fromData(data);
if (type.equals("tags")) return (FieldData<U>) new TagsData().fromData(data);
if (type.equals("researchers")) return (FieldData<U>) new ResearcherData().fromData(data);
if (type.equals("organizations")) return (FieldData<U>) new OrganizationsData().fromData(data);
if (type.equals("datasetIdentifier")) return (FieldData<U>) new DatasetIdentifierData().fromData(data);
if (type.equals("currency")) return (FieldData<U>) new CurrencyData().fromData(data);
if (type.equals("validation")) return (FieldData<U>) new ValidationData().fromData(data);
return null;
if (type.equals("datePicker")) return new DatePickerData().fromData(data);
if (type.equals("externalDatasets")) return new ExternalDatasetsData().fromData(data);
if (type.equals("dataRepositories")) return new DataRepositoriesData().fromData(data);
if (type.equals("pubRepositories")) return new DataRepositoriesData().fromData(data);
if (type.equals("journalRepositories")) return new DataRepositoriesData().fromData(data);
if (type.equals("taxonomies")) return new TaxonomiesData().fromData(data);
if (type.equals("licenses")) return new LicensesData().fromData(data);
if (type.equals("publications")) return new PublicationsData().fromData(data);
if (type.equals("registries")) return new RegistriesData().fromData(data);
if (type.equals("services")) return new ServicesData().fromData(data);
if (type.equals("tags")) return new TagsData().fromData(data);
if (type.equals("researchers")) return new ResearcherData().fromData(data);
if (type.equals("organizations")) return new OrganizationsData().fromData(data);
if (type.equals("datasetIdentifier")) return new DatasetIdentifierData().fromData(data);
if (type.equals("currency")) return new CurrencyData().fromData(data);
if (type.equals("validation")) return new ValidationData().fromData(data);*/
//return null;
}
public <U> FieldData<U> toFieldData(Object data, String type) {
if (type.equals("combobox")) {
public FieldData toFieldData(Object data, String type) {
if (data != null && ((Map<String, Object>) data).containsKey("type")) {
return FieldFactory.fromData(type, data, (String) ((Map<String, Object>) data).get("type"));
}
return FieldFactory.fromData(type, data);
/*if (type.equals("combobox")) {
String comboboxType = (String) ((Map<String, Object>) data).get("type");
if (comboboxType.equals("autocomplete")) {
return (FieldData<U>) new AutoCompleteData().fromData(data);
@ -150,6 +154,6 @@ public class ModelBuilder {
if (type.equals("datasetIdentifier")) return (FieldData<U>) new DatasetIdentifierData().fromData(data);
if (type.equals("currency")) return (FieldData<U>) new CurrencyData().fromData(data);
if (type.equals("validation")) return (FieldData<U>) new ValidationData().fromData(data);
return null;
return null;*/
}
}

View File

@ -1,7 +1,6 @@
package eu.eudat.logic.utilities.documents.pdf;
import eu.eudat.logic.utilities.documents.helpers.FileEnvelope;
import org.apache.commons.io.IOUtils;
import org.springframework.core.env.Environment;
import org.springframework.core.io.FileSystemResource;
import org.springframework.http.HttpEntity;
@ -36,7 +35,8 @@ public class PDFUtils {
File resultPdf = new File(environment.getProperty("temp.temp") + uuid + ".pdf");
FileOutputStream output = new FileOutputStream(resultPdf);
IOUtils.write(queueResult, output);
output.write(queueResult);
output.flush();
output.close();
Files.deleteIfExists(file.getFile().toPath());

View File

@ -1,5 +1,6 @@
package eu.eudat.logic.utilities.documents.word;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.logic.services.forms.VisibilityRuleService;
@ -15,11 +16,12 @@ import eu.eudat.models.data.user.composite.PagedDatasetProfile;
import org.apache.poi.openxml4j.exceptions.InvalidFormatException;
import org.apache.poi.util.Units;
import org.apache.poi.xwpf.usermodel.*;
import org.json.JSONArray;
import org.json.JSONException;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.openxmlformats.schemas.wordprocessingml.x2006.main.*;
import org.openxmlformats.schemas.wordprocessingml.x2006.main.CTAbstractNum;
import org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDecimalNumber;
import org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLvl;
import org.openxmlformats.schemas.wordprocessingml.x2006.main.STNumberFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.env.Environment;
@ -45,7 +47,7 @@ import static org.apache.poi.xwpf.usermodel.Document.*;
public class WordBuilder {
private static final Logger logger = LoggerFactory.getLogger(WordBuilder.class);
private static final Map<String, Integer> IMAGE_TYPE_MAP = Stream.of(new Object[][] {
private static final Map<String, Integer> IMAGE_TYPE_MAP = Stream.of(new Object[][]{
{"image/jpeg", PICTURE_TYPE_JPEG},
{"image/png", PICTURE_TYPE_PNG},
{"image/gif", PICTURE_TYPE_GIF},
@ -53,18 +55,19 @@ public class WordBuilder {
{"image/bmp", PICTURE_TYPE_BMP},
{"image/wmf", PICTURE_TYPE_WMF}
}
).collect(Collectors.toMap(objects -> (String)objects[0], o -> (Integer)o[1]));
).collect(Collectors.toMap(objects -> (String) objects[0], o -> (Integer) o[1]));
private Map<ParagraphStyle, ApplierWithValue<XWPFDocument, Object, XWPFParagraph>> options = new HashMap<>();
private CTAbstractNum cTAbstractNum;
private final Map<ParagraphStyle, ApplierWithValue<XWPFDocument, Object, XWPFParagraph>> options = new HashMap<>();
private final CTAbstractNum cTAbstractNum;
private BigInteger numId;
private Integer indent;
private static final ObjectMapper mapper = new ObjectMapper();
private final ObjectMapper mapper;
public WordBuilder(Environment environment) {
this.cTAbstractNum = CTAbstractNum.Factory.newInstance();
this.cTAbstractNum.setAbstractNumId(BigInteger.valueOf(1));
this.indent = 0;
this.mapper = new ObjectMapper();
this.buildOptions(environment);
}
@ -164,7 +167,7 @@ public class WordBuilder {
int format;
format = IMAGE_TYPE_MAP.getOrDefault(fileType, 0);
try {
FileInputStream image = new FileInputStream(environment.getProperty("file.storage") + imageId);
ImageInputStream iis = ImageIO.createImageInputStream(new File(environment.getProperty("file.storage") + imageId));
Iterator<ImageReader> readers = ImageIO.getImageReaders(iis);
if (readers.hasNext()) {
@ -174,30 +177,30 @@ public class WordBuilder {
int initialImageWidth = reader.getWidth(0);
int initialImageHeight = reader.getHeight(0);
float ratio = initialImageHeight / (float)initialImageWidth;
float ratio = initialImageHeight / (float) initialImageWidth;
int marginLeftInDXA = mainDocumentPart.getDocument().getBody().getSectPr().getPgMar().getLeft().intValue();
int marginRightInDXA = mainDocumentPart.getDocument().getBody().getSectPr().getPgMar().getRight().intValue();
int pageWidthInDXA = mainDocumentPart.getDocument().getBody().getSectPr().getPgSz().getW().intValue();
int pageWidth = Math.round((pageWidthInDXA - marginLeftInDXA - marginRightInDXA) / (float)20); // /20 converts dxa to points
int marginLeftInDXA = ((BigInteger) mainDocumentPart.getDocument().getBody().getSectPr().getPgMar().getLeft()).intValue();
int marginRightInDXA = ((BigInteger) mainDocumentPart.getDocument().getBody().getSectPr().getPgMar().getRight()).intValue();
int pageWidthInDXA = ((BigInteger) mainDocumentPart.getDocument().getBody().getSectPr().getPgSz().getW()).intValue();
int pageWidth = Math.round((pageWidthInDXA - marginLeftInDXA - marginRightInDXA) / (float) 20); // /20 converts dxa to points
int imageWidth = Math.round(initialImageWidth*(float)0.75); // *0.75 converts pixels to points
int imageWidth = Math.round(initialImageWidth * (float) 0.75); // *0.75 converts pixels to points
int width = Math.min(imageWidth, pageWidth);
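// Worked example (hypothetical A4 page): pageWidthInDXA = 11906 with 1440 DXA margins gives
// pageWidth = Math.round((11906 - 1440 - 1440) / 20f) = 451 pt, while an 800 px wide image
// maps to 800 * 0.75 = 600 pt, so width is capped at the 451 pt page width.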
int marginTopInDXA = mainDocumentPart.getDocument().getBody().getSectPr().getPgMar().getTop().intValue();
int marginBottomInDXA = mainDocumentPart.getDocument().getBody().getSectPr().getPgMar().getBottom().intValue();
int pageHeightInDXA = mainDocumentPart.getDocument().getBody().getSectPr().getPgSz().getH().intValue();
int pageHeight = Math.round((pageHeightInDXA - marginTopInDXA - marginBottomInDXA) / (float)20); // /20 converts dxa to points
int marginTopInDXA = ((BigInteger) mainDocumentPart.getDocument().getBody().getSectPr().getPgMar().getTop()).intValue();
int marginBottomInDXA = ((BigInteger) mainDocumentPart.getDocument().getBody().getSectPr().getPgMar().getBottom()).intValue();
int pageHeightInDXA = ((BigInteger) mainDocumentPart.getDocument().getBody().getSectPr().getPgSz().getH()).intValue();
int pageHeight = Math.round((pageHeightInDXA - marginTopInDXA - marginBottomInDXA) / (float) 20); // /20 converts dxa to points
int imageHeight = Math.round(initialImageHeight * ((float)0.75)); // *0.75 converts pixels to points
int imageHeight = Math.round(initialImageHeight * ((float) 0.75)); // *0.75 converts pixels to points
int height = Math.round(width*ratio);
if(height > pageHeight) {
int height = Math.round(width * ratio);
if (height > pageHeight) {
// height calculated with ratio is too large. Image may have Portrait (vertical) orientation. Recalculate image dimensions.
height = Math.min(imageHeight, pageHeight);
width = Math.round(height/ratio);
width = Math.round(height / ratio);
}
FileInputStream image = new FileInputStream(environment.getProperty("file.storage") + imageId);
run.addPicture(image, format, fileName, Units.toEMU(width), Units.toEMU(height));
paragraph.setPageBreak(false);
}
@ -380,16 +383,16 @@ public class WordBuilder {
return null;
}
try {
JSONArray array = new JSONArray(JavaToJson.objectStringToJson(format));
List<Map<String, Object>> array = this.mapper.readValue(JavaToJson.objectStringToJson(format), ArrayList.class);
StringBuilder multipleFormats = new StringBuilder();
for (int i = 0; i < array.length(); i++) {
multipleFormats.append(array.getJSONObject(i).getString(attribute)).append(", ");
for (Map<String, Object> node : array) {
multipleFormats.append(node.get(attribute)).append(", ");
}
if (multipleFormats.length() > 0) {
multipleFormats.setLength(multipleFormats.length() - 2);
}
return multipleFormats.toString();
} catch (JSONException e) {
} catch (JsonProcessingException e) {
return format;
}
}

View File

@ -1,7 +1,6 @@
package eu.eudat.logic.utilities.documents.xml;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.exc.MismatchedInputException;
import eu.eudat.logic.services.forms.VisibilityRuleService;
import eu.eudat.logic.utilities.builders.XmlBuilder;
import eu.eudat.models.data.components.commons.datafield.ExternalDatasetsData;
@ -10,8 +9,6 @@ import eu.eudat.models.data.user.components.datasetprofile.FieldSet;
import eu.eudat.models.data.user.components.datasetprofile.Section;
import eu.eudat.models.data.user.composite.DatasetProfilePage;
import eu.eudat.models.data.user.composite.PagedDatasetProfile;
import org.json.JSONArray;
import org.json.JSONException;
import org.springframework.core.env.Environment;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

View File

@ -0,0 +1,63 @@
package eu.eudat.logic.utilities.helpers;
import eu.eudat.models.data.components.commons.datafield.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.Modifier;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public class FieldFactory {
private static final Logger logger = LoggerFactory.getLogger(FieldFactory.class);
private static final Map<String, Class<? extends FieldData>> fieldMap = Stream.of(new Object[][]{
{"combobox", ComboBoxData.class},
{"autocomplete", AutoCompleteData.class},
{"wordlist", WordListData.class},
{"internalDmpEntities", InternalDmpEntitiesData.class},
{"researchers", ResearcherData.class},
{"datasets", DatasetsAutoCompleteData.class},
{"dmps", DMPsAutoCompleteData.class},
{"booleanDecision", BooleanDecisionData.class},
{"radiobox", RadioBoxData.class},
{"checkBox", CheckBoxData.class},
{"freetext", FreeTextData.class},
{"textarea", TextAreaData.class},
{"richTextarea", RichTextAreaData.class},
{"upload", UploadData.class},
{"datePicker", DatePickerData.class},
{"externalDatasets", ExternalDatasetsData.class},
{"dataRepositories", DataRepositoriesData.class},
{"pubRepositories", DataRepositoriesData.class},
{"journalRepositories", DataRepositoriesData.class},
{"taxonomies", TaxonomiesData.class},
{"licenses", LicensesData.class},
{"publications", PublicationsData.class},
{"registries", RegistriesData.class},
{"services", ServicesData.class},
{"tags", TagsData.class},
{"organizations", OrganizationsData.class},
{"datasetIdentifier", DatasetIdentifierData.class},
{"currency", CurrencyData.class},
{"validation", ValidationData.class}
}).collect(Collectors.toMap(data -> (String) data[0], data -> (Class<? extends FieldData>) data[1]));
public static FieldData fromData(String type, Object data, String subType) {
if (Modifier.isAbstract(fieldMap.get(type).getModifiers())) {
return fromData(subType, data);
} else {
return fromData(type, data);
}
}
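// Dispatch example (assuming ComboBoxData is the abstract parent of the concrete
// "autocomplete"/"wordlist" data types, as the map above suggests):
//   fromData("combobox", data, "autocomplete") -> ComboBoxData is abstract, so the call
//     re-dispatches to fromData("autocomplete", data) and yields an AutoCompleteData.
//   fromData("freetext", data, "ignored")      -> FreeTextData is concrete, so the subType
//     is ignored and FreeTextData.fromData(data) is used directly.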
public static FieldData fromData(String type, Object data) {
try {
return (FieldData) fieldMap.get(type).newInstance().fromData(data);
} catch (InstantiationException | IllegalAccessException e) {
logger.error(e.getMessage(), e);
}
return null;
}
}

View File

@ -0,0 +1,120 @@
package eu.eudat.logic.utilities.schedule.file;
import eu.eudat.configurations.file.PermProperties;
import eu.eudat.configurations.file.TimeProperties;
import eu.eudat.configurations.file.TmpProperties;
import eu.eudat.data.entities.FileUpload;
import eu.eudat.logic.managers.FileManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
@Component
public class FileCleanUpJobs {
private static final Logger logger = LoggerFactory.getLogger(FileCleanUpJobs.class);
private final FileManager fileManager;
private final Environment environment;
@Autowired
public FileCleanUpJobs(FileManager fileManager, Environment environment, PermProperties permProperties, TmpProperties tmpProperties) {
this.fileManager = fileManager;
this.environment = environment;
if (permProperties != null) {
System.getProperties().put("perm.cron", createCron(permProperties.getTime()));
}
if (tmpProperties != null) {
System.getProperties().put("temp.cron", createCron(tmpProperties.getTime()));
}
}
@Scheduled(cron = "${perm.cron}")
public void deleteUnusedPermFiles() {
logger.info("Start deleting unused permanent files");
List<FileUpload> fileUploads = fileManager.getDeletedFileUploads();
try {
List<Path> tempFilePaths = Files.walk(Paths.get(Objects.requireNonNull(environment.getProperty("file.storage")))).filter(Files::isRegularFile).collect(Collectors.toList());
for (Path tempFilePath : tempFilePaths) {
if (fileUploads.stream().map(FileUpload::getId).anyMatch(uuid -> tempFilePath.endsWith(uuid.toString()))) {
Files.deleteIfExists(tempFilePath);
}
}
logger.info("Unused permanent files have been successfully deleted");
} catch (IOException e) {
logger.error(e.getLocalizedMessage(), e);
}
}
@Scheduled(cron = "${temp.cron}")
public void deleteTempFiles() {
logger.info("Start deleting temporary files");
try {
List<Path> tempFilePaths = Files.walk(Paths.get(Objects.requireNonNull(environment.getProperty("temp.temp")))).filter(Files::isRegularFile).collect(Collectors.toList());
for (Path tempFilePath : tempFilePaths) {
Files.deleteIfExists(tempFilePath);
}
logger.info("Temporary files have been successfully deleted");
} catch (IOException e) {
logger.error(e.getLocalizedMessage(), e);
}
}
private String createCron(TimeProperties timeProperties) {
List<String> cronList = new LinkedList<>();
cronList.add("0");
switch (timeProperties.getTimeUnit()) {
case MINUTES:
cronList.add("0/" + timeProperties.getInterval());
break;
case HOURS:
cronList.add(toSafeString(timeProperties.getStartTime().getMinute()));
cronList.add("0/" + timeProperties.getInterval());
break;
case DAYS:
cronList.add(toSafeString(timeProperties.getStartTime().getMinute()));
cronList.add(toSafeString(timeProperties.getStartTime().getHour()));
cronList.add("*/" + timeProperties.getInterval());
break;
case MONTHS:
cronList.add(toSafeString(timeProperties.getStartTime().getMinute()));
cronList.add(toSafeString(timeProperties.getStartTime().getHour()));
cronList.add(toSafeString(timeProperties.getStartTime().getDayOfMonth(), "1"));
cronList.add("*/" + timeProperties.getInterval());
break;
case WEEKS:
cronList.add(toSafeString(timeProperties.getStartTime().getMinute()));
cronList.add(toSafeString(timeProperties.getStartTime().getHour()));
cronList.add("*/" + (timeProperties.getInterval() * 7));
cronList.add("*");
cronList.add(toSafeString(timeProperties.getStartTime().getDayOfWeek(), "1"));
break;
}
if (cronList.size() < 6) {
for (int i = cronList.size(); i < 6; i++) {
cronList.add("*");
}
}
return String.join(" ", cronList);
}
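// Examples of the produced Spring cron expressions (hypothetical inputs; field order is
// second minute hour day-of-month month day-of-week):
//   MINUTES, interval 15                 -> "0 0/15 * * * *"
//   HOURS,   interval 6, start at xx:30  -> "0 30 0/6 * * *"
//   DAYS,    interval 1, start at 03:00  -> "0 0 3 */1 * *"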
private String toSafeString(Object object) {
return toSafeString(object, "0");
}
private String toSafeString(Object object, String defaultValue) {
return object != null ? object.toString() : defaultValue;
}
}

View File

@ -20,8 +20,8 @@ import java.util.concurrent.CompletableFuture;
public class NotificationScheduleJob {
private static final Logger logger = LoggerFactory.getLogger(NotificationScheduleJob.class);
private ApiContext apiContext;
private NotificationManager notificationManager;
private final ApiContext apiContext;
private final NotificationManager notificationManager;
@Autowired
public NotificationScheduleJob(ApiContext apiContext, NotificationManager notificationManager) {

View File

@ -1,5 +1,6 @@
package eu.eudat.models.data.components.commons.datafield;
import eu.eudat.logic.utilities.helpers.FieldFactory;
import eu.eudat.logic.utilities.interfaces.XmlSerializable;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
@ -21,6 +22,10 @@ public abstract class FieldData<T> implements XmlSerializable<T> {
return null;
}
public T fromData(Object data, String subType) {
return (T) FieldFactory.fromData(subType, data);
}
public Object toData() {
return null;
}

View File

@ -13,7 +13,7 @@ import java.util.Map;
public class UploadData extends FieldData<UploadData> {
public class Option implements XmlSerializable<UploadData.Option> {
public static class Option implements XmlSerializable<UploadData.Option> {
private String label;
private String value;
@ -77,7 +77,7 @@ public class UploadData extends FieldData<UploadData> {
if (data != null) {
List<Map<String, String>> types = ((Map<String, List<Map<String, String>>>) data).get("types");
for (Map<String, String> map : types) {
UploadData.Option newOption = new UploadData.Option();
UploadData.Option newOption = new Option();
newOption.setLabel(map.get("label"));
newOption.setValue(map.get("value"));
this.types.add(newOption);
@ -125,7 +125,7 @@ public class UploadData extends FieldData<UploadData> {
for (int temp = 0; temp < optionElements.getLength(); temp++) {
Node optionElement = optionElements.item(temp);
if (optionElement.getNodeType() == Node.ELEMENT_NODE) {
this.types.add(new UploadData.Option().fromXml((Element) optionElement));
this.types.add(new Option().fromXml((Element) optionElement));
}
}
}

View File

@ -1,5 +1,7 @@
package eu.eudat.models.data.dmp;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.data.entities.*;
import eu.eudat.logic.utilities.builders.XmlBuilder;
import eu.eudat.models.DataModel;
@ -7,18 +9,21 @@ import eu.eudat.models.data.datasetwizard.DatasetWizardModel;
import eu.eudat.models.data.dynamicfields.DynamicFieldWithValue;
import eu.eudat.models.data.entities.xmlmodels.dmpprofiledefinition.DataManagementPlanProfile;
import eu.eudat.models.data.funder.Funder;
import eu.eudat.models.data.helpermodels.Tuple;
import eu.eudat.models.data.listingmodels.DatasetListingModel;
import eu.eudat.models.data.listingmodels.UserInfoListingModel;
import eu.eudat.models.data.grant.Grant;
import eu.eudat.models.data.helpermodels.Tuple;
import eu.eudat.models.data.listingmodels.UserInfoListingModel;
import eu.eudat.models.data.project.Project;
import eu.eudat.models.data.userinfo.UserListingModel;
import net.minidev.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.stream.Collectors;
public class DataManagementPlan implements DataModel<DMP, DataManagementPlan> {
private static final ObjectMapper objectMapper = new ObjectMapper();
private static final Logger logger = LoggerFactory.getLogger(DataManagementPlan.class);
private UUID id;
private String label;
private UUID groupId;
@ -233,15 +238,16 @@ public class DataManagementPlan implements DataModel<DMP, DataManagementPlan> {
@Override
public DataManagementPlan fromDataModel(DMP entity) {
try {
this.id = entity.getId();
this.profile = entity.getProfile() != null ? new Tuple<UUID, String>(entity.getProfile().getId(), entity.getProfile().getLabel()) : null;
this.organisations = entity.getOrganisations() != null ? entity.getOrganisations().stream().map(item -> new Organisation().fromDataModel(item)).collect(Collectors.toList()) : new ArrayList<>();
this.researchers = entity.getResearchers() != null ? entity.getResearchers().stream().map(item -> new Researcher().fromDataModel(item)).collect(Collectors.toList()): new ArrayList<>();
this.researchers = entity.getResearchers() != null ? entity.getResearchers().stream().map(item -> new Researcher().fromDataModel(item)).collect(Collectors.toList()) : new ArrayList<>();
this.version = entity.getVersion();
this.groupId = this.groupId == null ? null : entity.getGroupId();
this.label = entity.getLabel();
this.grant = new Grant();
this.properties = entity.getProperties() != null ? new org.json.JSONObject(entity.getProperties()).toMap() : null;
this.properties = entity.getProperties() != null ? objectMapper.readValue(entity.getProperties(), LinkedHashMap.class) : null;
this.grant.fromDataModel(entity.getGrant());
this.creator = new eu.eudat.models.data.userinfo.UserInfo();
this.groupId = entity.getGroupId();
@ -258,7 +264,7 @@ public class DataManagementPlan implements DataModel<DMP, DataManagementPlan> {
if (entity.getAssociatedDmps() != null && !entity.getAssociatedDmps().isEmpty()) {
this.profiles = new LinkedList<>();
for (DatasetProfile datasetProfile: entity.getAssociatedDmps()) {
for (DatasetProfile datasetProfile : entity.getAssociatedDmps()) {
AssociatedProfile associatedProfile = new AssociatedProfile().fromData(datasetProfile);
this.profiles.add(associatedProfile);
}
@ -293,7 +299,10 @@ public class DataManagementPlan implements DataModel<DMP, DataManagementPlan> {
}
this.isPublic = entity.isPublic();
this.extraProperties = entity.getExtraProperties() != null ? new org.json.JSONObject(entity.getExtraProperties()).toMap() : null;
this.extraProperties = entity.getExtraProperties() != null ? objectMapper.readValue(entity.getExtraProperties(), LinkedHashMap.class) : null;
} catch (JsonProcessingException e) {
logger.error(e.getLocalizedMessage(), e);
}
return this;
}
@ -342,6 +351,7 @@ public class DataManagementPlan implements DataModel<DMP, DataManagementPlan> {
}
public DataManagementPlan fromDataModelNoDatasets(DMP entity) {
try {
this.id = entity.getId();
this.profile = entity.getProfile() != null ? new Tuple<UUID, String>(entity.getProfile().getId(), entity.getProfile().getLabel()) : null;
this.organisations = entity.getOrganisations() != null ? entity.getOrganisations().stream().map(item -> new Organisation().fromDataModel(item)).collect(Collectors.toList()) : new ArrayList<>();
@ -349,7 +359,7 @@ public class DataManagementPlan implements DataModel<DMP, DataManagementPlan> {
this.version = entity.getVersion();
this.label = entity.getLabel();
this.grant = new Grant();
this.properties = entity.getProperties() != null ? new org.json.JSONObject(entity.getProperties()).toMap() : null;
this.properties = entity.getProperties() != null ? objectMapper.readValue(entity.getProperties(), LinkedHashMap.class) : null;
this.creator = new eu.eudat.models.data.userinfo.UserInfo();
this.groupId = entity.getGroupId();
this.lockable = entity.getDataset() != null && entity.getDataset().stream().findAny().isPresent();
@ -365,7 +375,7 @@ public class DataManagementPlan implements DataModel<DMP, DataManagementPlan> {
if (entity.getAssociatedDmps() != null && !entity.getAssociatedDmps().isEmpty()) {
this.profiles = new LinkedList<>();
for (DatasetProfile datasetProfile: entity.getAssociatedDmps()) {
for (DatasetProfile datasetProfile : entity.getAssociatedDmps()) {
AssociatedProfile associatedProfile = new AssociatedProfile().fromData(datasetProfile);
this.profiles.add(associatedProfile);
}
@ -389,7 +399,10 @@ public class DataManagementPlan implements DataModel<DMP, DataManagementPlan> {
}
this.isPublic = entity.isPublic();
this.extraProperties = entity.getExtraProperties() != null ? new org.json.JSONObject(entity.getExtraProperties()).toMap() : null;
this.extraProperties = entity.getExtraProperties() != null ? objectMapper.readValue(entity.getExtraProperties(), LinkedHashMap.class) : null;
} catch (JsonProcessingException e) {
logger.error(e.getLocalizedMessage(), e);
}
return this;
}

View File

@ -1,5 +1,8 @@
package eu.eudat.models.data.dmp;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.data.entities.*;
import eu.eudat.logic.utilities.builders.XmlBuilder;
import eu.eudat.models.DataModel;
@ -14,11 +17,15 @@ import eu.eudat.models.data.grant.GrantDMPEditorModel;
import eu.eudat.models.data.project.ProjectDMPEditorModel;
import eu.eudat.models.data.userinfo.UserListingModel;
import net.minidev.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.stream.Collectors;
public class DataManagementPlanEditorModel implements DataModel<DMP, DataManagementPlanEditorModel> {
private static final ObjectMapper objectMapper = new ObjectMapper();
private static final Logger logger = LoggerFactory.getLogger(DataManagementPlanEditorModel.class);
private UUID id;
private String label;
private UUID groupId;
@@ -224,6 +231,7 @@ public class DataManagementPlanEditorModel implements DataModel<DMP, DataManagem
@Override
public DataManagementPlanEditorModel fromDataModel(DMP entity) {
try {
this.id = entity.getId();
this.profile = entity.getProfile() != null ? new Tuple<UUID, String>(entity.getProfile().getId(), entity.getProfile().getLabel()) : null;
this.organisations = entity.getOrganisations().stream().map(item -> new Organisation().fromDataModel(item)).collect(Collectors.toList());
@@ -232,7 +240,7 @@ public class DataManagementPlanEditorModel implements DataModel<DMP, DataManagem
this.groupId = this.groupId == null ? null : entity.getGroupId();
this.label = entity.getLabel();
this.grant = new GrantDMPEditorModel();
this.properties = entity.getProperties() != null ? new org.json.JSONObject(entity.getProperties()).toMap() : null;
this.properties = entity.getProperties() != null ? objectMapper.readValue(entity.getProperties(), LinkedHashMap.class) : null;
this.grant.getExistGrant().fromDataModel(entity.getGrant());
this.grant.getExistGrant().setSource("");
this.creator = new eu.eudat.models.data.userinfo.UserInfo();
@@ -250,7 +258,7 @@ public class DataManagementPlanEditorModel implements DataModel<DMP, DataManagem
if (entity.getAssociatedDmps() != null && !entity.getAssociatedDmps().isEmpty()) {
this.profiles = new LinkedList<>();
for (DatasetProfile datasetProfile: entity.getAssociatedDmps()) {
for (DatasetProfile datasetProfile : entity.getAssociatedDmps()) {
AssociatedProfile associatedProfile = new AssociatedProfile().fromData(datasetProfile);
this.profiles.add(associatedProfile);
}
@@ -266,7 +274,10 @@ public class DataManagementPlanEditorModel implements DataModel<DMP, DataManagem
this.users = entity.getUsers().stream().map(item -> new UserInfoListingModel().fromDataModel(item)).collect(Collectors.toList());
this.funder = new FunderDMPEditorModel();
this.funder.getExistFunder().fromDataModel(entity.getGrant().getFunder());
this.extraProperties = entity.getExtraProperties() != null ? new org.json.JSONObject(entity.getExtraProperties()).toMap() : null;
this.extraProperties = entity.getExtraProperties() != null ? objectMapper.readValue(entity.getExtraProperties(), LinkedHashMap.class) : null;
} catch (JsonProcessingException e) {
logger.error(e.getLocalizedMessage(), e);
}
return this;
}


@@ -1,44 +0,0 @@
package eu.eudat.models.data.rda;
import eu.eudat.data.entities.UserInfo;
public class ContactRDAExportModel {
private String mbox;
private String name;
private IdRDAExportModel contact_id;
public String getMbox() {
return mbox;
}
public void setMbox(String mbox) {
this.mbox = mbox;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public IdRDAExportModel getContact_id() {
return contact_id;
}
public void setContact_id(IdRDAExportModel contact_id) {
this.contact_id = contact_id;
}
public ContactRDAExportModel fromDataModel(UserInfo entity) {
ContactRDAExportModel contact = new ContactRDAExportModel();
contact.mbox = entity.getEmail();
contact.name = entity.getName();
// TODO: we should use a contact_id and not our UUID.
if (!entity.getId().toString().isEmpty()) {
contact.contact_id = new IdRDAExportModel(entity.getId().toString(), "other");
}
else {
contact.contact_id = null;
}
return contact;
}
}


@@ -1,93 +0,0 @@
package eu.eudat.models.data.rda;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.List;
public class DatasetDistributionRDAExportModel {
private String access_url;
private String available_till;
private String byte_size;
private String data_access; // Allowed values: open / shared / closed
private String description;
private String download_url;
private List<String> format; // Format according to: https://www.iana.org/assignments/media-types/media-types.xhtml if appropriate, otherwise use the common name for this format
private HostRDAExportModel host;
private List<LicenseRDAExportModel> license;
private String title;
public String getAccess_url() {
return access_url;
}
public void setAccess_url(String access_url) {
this.access_url = access_url;
}
public String getAvailable_till() {
return available_till;
}
public void setAvailable_till(String available_till) {
this.available_till = available_till;
}
public String getByte_size() {
return byte_size;
}
public void setByte_size(String byte_size) {
this.byte_size = byte_size;
}
public String getData_access() {
return data_access;
}
public void setData_access(String data_access) {
this.data_access = data_access;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getDownload_url() {
return download_url;
}
public void setDownload_url(String download_url) {
this.download_url = download_url;
}
public List<String> getFormat() {
return format;
}
public void setFormat(List<String> format) {
this.format = format;
}
public HostRDAExportModel getHost() {
return host;
}
public void setHost(HostRDAExportModel host) {
this.host = host;
}
public List<LicenseRDAExportModel> getLicense() {
return license;
}
public void setLicense(List<LicenseRDAExportModel> license) {
this.license = license;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
@JsonIgnore
public boolean isValid() {
return title != null || data_access != null;
}
}


@@ -1,47 +0,0 @@
package eu.eudat.models.data.rda;
import com.fasterxml.jackson.annotation.JsonIgnore;
public class DatasetMetadataRDAExportModel {
private String description; // Not mandatory.
private String language;
private IdRDAExportModel metadata_standard_id;
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getLanguage() {
return language;
}
public void setLanguage(String language) {
this.language = language;
}
public IdRDAExportModel getMetadata_standard_id() {
return metadata_standard_id;
}
public void setMetadata_standard_id(IdRDAExportModel metadata_standard_id) {
this.metadata_standard_id = metadata_standard_id;
}
public DatasetMetadataRDAExportModel fromDataModel(String key, Object value) {
DatasetMetadataRDAExportModel metadataRDAExportModel = new DatasetMetadataRDAExportModel();
if (key.contains("metadata_standard_id"))
metadataRDAExportModel.setMetadata_standard_id(new IdRDAExportModel(value.toString(), "other"));
else if (key.contains("language"))
metadataRDAExportModel.setLanguage(value.toString());
else if (key.contains("description"))
metadataRDAExportModel.setDescription(value.toString());
return metadataRDAExportModel;
}
@JsonIgnore
public boolean isValid() {
return description != null || language != null || metadata_standard_id != null;
}
}


@@ -1,474 +0,0 @@
package eu.eudat.models.data.rda;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.jayway.jsonpath.JsonPath;
import eu.eudat.data.entities.Dataset;
import eu.eudat.logic.managers.DatasetManager;
import eu.eudat.logic.utilities.builders.XmlBuilder;
import eu.eudat.models.data.security.Principal;
import org.json.JSONArray;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import javax.xml.xpath.*;
import java.text.DateFormat;
import java.util.*;
import static java.util.stream.Collectors.groupingBy;
public class DatasetRDAExportModel {
private static final Logger logger = LoggerFactory.getLogger(DatasetRDAExportModel.class);
private static final ObjectMapper mapper = new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
private Map<String, String> multiplicityIdToFieldSetId = new HashMap<>();
private List<String> data_quality_assurance;
private IdRDAExportModel dataset_id;
private String description;
private List<DatasetDistributionRDAExportModel> distribution;
private String issued; // Created Date, could also use finalized one.
private List<String> keyword;
private String language;
private List<DatasetMetadataRDAExportModel> metadata;
private String personal_data; // Allowed Values: yes no unknown.
private String preservation_statement;
private List<DatasetSecurityAndPrivacyRDAExportModel> security_and_privacy;
private String sensitive_data; // Allowed Values: yes no unknown.
private List<DatasetTechnicalResourceRDAExportModel> technical_resource;
private String title;
private String type; // Type according to: http://vocabularies.coar-repositories.org/pubby/resource_type.html
public List<String> getData_quality_assurance() {
return data_quality_assurance;
}
public void setData_quality_assurance(List<String> data_quality_assurance) {
this.data_quality_assurance = data_quality_assurance;
}
public IdRDAExportModel getDataset_id() {
return dataset_id;
}
public void setDataset_id(IdRDAExportModel dataset_id) {
this.dataset_id = dataset_id;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public List<DatasetDistributionRDAExportModel> getDistribution() {
return distribution;
}
public void setDistribution(List<DatasetDistributionRDAExportModel> distribution) {
this.distribution = distribution;
}
public String getIssued() {
return issued;
}
public void setIssued(String issued) {
this.issued = issued;
}
public List<String> getKeyword() {
return keyword;
}
public void setKeyword(List<String> keyword) {
this.keyword = keyword;
}
public String getLanguage() {
return language;
}
public void setLanguage(String language) {
this.language = language;
}
public List<DatasetMetadataRDAExportModel> getMetadata() {
return metadata;
}
public void setMetadata(List<DatasetMetadataRDAExportModel> metadata) {
this.metadata = metadata;
}
public String getPersonal_data() {
return personal_data;
}
public void setPersonal_data(String personal_data) {
this.personal_data = personal_data;
}
public String getPreservation_statement() {
return preservation_statement;
}
public void setPreservation_statement(String preservation_statement) {
this.preservation_statement = preservation_statement;
}
public List<DatasetSecurityAndPrivacyRDAExportModel> getSecurity_and_privacy() {
return security_and_privacy;
}
public void setSecurity_and_privacy(List<DatasetSecurityAndPrivacyRDAExportModel> security_and_privacy) {
this.security_and_privacy = security_and_privacy;
}
public String getSensitive_data() {
return sensitive_data;
}
public void setSensitive_data(String sensitive_data) {
this.sensitive_data = sensitive_data;
}
public List<DatasetTechnicalResourceRDAExportModel> getTechnical_resource() {
return technical_resource;
}
public void setTechnical_resource(List<DatasetTechnicalResourceRDAExportModel> technical_resource) {
this.technical_resource = technical_resource;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public DatasetRDAExportModel fromDataModel(Dataset dataset, DatasetManager datasetManager, Principal principal) {
// Map of template Ids to rda values.
JSONObject jObject = new JSONObject(dataset.getProperties());
Map<String, Object> templateIdsToValues = jObject.toMap();
/*--------- Building dataset rda export model ---------*/
DatasetRDAExportModel datasetRDAExportModel = new DatasetRDAExportModel();
datasetRDAExportModel.setDataset_id(new IdRDAExportModel(dataset.getId().toString(), "other"));
if (dataset.getDescription() != null) datasetRDAExportModel.setDescription(dataset.getDescription().replace("\n", " "));
datasetRDAExportModel.setIssued(DateFormat.getDateInstance(DateFormat.SHORT).format(dataset.getCreated()));
datasetRDAExportModel.setLanguage("en"); // mock data
datasetRDAExportModel.setTitle(dataset.getLabel());
// Transform the answered dataset description to json so we can parse it and fill the rda model.
JSONObject datasetDescriptionJson = null;
try {
String jsonResult = mapper.writeValueAsString(datasetManager.getSingle(dataset.getId().toString(), principal).getDatasetProfileDefinition());
datasetDescriptionJson = new JSONObject(jsonResult);
} catch (JsonProcessingException e) {
logger.error(e.getMessage(), e);
}
setMultiplicityIdToFieldSetId(datasetDescriptionJson);
/*--------- Building personal data. ---------*/
String personalData = buildSingleProperties("dataset.personal_data", datasetDescriptionJson, templateIdsToValues);
if (personalData != null) {
datasetRDAExportModel.setPersonal_data(personalData);
} else {
datasetRDAExportModel.setPersonal_data("unknown");
}
/*--------- Building preservation statement. ---------*/
datasetRDAExportModel.setPreservation_statement(buildSingleProperties("dataset.preservation_statement", datasetDescriptionJson, templateIdsToValues));
/*--------- Building sensitive data. ---------*/
String sensitiveData = buildSingleProperties("dataset.sensitive_data", datasetDescriptionJson, templateIdsToValues);
if (personalData != null) {
datasetRDAExportModel.setSensitive_data(sensitiveData);
} else {
datasetRDAExportModel.setSensitive_data("unknown");
}
/*--------- Building type. ---------*/
datasetRDAExportModel.setType(buildSingleProperties("dataset.type", datasetDescriptionJson, templateIdsToValues));
/*--------- Building data_quality_assurance. ---------*/
datasetRDAExportModel.setData_quality_assurance(buildDataQualityAssurance(datasetDescriptionJson, templateIdsToValues, dataset.getProfile().getDefinition()));
/*--------- Building distribution. ---------*/
datasetRDAExportModel.setDistribution(buildDistribution(datasetDescriptionJson, templateIdsToValues, dataset.getProfile().getDefinition()));
/*--------- Building keywords. ---------*/
datasetRDAExportModel.setKeyword(buildKeywords(datasetDescriptionJson, templateIdsToValues, dataset.getProfile().getDefinition()));
/*--------- Building metadata items. ---------*/
datasetRDAExportModel.setMetadata(buildMetadata(datasetDescriptionJson, templateIdsToValues, dataset.getProfile().getDefinition()));
/*--------- Building security and privacy items. ---------*/
datasetRDAExportModel.setSecurity_and_privacy(buildSecurityAndPrivacy(datasetDescriptionJson, templateIdsToValues, dataset.getProfile().getDefinition()));
/*--------- Building technical_resource. ---------*/
datasetRDAExportModel.setTechnical_resource(buildTechnicalResource(datasetDescriptionJson, templateIdsToValues, dataset.getProfile().getDefinition()));
return datasetRDAExportModel;
}
private String buildSingleProperties(String rdaKey, JSONObject datasetDescriptionJson, Map<String, Object> templateIdsToValues) {
String expression = "$..fields[*][?(@.rdaProperty == \"" + rdaKey + "\" )].id";
List<String> list = jsonValueListFromExpression(datasetDescriptionJson, expression);
if (!list.isEmpty()) {
return templateIdsToValues.get(list.get(0)).toString();
} else {
return null;
}
}
private List<String> buildDataQualityAssurance(JSONObject datasetDescriptionJson, Map<String, Object> templateIdsToValues, String datasetProfileDefinition) {
List<RdaField> dataQualityFields = getRDAFieldsFromJson(datasetDescriptionJson, new String[]{"dataset.data_quality_assurance"}, datasetProfileDefinition);
for (RdaField rdaField : dataQualityFields) {
rdaField.setRdaValue(templateIdsToValues.get(rdaField.getFieldId()).toString());
}
List<String> dataQualityAssuranceList = new LinkedList<>();
for (RdaField rdaField : dataQualityFields) {
dataQualityAssuranceList.add(rdaField.getRdaValue());
}
return dataQualityAssuranceList;
}
private List<DatasetDistributionRDAExportModel> buildDistribution(JSONObject datasetDescriptionJson, Map<String, Object> templateIdsToValues, String datasetProfileDefinition) {
DatasetDistributionRDAExportModel distributionModel = new DatasetDistributionRDAExportModel();
distributionModel.setAccess_url(buildSingleProperties("dataset.distribution.access_url", datasetDescriptionJson, templateIdsToValues));
distributionModel.setAvailable_till(buildSingleProperties("dataset.distribution.available_till", datasetDescriptionJson, templateIdsToValues));
distributionModel.setByte_size(buildSingleProperties("dataset.distribution.byte_size", datasetDescriptionJson, templateIdsToValues));
distributionModel.setData_access(buildSingleProperties("dataset.distribution.data_access", datasetDescriptionJson, templateIdsToValues));
distributionModel.setDescription(buildSingleProperties("dataset.distribution.description", datasetDescriptionJson, templateIdsToValues));
distributionModel.setDownload_url(buildSingleProperties("dataset.distribution.download_url", datasetDescriptionJson, templateIdsToValues));
distributionModel.setTitle(buildSingleProperties("dataset.distribution.title", datasetDescriptionJson, templateIdsToValues));
/*--------- Building format. ---------*/
// We currently support the return of only one distribution.
List<DatasetDistributionRDAExportModel> distributionList = new LinkedList<>();
if (distributionModel.isValid()) {
distributionList.add(distributionModel);
} else {
DatasetDistributionRDAExportModel model = new DatasetDistributionRDAExportModel();
model.setDescription("Distribution data was not valid");
distributionList.add(model);
}
return distributionList;
}
private List<String> buildKeywords(JSONObject datasetDescriptionJson, Map<String, Object> templateIdsToValues, String datasetProfileDefinition) {
List<RdaField> keywordFields = getRDAFieldsFromJson(datasetDescriptionJson, new String[]{"dataset.keyword"}, datasetProfileDefinition);
for (RdaField rdaField : keywordFields) {
rdaField.setRdaValue(templateIdsToValues.get(rdaField.getFieldId()).toString());
}
List<String> keywordsList = new LinkedList<>();
for (RdaField rdaField : keywordFields) {
keywordsList.add(rdaField.getRdaValue());
}
return keywordsList;
}
private List<DatasetMetadataRDAExportModel> buildMetadata(JSONObject datasetDescriptionJson, Map<String, Object> templateIdsToValues, String datasetProfileDefinition) {
List<RdaField> metadataFields = getRDAFieldsFromJson(datasetDescriptionJson,
new String[]{"dataset.metadata.metadata_standard_id.type", "dataset.metadata.metadata_standard_id.identifier", "dataset.metadata.description", "dataset.metadata.language", "dataset.metadata.metadata_standard_id"},
datasetProfileDefinition);
// Adding rdaValue and FieldSetIds on metadataFields.
for (RdaField rdaField : metadataFields) {
rdaField.setRdaValue(templateIdsToValues.get(rdaField.getFieldId()).toString());
}
// Group metadataFields based on their field set id.
Map<String, List<RdaField>> groupedMetadataFields = metadataFields.stream().collect(groupingBy(RdaField::getFieldSetId));
// Creating the metadata.
List<DatasetMetadataRDAExportModel> metadataRDAExportModelList = new LinkedList<>();
for (String fieldSetId : groupedMetadataFields.keySet()) {
DatasetMetadataRDAExportModel metadataRda = new DatasetMetadataRDAExportModel();
for (RdaField rdaField : groupedMetadataFields.get(fieldSetId)) {
if (rdaField.getRdaProperty().equals("dataset.metadata.metadata_standard_id.identifier")) {
if (metadataRda.getMetadata_standard_id() != null) {
metadataRda.getMetadata_standard_id().setIdentifier(rdaField.getRdaValue());
} else {
metadataRda.setMetadata_standard_id(new IdRDAExportModel(rdaField.getRdaValue(), "other"));
}
}
if (rdaField.getRdaProperty().equals("dataset.metadata.metadata_standard_id.type")) {
if (metadataRda.getMetadata_standard_id() != null) {
metadataRda.getMetadata_standard_id().setType(rdaField.getRdaValue());
} else {
metadataRda.setMetadata_standard_id(new IdRDAExportModel("", rdaField.getRdaValue()));
}
}
if (rdaField.getRdaProperty().equals("dataset.metadata.description")) {
metadataRda.setDescription(rdaField.getRdaValue());
}
if (rdaField.getRdaProperty().equals("dataset.metadata.language")) {
metadataRda.setLanguage(rdaField.getRdaValue());
}
if (rdaField.getRdaProperty().equals("dataset.metadata.metadata_standard_id") && !rdaField.getRdaValue().isEmpty()) {
JSONArray jsonArray = new JSONArray(rdaField.getRdaValue());
for (int i = 0; i < jsonArray.length(); i++) {
JSONObject jsonObject = jsonArray.getJSONObject(i);
Map<String, Object> jsonObjectMap = jsonObject.toMap();
DatasetMetadataRDAExportModel metadataRda1 = new DatasetMetadataRDAExportModel();
// metadataRda1.setMetadata_standard_id(new IdRDAExportModel(jsonObjectMap.get("label").toString(), jsonObjectMap.get("source").toString()));
metadataRda1.setMetadata_standard_id(new IdRDAExportModel(jsonObjectMap.get("uri").toString(), "url"));
metadataRDAExportModelList.add(metadataRda1);
}
}
}
if (metadataRda.isValid()) {
metadataRDAExportModelList.add(metadataRda);
}
}
return new LinkedList<>(metadataRDAExportModelList);
}
private List<DatasetSecurityAndPrivacyRDAExportModel> buildSecurityAndPrivacy(JSONObject datasetDescriptionJson, Map<String, Object> templateIdsToValues, String datasetProfileDefinition) {
List<RdaField> secAndPrFields = getRDAFieldsFromJson(
datasetDescriptionJson,
new String[]{"dataset.security_and_privacy.description", "dataset.security_and_privacy.title", "dataset.security_and_privacy"},
datasetProfileDefinition);
for (RdaField rdaField : secAndPrFields) {
rdaField.setRdaValue(templateIdsToValues.get(rdaField.getFieldId()).toString());
}
Map<String, List<RdaField>> groupedSecurityAndPrivacyFields = secAndPrFields.stream().collect(groupingBy(RdaField::getFieldSetId));
List<DatasetSecurityAndPrivacyRDAExportModel> securityAndPrivacyRDAExportModelList = new LinkedList<>();
for (String fieldSetId : groupedSecurityAndPrivacyFields.keySet()) {
DatasetSecurityAndPrivacyRDAExportModel securityAndPrivacyModel = new DatasetSecurityAndPrivacyRDAExportModel();
for (RdaField rdaField : groupedSecurityAndPrivacyFields.get(fieldSetId)) {
if (rdaField.getRdaProperty().equals("dataset.security_and_privacy.description")) {
securityAndPrivacyModel.setDescription(rdaField.getRdaValue());
}
if (rdaField.getRdaProperty().equals("dataset.security_and_privacy.title")) {
securityAndPrivacyModel.setTitle(rdaField.getRdaValue());
}
if (rdaField.getRdaProperty().equals("dataset.security_and_privacy")) {
JSONArray jsonArray = new JSONArray(rdaField.getRdaValue());
for (int i = 0; i < jsonArray.length(); i++) {
JSONObject jsonObject = jsonArray.getJSONObject(i);
Map<String, Object> jsonObjectMap = jsonObject.toMap();
DatasetSecurityAndPrivacyRDAExportModel secAndPrivacy = new DatasetSecurityAndPrivacyRDAExportModel(jsonObjectMap.get("label").toString(), jsonObjectMap.get("source").toString());
securityAndPrivacyRDAExportModelList.add(secAndPrivacy);
}
}
}
securityAndPrivacyRDAExportModelList.add(securityAndPrivacyModel);
}
return securityAndPrivacyRDAExportModelList;
}
private List<DatasetTechnicalResourceRDAExportModel> buildTechnicalResource(JSONObject datasetDescriptionJson, Map<String, Object> templateIdsToValues, String datasetProfileDefinition) {
List<RdaField> dataQualityFields = getRDAFieldsFromJson(datasetDescriptionJson,
new String[]{"dataset.technical_resource.technical_resource", "dataset.technical_resource.technical_resource.description", "dataset.technical_resource.technical_resource.name"},
datasetProfileDefinition);
for (RdaField rdaField : dataQualityFields) {
rdaField.setRdaValue(templateIdsToValues.get(rdaField.getFieldId()).toString());
}
List<DatasetTechnicalResourceRDAExportModel> technicalResourceList = new LinkedList<>();
Map<String, List<RdaField>> groupedDataQualityFields = dataQualityFields.stream().collect(groupingBy(RdaField::getFieldSetId));
for (String fieldSetId : groupedDataQualityFields.keySet()) {
DatasetTechnicalResourceRDAExportModel technicalResourceModel = new DatasetTechnicalResourceRDAExportModel();
for (RdaField rdaField : groupedDataQualityFields.get(fieldSetId)) {
if (rdaField.getRdaProperty().equals("dataset.technical_resource.technical_resource.description")) {
technicalResourceModel.setDescription(rdaField.getRdaValue());
}
if (rdaField.getRdaProperty().equals("dataset.technical_resource.technical_resource.name")) {
technicalResourceModel.setName(rdaField.getRdaValue());
}
if (rdaField.getRdaProperty().equals("dataset.security_and_privacy")) {
JSONArray jsonArray = new JSONArray(rdaField.getRdaValue());
for (int i = 0; i < jsonArray.length(); i++) {
JSONObject jsonObject = jsonArray.getJSONObject(i);
Map<String, Object> jsonObjectMap = jsonObject.toMap();
DatasetTechnicalResourceRDAExportModel technicalResource = new DatasetTechnicalResourceRDAExportModel(jsonObjectMap.get("label").toString(), jsonObjectMap.get("label").toString());
technicalResourceList.add(technicalResource);
}
}
}
technicalResourceList.add(technicalResourceModel);
}
return technicalResourceList;
}
private void setMultiplicityIdToFieldSetId(JSONObject json) {
String multiplicityItemsFieldSetIdExp = "$..multiplicityItems[*].id";
List<String> multiplicityItemsFieldSetIdList = jsonValueListFromExpression(json, multiplicityItemsFieldSetIdExp);
for (String fieldSetId : multiplicityItemsFieldSetIdList) {
String fieldsFromFieldSetIdExp = "$..multiplicityItems[*][?(@.id == \""+ fieldSetId +"\")].fields[*].id";
List<String> fieldsIdList = jsonValueListFromExpression(json, fieldsFromFieldSetIdExp);
for (String fieldId : fieldsIdList) {
this.multiplicityIdToFieldSetId.put(fieldId, fieldSetId);
}
}
}
private List<RdaField> getRDAFieldsFromJson(JSONObject json, String[] rdaKey, String datasetProfileDefinition) {
List<RdaField> rdaFields = new LinkedList<>();
for (String key : rdaKey) {
String fieldIdExpression = "$..fields[*][?(@.rdaProperty == \"" + key + "\" )].id";
List<String> listFromExpression = jsonValueListFromExpression(json, fieldIdExpression);
for (String fieldId : listFromExpression) {
RdaField rdaField = new RdaField();
rdaField.setRdaProperty(key);
rdaField.setFieldId(fieldId);
if (fieldId.startsWith("multiple_")) {
rdaField.setFieldSetId(this.multiplicityIdToFieldSetId.get(fieldId));
} else {
rdaField.setFieldSetId(getFieldSetIdForFieldFromXML(datasetProfileDefinition, fieldId));
}
rdaFields.add(rdaField);
}
}
return rdaFields;
}
private List<String> jsonValueListFromExpression(JSONObject json, String expression) {
net.minidev.json.JSONArray jsonArray = JsonPath.parse(json.toString()).read(expression);
List<String> valueList = new LinkedList<>();
for (Object o : jsonArray) {
valueList.add(o.toString());
}
return valueList;
}
private String getFieldSetIdForFieldFromXML(String datasetProfileDefinition, String fieldId) {
String fieldSetIdExpression = "//field[@id ='" + fieldId + "']/ancestor::fieldSet/@id";
List<String> listFromExpression = xmlValueListFromExpression(datasetProfileDefinition, fieldSetIdExpression);
if (listFromExpression.size() == 1) return listFromExpression.get(0);
return null;
}
private List<String> xmlValueListFromExpression(String xml, String expression) {
List<String> valuesList = new LinkedList<>();
Document document = XmlBuilder.fromXml(xml);
XPathFactory xpathFactory = XPathFactory.newInstance();
XPath xpath = xpathFactory.newXPath();
try {
XPathExpression expr = xpath.compile(expression);
NodeList nodeList = (NodeList) expr.evaluate(document, XPathConstants.NODESET);
for (int i = 0; i < nodeList.getLength(); i++) {
Node node = nodeList.item(i);
valuesList.add(node.getNodeValue());
}
} catch (XPathExpressionException e) {
logger.error(e.getMessage(), e);
}
return valuesList;
}
}

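For reference, the deleted DatasetRDAExportModel above resolved answer values by running JsonPath filters such as $..fields[*][?(@.rdaProperty == "dataset.personal_data")].id against the rendered dataset-description JSON and then looking the returned field ids up in the properties map. A self-contained sketch of that lookup step; the helper class name and the sample JSON in main are invented for illustration only:

import com.jayway.jsonpath.JsonPath;
import java.util.LinkedList;
import java.util.List;

public class RdaPropertyLookup {
    // Returns the ids of all fields whose rdaProperty matches the given RDA key,
    // using the same JsonPath filter shape as jsonValueListFromExpression above.
    public static List<String> fieldIdsFor(String json, String rdaKey) {
        String expression = "$..fields[*][?(@.rdaProperty == \"" + rdaKey + "\")].id";
        net.minidev.json.JSONArray hits = JsonPath.parse(json).read(expression);
        List<String> ids = new LinkedList<>();
        for (Object hit : hits) {
            ids.add(hit.toString());
        }
        return ids;
    }

    public static void main(String[] args) {
        String json = "{\"fields\":[{\"id\":\"f1\",\"rdaProperty\":\"dataset.personal_data\"}]}";
        System.out.println(fieldIdsFor(json, "dataset.personal_data")); // prints [f1]
    }
}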

@@ -1,29 +0,0 @@
package eu.eudat.models.data.rda;
public class DatasetSecurityAndPrivacyRDAExportModel {
private String description;
private String title;
public DatasetSecurityAndPrivacyRDAExportModel() {
}
public DatasetSecurityAndPrivacyRDAExportModel(String description, String title) {
this.description = description;
this.title = title;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
}


@@ -1,28 +0,0 @@
package eu.eudat.models.data.rda;
public class DatasetTechnicalResourceRDAExportModel {
private String description;
private String name;
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public DatasetTechnicalResourceRDAExportModel(String description, String name) {
this.description = description;
this.name = name;
}
public DatasetTechnicalResourceRDAExportModel() {
}
}


@@ -1,52 +0,0 @@
package eu.eudat.models.data.rda;
import eu.eudat.data.entities.UserInfo;
import java.util.LinkedList;
import java.util.List;
public class DmpContributorRDAExportModel {
private IdRDAExportModel contributor_id;
private String mbox;
private String name;
private List<String> role;
public IdRDAExportModel getContributor_id() {
return contributor_id;
}
public void setContributor_id(IdRDAExportModel contributor_id) {
this.contributor_id = contributor_id;
}
public String getMbox() {
return mbox;
}
public void setMbox(String mbox) {
this.mbox = mbox;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<String> getRole() {
return role;
}
public void setRole(List<String> role) {
this.role = role;
}
public DmpContributorRDAExportModel fromDataModel(UserInfo user, String role) {
DmpContributorRDAExportModel contributor = new DmpContributorRDAExportModel();
contributor.contributor_id = new IdRDAExportModel(user.getId().toString(), "other");
contributor.mbox = user.getEmail();
contributor.name = user.getName();
contributor.role = new LinkedList<>();
contributor.role.add(role);
return contributor;
}
}


@@ -1,36 +0,0 @@
package eu.eudat.models.data.rda;
public class DmpCostRDAExportModel {
private String currency_code; //Allowed values defined by ISO 4217.
private String description;
private String title;
private String value;
public String getCurrency_code() {
return currency_code;
}
public void setCurrency_code(String currency_code) {
this.currency_code = currency_code;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}


@@ -1,164 +0,0 @@
package eu.eudat.models.data.rda;
import eu.eudat.data.entities.DMP;
import eu.eudat.data.entities.Dataset;
import eu.eudat.data.entities.UserDMP;
import eu.eudat.data.entities.UserInfo;
import eu.eudat.logic.managers.DatasetManager;
import eu.eudat.models.data.security.Principal;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.stream.Collectors;
public class DmpRDAExportModel {
private ContactRDAExportModel contact;
private List<DmpContributorRDAExportModel> contributor;
private List<DmpCostRDAExportModel> cost;
private String created;
private List<DatasetRDAExportModel> dataset;
private String description;
private IdRDAExportModel dmp_id;
private String ethical_issues_description;
private String ethical_issues_exist; // Allowed Values: yes no unknown.
private String ethical_issues_report;
private String language;
private String modified;
private ProjectRDAExportModel project;
private String title;
public ContactRDAExportModel getContact() {
return contact;
}
public void setContact(ContactRDAExportModel contact) {
this.contact = contact;
}
public List<DmpContributorRDAExportModel> getContributor() {
return contributor;
}
public void setContributor(List<DmpContributorRDAExportModel> contributor) {
this.contributor = contributor;
}
public List<DmpCostRDAExportModel> getCost() {
return cost;
}
public void setCost(List<DmpCostRDAExportModel> cost) {
this.cost = cost;
}
public String getCreated() {
return created;
}
public void setCreated(String created) {
this.created = created;
}
public List<DatasetRDAExportModel> getDataset() {
return dataset;
}
public void setDataset(List<DatasetRDAExportModel> dataset) {
this.dataset = dataset;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public IdRDAExportModel getDmp_id() {
return dmp_id;
}
public void setDmp_id(IdRDAExportModel dmp_id) {
this.dmp_id = dmp_id;
}
public String getEthical_issues_description() {
return ethical_issues_description;
}
public void setEthical_issues_description(String ethical_issues_description) {
this.ethical_issues_description = ethical_issues_description;
}
public String getEthical_issues_exist() {
return ethical_issues_exist;
}
public void setEthical_issues_exist(String ethical_issues_exist) {
this.ethical_issues_exist = ethical_issues_exist;
}
public String getEthical_issues_report() {
return ethical_issues_report;
}
public void setEthical_issues_report(String ethical_issues_report) {
this.ethical_issues_report = ethical_issues_report;
}
public String getLanguage() {
return language;
}
public void setLanguage(String language) {
this.language = language;
}
public String getModified() {
return modified;
}
public void setModified(String modified) {
this.modified = modified;
}
public ProjectRDAExportModel getProject() {
return project;
}
public void setProject(ProjectRDAExportModel project) {
this.project = project;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public DmpRDAExportModel fromDataModel(DMP entity, DatasetManager datasetManager, Principal principal) {
DmpRDAExportModel dmpRda = new DmpRDAExportModel();
dmpRda.contact = new ContactRDAExportModel().fromDataModel(entity.getUsers().stream().filter(x -> x.getRole().equals(UserDMP.UserDMPRoles.OWNER.getValue())).findFirst().get().getUser());
if (entity.getUsers().stream().anyMatch(x -> x.getRole().equals(UserDMP.UserDMPRoles.USER.getValue()))) {
dmpRda.contributor = new LinkedList<>();
for (UserDMP userdmp : entity.getUsers().stream().filter(x -> x.getRole().equals(UserDMP.UserDMPRoles.USER.getValue())).collect(Collectors.toList())) {
dmpRda.contributor.add(new DmpContributorRDAExportModel().fromDataModel(userdmp.getUser(), UserDMP.UserDMPRoles.fromInteger(userdmp.getRole()).toString()));
}
}
dmpRda.cost = null;
SimpleDateFormat formatter = new SimpleDateFormat("dd-MM-yyyy");
dmpRda.created = formatter.format(entity.getCreated());
dmpRda.dataset = new LinkedList<>();
for (Dataset dataset : entity.getDataset()) {
if (dataset.getStatus() != Dataset.Status.DELETED.getValue() && dataset.getStatus() != Dataset.Status.CANCELED.getValue())
dmpRda.dataset.add(new DatasetRDAExportModel().fromDataModel(dataset, datasetManager, principal));
}
dmpRda.description = entity.getDescription().replace("\n", " ");
if (entity.getDoi() != null) {
dmpRda.dmp_id = new IdRDAExportModel(entity.getDoi(), "zenodo");
}
else {
dmpRda.dmp_id = new IdRDAExportModel(entity.getId().toString(), "other");
}
// Mock up data on "language" and "ethical_issues_*" for now.
dmpRda.ethical_issues_exist = "unknown";
dmpRda.language = "en";
dmpRda.modified = formatter.format(new Date());
dmpRda.project = new ProjectRDAExportModel().fromDataModel(entity.getGrant());
dmpRda.title = entity.getLabel();
return dmpRda;
}
}


@@ -1,43 +0,0 @@
package eu.eudat.models.data.rda;
import eu.eudat.data.entities.Funder;
import eu.eudat.data.entities.Grant;
public class FundingRDAExportModel {
private IdRDAExportModel funder_id;
private IdRDAExportModel grant_id;
private String funding_status;
public IdRDAExportModel getFunder_id() {
return funder_id;
}
public void setFunder_id(IdRDAExportModel funder_id) {
this.funder_id = funder_id;
}
public IdRDAExportModel getGrant_id() {
return grant_id;
}
public void setGrant_id(IdRDAExportModel grant_id) {
this.grant_id = grant_id;
}
public String getFunding_status() {
return funding_status;
}
public void setFunding_status(String funding_status) {
this.funding_status = funding_status;
}
public FundingRDAExportModel fromDataModel(Funder funder, Grant grant) {
FundingRDAExportModel funding = new FundingRDAExportModel();
funding.funding_status = "planned"; // mock data
if (funder != null) {
funding.funder_id = new IdRDAExportModel(funder.getReference(), "other");
}
if (grant != null) {
funding.grant_id = new IdRDAExportModel(grant.getReference(), "other");
}
return funding;
}
}


@@ -1,86 +0,0 @@
package eu.eudat.models.data.rda;
import java.util.List;
public class HostRDAExportModel {
private String availability;
private String backup__frequency;
private String backup_type;
private String certified_with; // Repository certified with one the following standards: DIN31644 / DINI-Zertifikat / DSA / ISO16363 / ISO16919 /TRAC / WDS / CoreTrustSeal
private String description;
private String geo_location; // Physical location of the data expressed using ISO 3166-1 country code.
private List<String> pid_system; // PID System: ark arxiv bibcode doi ean13 eissn handle igsn isbn issn istc lissn lsid pmid purl upc url urn other
private String storage_type;
private String support_versioning; // Allowed values: yes / no / unknown
private String title;
public String getAvailability() {
return availability;
}
public void setAvailability(String availability) {
this.availability = availability;
}
public String getBackup__frequency() {
return backup__frequency;
}
public void setBackup__frequency(String backup__frequency) {
this.backup__frequency = backup__frequency;
}
public String getBackup_type() {
return backup_type;
}
public void setBackup_type(String backup_type) {
this.backup_type = backup_type;
}
public String getCertified_with() {
return certified_with;
}
public void setCertified_with(String certified_with) {
this.certified_with = certified_with;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getGeo_location() {
return geo_location;
}
public void setGeo_location(String geo_location) {
this.geo_location = geo_location;
}
public List<String> getPid_system() {
return pid_system;
}
public void setPid_system(List<String> pid_system) {
this.pid_system = pid_system;
}
public String getStorage_type() {
return storage_type;
}
public void setStorage_type(String storage_type) {
this.storage_type = storage_type;
}
public String getSupport_versioning() {
return support_versioning;
}
public void setSupport_versioning(String support_versioning) {
this.support_versioning = support_versioning;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
}


@@ -1,25 +0,0 @@
package eu.eudat.models.data.rda;
public class IdRDAExportModel {
private String identifier;
private String type;
public String getIdentifier() {
return identifier;
}
public void setIdentifier(String identifier) {
this.identifier = identifier;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
IdRDAExportModel(String identifier, String type) {
this.identifier = identifier;
this.type = type;
}
}


@@ -1,20 +0,0 @@
package eu.eudat.models.data.rda;
public class LicenseRDAExportModel {
private String license_ref;
private String start_date; // If date is set in the future, it indicates embargo period.
public String getLicense_ref() {
return license_ref;
}
public void setLicense_ref(String license_ref) {
this.license_ref = license_ref;
}
public String getStart_date() {
return start_date;
}
public void setStart_date(String start_date) {
this.start_date = start_date;
}
}


@@ -1,54 +0,0 @@
package eu.eudat.models.data.rda;
import eu.eudat.data.entities.Grant;
import java.util.Date;
public class ProjectRDAExportModel {
private String title;
private String description;
private Date project_start;
private Date project_end;
private FundingRDAExportModel funding;
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Date getProject_start() {
return project_start;
}
public void setProject_start(Date project_start) {
this.project_start = project_start;
}
public Date getProject_end() {
return project_end;
}
public void setProject_end(Date project_end) {
this.project_end = project_end;
}
public FundingRDAExportModel getFunding() {
return funding;
}
public void setFunding(FundingRDAExportModel funding) {
this.funding = funding;
}
public ProjectRDAExportModel fromDataModel(Grant grant) {
this.funding = new FundingRDAExportModel().fromDataModel(grant.getFunder(), grant);
return this;
}
}


@@ -1,25 +0,0 @@
package eu.eudat.models.data.rda;
import eu.eudat.data.entities.DMP;
import eu.eudat.data.entities.Dataset;
import eu.eudat.logic.managers.DatasetManager;
import eu.eudat.models.data.security.Principal;
import java.util.LinkedList;
import java.util.List;
public class RDAExportModel {
private DmpRDAExportModel dmp;
public DmpRDAExportModel getDmp() {
return dmp;
}
public void setDmp(DmpRDAExportModel dmp) {
this.dmp = dmp;
}
public RDAExportModel fromDataModel(DMP dmp, DatasetManager datasetManager, Principal principal) {
this.dmp = new DmpRDAExportModel().fromDataModel(dmp, datasetManager, principal);
return this;
}
}


@@ -1,36 +0,0 @@
package eu.eudat.models.data.rda;
public class RdaField {
private String rdaProperty;
private String rdaValue;
private String fieldId;
private String fieldSetId;
public String getRdaProperty() {
return rdaProperty;
}
public void setRdaProperty(String rdaProperty) {
this.rdaProperty = rdaProperty;
}
public String getRdaValue() {
return rdaValue;
}
public void setRdaValue(String rdaValue) {
this.rdaValue = rdaValue;
}
public String getFieldId() {
return fieldId;
}
public void setFieldId(String fieldId) {
this.fieldId = fieldId;
}
public String getFieldSetId() {
return fieldSetId;
}
public void setFieldSetId(String fieldSetId) {
this.fieldSetId = fieldSetId;
}
}


@@ -2,18 +2,14 @@ package eu.eudat.models.data.user.components.datasetprofile;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.logic.utilities.builders.ModelBuilder;
import eu.eudat.logic.utilities.interfaces.ViewStyleDefinition;
import eu.eudat.models.data.components.commons.DefaultValue;
import eu.eudat.models.data.components.commons.Multiplicity;
import eu.eudat.models.data.components.commons.ViewStyle;
import eu.eudat.models.data.components.commons.Visibility;
import eu.eudat.models.data.properties.PropertiesGenerator;
import eu.eudat.models.data.user.composite.PropertiesModelBuilder;
import eu.eudat.logic.utilities.interfaces.ViewStyleDefinition;
import eu.eudat.logic.utilities.builders.ModelBuilder;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -26,6 +22,7 @@ import java.util.stream.Collectors;
public class Field implements Comparable, PropertiesModelBuilder, ViewStyleDefinition<eu.eudat.models.data.entities.xmlmodels.datasetprofiledefinition.Field>, PropertiesGenerator {
private static final Logger logger = LoggerFactory.getLogger(Field.class);
private static final ObjectMapper objectMapper = new ObjectMapper();
private String id;
private Integer ordinal;
private Object value;
@@ -205,10 +202,9 @@ public class Field implements Comparable, PropertiesModelBuilder, ViewStyleDefin
@Override
public void fromJsonObject(Map<String, Object> properties) {
try {
ObjectMapper mapper = new ObjectMapper();
List<String> stringList = mapper.readValue(properties.get(this.id).toString(), LinkedList.class);
List<String> stringList = objectMapper.readValue(properties.get(this.id).toString(), LinkedList.class);
this.value = stringList;
} catch (JSONException | NullPointerException | IOException e) {
} catch (NullPointerException | IOException e) {
try {
this.value = (String) properties.get(this.id);
} catch (ClassCastException ce) {

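The Field.fromJsonObject hunk above reuses the class-level objectMapper instead of instantiating a new ObjectMapper per call, and narrows the catch now that org.json (and its JSONException) is no longer involved; the existing fallback of treating the stored value as a plain string is kept. A rough standalone sketch of that parse-with-fallback behaviour (FieldValueParser is an illustrative name, not a class in the codebase):

import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;

public class FieldValueParser {
    private static final ObjectMapper objectMapper = new ObjectMapper();

    // Try to read the stored value as a JSON list of strings; if the value is missing
    // or is not valid JSON, fall back to using it as a plain string (as Field does).
    public static Object parseValue(Object raw) {
        try {
            List<String> list = objectMapper.readValue(raw.toString(), LinkedList.class);
            return list;
        } catch (NullPointerException | IOException e) {
            return raw instanceof String ? raw : null;
        }
    }
}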

@@ -175,12 +175,12 @@ public class FieldSet implements Comparable, PropertiesModelBuilder, ViewStyleDe
private FieldSet CloneForMultiplicity2(List<String> key, Map<String, Object> properties,String[] ids, int index){
FieldSet newFieldSet = new FieldSet();
newFieldSet.id = ids[0]+"_"+ids[1]+"_"+ids[2] + (ids.length > 4 ? "_" + ids[3] : "");
newFieldSet.id = ids[0] + "_" + ids[1] + "_" + ids[2] + (ids.length > 3 && !ids[ids.length - 1].isEmpty() ? "_" + ids[ids.length - 1] : "");
newFieldSet.description = this.description;
newFieldSet.extendedDescription = this.extendedDescription;
newFieldSet.additionalInformation=this.additionalInformation;
newFieldSet.title = this.title;
newFieldSet.ordinal = ids.length > 4 ? Integer.valueOf(ids[3]) : this.ordinal;
newFieldSet.ordinal = ids.length > 3 && !ids[ids.length - 1].isEmpty() && ids[ids.length - 1].matches("[0-9]+") ? Integer.valueOf(ids[ids.length - 1]) : this.ordinal;
newFieldSet.fields = new LinkedList();
for (Field field: this.fields) {

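The CloneForMultiplicity2 change above loosens the id handling: the old code assumed a fifth segment at ids[3], while the new code appends the last segment only when it is present and non-empty, and only treats it as the ordinal when it is purely numeric. A small sketch of that guard, assuming ids is the underscore-split multiplicity id exactly as used in the code above (the helper class is illustrative):

public class MultiplicityIdHelper {
    // Rebuilds the cloned field-set id: always the first three segments, plus the trailing
    // segment when one exists and is non-empty (mirrors the updated newFieldSet.id line).
    public static String cloneId(String[] ids) {
        String last = ids[ids.length - 1];
        boolean hasTrailingSegment = ids.length > 3 && !last.isEmpty();
        return ids[0] + "_" + ids[1] + "_" + ids[2] + (hasTrailingSegment ? "_" + last : "");
    }

    // Uses the trailing segment as the ordinal only when it is numeric, otherwise keeps the
    // fallback (mirrors the updated newFieldSet.ordinal line).
    public static Integer ordinal(String[] ids, Integer fallback) {
        String last = ids[ids.length - 1];
        boolean hasTrailingSegment = ids.length > 3 && !last.isEmpty();
        return hasTrailingSegment && last.matches("[0-9]+") ? Integer.valueOf(last) : fallback;
    }
}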

@@ -1,11 +1,15 @@
package eu.eudat.models.deposit.zenodo.mapper;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.data.entities.DMP;
import eu.eudat.data.entities.Organisation;
import eu.eudat.data.entities.UserDMP;
import eu.eudat.logic.proxy.config.DOIFunder;
import eu.eudat.logic.proxy.config.configloaders.ConfigLoader;
import eu.eudat.models.deposit.zenodo.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.env.Environment;
import java.time.Instant;
@@ -13,9 +17,16 @@ import java.util.*;
import java.util.stream.Collectors;
public class DMPToZenodoMapper {
private static final ObjectMapper objectmapper = new ObjectMapper();
private static final Logger logger = LoggerFactory.getLogger(DMPToZenodoMapper.class);
public static ZenodoDeposit fromDMP(DMP dmp, Environment environment, ConfigLoader configLoader) {
Map<String, Object> extraProperties = dmp.getExtraProperties() != null ? new org.json.JSONObject(dmp.getExtraProperties()).toMap() : new HashMap<>();
Map<String, Object> extraProperties = null;
try {
extraProperties = dmp.getExtraProperties() != null ? objectmapper.readValue(dmp.getExtraProperties(), LinkedHashMap.class) : new HashMap<>();
} catch (JsonProcessingException e) {
logger.error(e.getLocalizedMessage(), e);
}
ZenodoDeposit deposit = new ZenodoDeposit();
deposit.setMetadata(new ZenodoDepositMetadata());
deposit.getMetadata().setTitle(dmp.getLabel());
@@ -26,11 +37,11 @@ public class DMPToZenodoMapper {
ZenodoComunity community = new ZenodoComunity();
community.setIdentifier(environment.getProperty("zenodo.community"));
deposit.getMetadata().setCommunities(Collections.singletonList(community));
if (extraProperties.get("visible") == null) {
if (extraProperties != null && extraProperties.get("visible") == null) {
deposit.getMetadata().setAccessRight(ZenodoAccessRight.RESTRICTED);
deposit.getMetadata().setAccessConditions("");
} else {
if (((Boolean) extraProperties.get("visible"))) {
if (extraProperties != null && ((Boolean) extraProperties.get("visible"))) {
Instant publicationDate = Instant.parse(extraProperties.get("publicDate").toString());
if (publicationDate.isBefore(Instant.now())) {
deposit.getMetadata().setAccessRight(ZenodoAccessRight.OPEN);

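The DMPToZenodoMapper hunk above parses extraProperties with the shared Jackson mapper and null-guards the resulting map before reading the "visible" flag and the stored publicDate to pick the deposit's access right. A simplified illustration of that decision; VisibilityHelper is not part of the mapper, and the branches truncated in the hunk are summarised here as "not open":

import java.time.Instant;
import java.util.Map;

public class VisibilityHelper {
    // A DMP deposit is treated as open access only when extraProperties exists,
    // its "visible" flag is true, and the stored publicDate is already in the past.
    public static boolean isOpenAccess(Map<String, Object> extraProperties, Instant now) {
        if (extraProperties == null || extraProperties.get("visible") == null) {
            return false; // restricted, as in the first branch above
        }
        if (!((Boolean) extraProperties.get("visible"))) {
            return false;
        }
        Instant publicationDate = Instant.parse(extraProperties.get("publicDate").toString());
        return publicationDate.isBefore(now);
    }
}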

@@ -1,23 +1,23 @@
package eu.eudat.models.rda.mapper;
import java.util.*;
import java.util.stream.Collectors;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.logic.utilities.json.JavaToJson;
import eu.eudat.models.rda.Cost;
import eu.eudat.models.rda.PidSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.stream.Collectors;
public class CostRDAMapper {
private static final Logger logger = LoggerFactory.getLogger(DatasetRDAMapper.class);
private static final ObjectMapper objectMapper = new ObjectMapper();
public static Cost toRDA(Map<String, Object> cost) {
Cost rda = new Cost();
Map<String, Object> code = new org.json.JSONObject((String) cost.get("code")).toMap();
try {
Map<String, Object> code = objectMapper.readValue((String) cost.get("code"), LinkedHashMap.class);
rda.setCurrencyCode(Cost.CurrencyCode.fromValue((String) code.get("value")));
rda.setDescription((String) cost.get("description"));
if (cost.get("title") == null) {
@@ -25,6 +25,9 @@ public class CostRDAMapper {
}
rda.setTitle((String) cost.get("title"));
rda.setValue(((Integer) cost.get("value")).doubleValue());
} catch (JsonProcessingException e) {
logger.error(e.getLocalizedMessage(), e);
}
return rda;
}
@@ -55,8 +58,8 @@ public class CostRDAMapper {
rda.setValue(Double.valueOf(rdaValue));
}
else if(rdaProperty.contains("currency_code")){
HashMap<String,String> result =
new ObjectMapper().readValue(rdaValue, HashMap.class);
HashMap<String, String> result =
objectMapper.readValue(rdaValue, HashMap.class);
rda.setCurrencyCode(Cost.CurrencyCode.fromValue(result.get("value")));
}
else if(rdaProperty.contains("title")){

Some files were not shown because too many files have changed in this diff Show More