Merge branch 'Development'

Diamantis Tziotzios 2023-06-20 09:39:15 +03:00
commit 50c96ef821
276 changed files with 6287 additions and 2499 deletions

View File

@@ -16,4 +16,6 @@ public interface DatasetProfileDao extends DatabaseAccessLayer<DatasetProfile, U
QueryableList<DatasetProfile> getAuthenticated(QueryableList<DatasetProfile> query, UUID principal, List<Integer> roles);
List<DatasetProfile> getAllIds();
}

View File

@@ -15,6 +15,7 @@ import org.springframework.stereotype.Component;
import javax.persistence.criteria.Join;
import javax.persistence.criteria.JoinType;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
@@ -86,6 +87,11 @@ public class DatasetProfileDaoImpl extends DatabaseAccess<DatasetProfile> implem
return getDatabaseService().getQueryable(DatasetProfile.class);
}
@Override
public List<DatasetProfile> getAllIds(){
return getDatabaseService().getQueryable(DatasetProfile.class).withFields(Collections.singletonList("id")).toList();
}
@Override
public void delete(DatasetProfile item) {
this.getDatabaseService().delete(item);

View File

@@ -25,7 +25,12 @@ import java.util.stream.Collectors;
name = "fullyDetailed",
attributeNodes = {
@NamedAttributeNode("grant"), @NamedAttributeNode("profile"),
@NamedAttributeNode("users"), @NamedAttributeNode("organisations"), @NamedAttributeNode("researchers")}),
@NamedAttributeNode(value = "users", subgraph = "users"), @NamedAttributeNode("organisations"), @NamedAttributeNode("researchers")
},
subgraphs = {
@NamedSubgraph(name = "users", attributeNodes = {@NamedAttributeNode("user")})
}
),
@NamedEntityGraph(
name = "dmpRecentActivity",
attributeNodes = {

View File

@@ -34,7 +34,7 @@
<dependency>
<groupId>gr.cite.opendmp</groupId>
<artifactId>repositorydepositbase</artifactId>
<version>1.0.1</version>
<version>1.0.3</version>
</dependency>

View File

@@ -0,0 +1,46 @@
package eu.eudat.controllers;
import eu.eudat.logic.managers.MaterialManager;
import eu.eudat.logic.managers.MetricsManager;
import eu.eudat.types.MetricNames;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Objects;
import java.util.stream.Stream;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/material/about/"})
public class AboutController {
private Environment environment;
private MaterialManager materialManager;
private final MetricsManager metricsManager;
@Autowired
public AboutController(Environment environment, MaterialManager materialManager, MetricsManager metricsManager) {
this.environment = environment;
this.materialManager = materialManager;
this.metricsManager = metricsManager;
}
@RequestMapping(path = "{lang}", method = RequestMethod.GET )
public ResponseEntity<byte[]> getAbout(@PathVariable(name = "lang") String lang) throws IOException {
// long files = 0;
// try (Stream<Path> paths = Files.list(Paths.get(Objects.requireNonNull(this.environment.getProperty("about.path"))))) {
// files = paths.count();
// }
// metricsManager.calculateValue(MetricNames.LANGUAGES, (int) files, null);
try (Stream<Path> paths = Files.walk(Paths.get(Objects.requireNonNull(this.environment.getProperty("about.path"))))) {
return this.materialManager.getResponseEntity(lang, paths);
}
}
}

View File

@@ -190,8 +190,10 @@ public class Admin extends BaseController {
.status(ApiMessageCode.SUCCESS_MESSAGE).message(""));
}
@RequestMapping(method = RequestMethod.GET, value = {"/getRDACommonStandards"}, produces = "application/json")
public ResponseEntity getRDACommonStandards(@ClaimedAuthorities(claims = {ADMIN, DATASET_PROFILE_MANAGER}) Principal principal) {
return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<List<String>>().status(ApiMessageCode.SUCCESS_MESSAGE).payload(configLoader.getRdaProperties()));
@RequestMapping(method = RequestMethod.GET, value = {"/getSemantics"}, produces = "application/json")
public ResponseEntity<ResponseItem<List<String>>> getSemantics(@RequestParam(value = "query", required = false) String query, @ClaimedAuthorities(claims = {ADMIN, DATASET_PROFILE_MANAGER}) Principal principal) {
List<String> semantics = this.datasetProfileManager.getSemantics(query);
return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<List<String>>().status(ApiMessageCode.SUCCESS_MESSAGE).payload(semantics));
}
}

View File

@@ -9,12 +9,10 @@ import eu.eudat.data.entities.DMP;
import eu.eudat.data.entities.Dataset;
import eu.eudat.data.query.items.table.datasetprofile.DatasetProfileTableRequestItem;
import eu.eudat.data.query.items.table.dmp.DataManagementPlanTableRequest;
import eu.eudat.data.query.items.table.dmp.DataManagmentPlanPublicTableRequest;
import eu.eudat.exceptions.datamanagementplan.DMPNewVersionException;
import eu.eudat.exceptions.datamanagementplan.DMPWithDatasetsDeleteException;
import eu.eudat.exceptions.security.UnauthorisedException;
import eu.eudat.logic.managers.DataManagementPlanManager;
import eu.eudat.logic.managers.DatasetManager;
import eu.eudat.logic.proxy.config.configloaders.ConfigLoader;
import eu.eudat.logic.security.claims.ClaimedAuthorities;
import eu.eudat.logic.services.ApiContext;
@@ -152,7 +150,7 @@ public class DMPs extends BaseController {
@RequestMapping(method = RequestMethod.GET, value = {"/publicOverview/{id}"})
public @ResponseBody
ResponseEntity getOverviewSinglePublic(@PathVariable String id, @ClaimedAuthorities(claims = {Authorities.ADMIN, Authorities.MANAGER, Authorities.USER, Authorities.ANONYMOUS}) Principal principal) throws Exception {
ResponseEntity<ResponseItem<DataManagementPlanOverviewModel>> getOverviewSinglePublic(@PathVariable String id, @ClaimedAuthorities(claims = {Authorities.ADMIN, Authorities.MANAGER, Authorities.USER, Authorities.ANONYMOUS}) Principal principal) throws Exception {
// try {
DataManagementPlanOverviewModel dataManagementPlan = this.dataManagementPlanManager.getOverviewSingle(id, principal, true);
return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<DataManagementPlanOverviewModel>().status(ApiMessageCode.NO_MESSAGE).payload(dataManagementPlan));
@@ -184,7 +182,21 @@ public class DMPs extends BaseController {
public @ResponseBody
ResponseEntity getRDAJsonDocument(@PathVariable String id, @ClaimedAuthorities(claims = {Authorities.ADMIN, Authorities.MANAGER, Authorities.USER, Authorities.ANONYMOUS}) Principal principal) {
try {
return this.dataManagementPlanManager.getRDAJsonDocument(id, principal);
FileEnvelope rdaJsonDocument = this.dataManagementPlanManager.getRDAJsonDocument(id, principal);
HttpHeaders responseHeaders = new HttpHeaders();
responseHeaders.setContentLength(rdaJsonDocument.getFile().length());
responseHeaders.setContentType(MediaType.APPLICATION_OCTET_STREAM);
responseHeaders.set("Content-Disposition", "attachment;filename=" + rdaJsonDocument.getFilename());
responseHeaders.set("Access-Control-Expose-Headers", "Content-Disposition");
responseHeaders.get("Access-Control-Expose-Headers").add("Content-Type");
InputStream resource = new FileInputStream(rdaJsonDocument.getFile());
byte[] content = org.apache.poi.util.IOUtils.toByteArray(resource);
resource.close();
Files.deleteIfExists(rdaJsonDocument.getFile().toPath());
return new ResponseEntity<>(content, responseHeaders, HttpStatus.OK);
} catch (Exception e) {
return ResponseEntity.status(HttpStatus.EXPECTATION_FAILED).body(new ResponseItem<>().message(e.getMessage()).status(ApiMessageCode.ERROR_MESSAGE));
}
@@ -321,21 +333,6 @@ public class DMPs extends BaseController {
}
}
/*
* DOI Generation
* */
@RequestMapping(method = RequestMethod.POST, value = {"/createZenodoDoi/{id}"})
public ResponseEntity<ResponseItem<String>> createZenodoDoi(@PathVariable String id, Principal principal) {
try {
String zenodoDOI = this.dataManagementPlanManager.createZenodoDoi(UUID.fromString(id), principal);
return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<String>().status(ApiMessageCode.SUCCESS_MESSAGE).message("Successfully created DOI for Data Datamanagement Plan in question.").payload(zenodoDOI));
} catch (Exception e) {
logger.error(e.getMessage(), e);
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(new ResponseItem<String>().status(ApiMessageCode.ERROR_MESSAGE).message("Failed to create DOI for the Data Management Plan: " + e.getMessage()));
}
}
/*
* Data Index
* */

View File

@@ -117,7 +117,7 @@ public class Datasets extends BaseController {
@RequestMapping(method = RequestMethod.GET, value = {"/publicOverview/{id}"})
public @ResponseBody
ResponseEntity getOverviewSinglePublic(@PathVariable String id, @ClaimedAuthorities(claims = {Authorities.ADMIN, Authorities.MANAGER, Authorities.USER, Authorities.ANONYMOUS}) Principal principal) throws Exception {
ResponseEntity<ResponseItem<DatasetOverviewModel>> getOverviewSinglePublic(@PathVariable String id, @ClaimedAuthorities(claims = {Authorities.ADMIN, Authorities.MANAGER, Authorities.USER, Authorities.ANONYMOUS}) Principal principal) throws Exception {
// try {
DatasetOverviewModel dataset = this.datasetManager.getOverviewSingle(id, principal, true);
return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<DatasetOverviewModel>().status(ApiMessageCode.NO_MESSAGE).payload(dataset));

View File

@@ -0,0 +1,57 @@
package eu.eudat.controllers;
import eu.eudat.exceptions.emailconfirmation.HasConfirmedEmailException;
import eu.eudat.exceptions.emailconfirmation.TokenExpiredException;
import eu.eudat.logic.managers.UnlinkEmailConfirmationManager;
import eu.eudat.models.data.helpers.responses.ResponseItem;
import eu.eudat.models.data.security.Principal;
import eu.eudat.models.data.userinfo.UserUnlinkRequestModel;
import eu.eudat.types.ApiMessageCode;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import javax.transaction.Transactional;
@RestController
@CrossOrigin
@RequestMapping(value = "api/emailUnlinkConfirmation")
public class EmailUnlinkConfirmation {
private UnlinkEmailConfirmationManager unlinkEmailConfirmationManager;
@Autowired
public EmailUnlinkConfirmation(UnlinkEmailConfirmationManager unlinkEmailConfirmationManager){
this.unlinkEmailConfirmationManager = unlinkEmailConfirmationManager;
}
@Transactional
@RequestMapping(method = RequestMethod.GET, value = {"/{emailToken}"})
public @ResponseBody
ResponseEntity<ResponseItem> emailConfirmation(@PathVariable(value = "emailToken") String token) {
try {
this.unlinkEmailConfirmationManager.confirmEmail(token);
return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem().status(ApiMessageCode.SUCCESS_MESSAGE));
} catch (TokenExpiredException | HasConfirmedEmailException ex) {
if (ex instanceof TokenExpiredException) {
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(new ResponseItem().status(ApiMessageCode.NO_MESSAGE));
}
else {
return ResponseEntity.status(HttpStatus.FOUND).body(new ResponseItem().status(ApiMessageCode.WARN_MESSAGE));
}
}
}
@Transactional
@RequestMapping(method = RequestMethod.POST, consumes = "application/json", produces = "application/json")
public @ResponseBody
ResponseEntity<ResponseItem> sendUnlinkConfirmationEmail(@RequestBody UserUnlinkRequestModel requestModel, Principal principal) {
try {
this.unlinkEmailConfirmationManager.sendConfirmationEmail(requestModel.getEmail(), principal, requestModel.getUserId(), requestModel.getProvider());
return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem().status(ApiMessageCode.SUCCESS_MESSAGE));
} catch (Exception ex) {
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(new ResponseItem().status(ApiMessageCode.NO_MESSAGE).message("Could not send unlink email."));
}
}
}

View File

@@ -0,0 +1,45 @@
package eu.eudat.controllers;
import eu.eudat.logic.managers.MaterialManager;
import eu.eudat.logic.managers.MetricsManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Objects;
import java.util.stream.Stream;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/material/faq/"})
public class FaqController {
private Environment environment;
private MaterialManager materialManager;
private final MetricsManager metricsManager;
@Autowired
public FaqController(Environment environment, MaterialManager materialManager, MetricsManager metricsManager) {
this.environment = environment;
this.materialManager = materialManager;
this.metricsManager = metricsManager;
}
@RequestMapping(path = "{lang}", method = RequestMethod.GET )
public ResponseEntity<byte[]> getFaq(@PathVariable(name = "lang") String lang) throws IOException {
// long files = 0;
// try (Stream<Path> paths = Files.list(Paths.get(Objects.requireNonNull(this.environment.getProperty("faq.path"))))) {
// files = paths.count();
// }
// metricsManager.calculateValue(MetricNames.LANGUAGES, (int) files, null);
try (Stream<Path> paths = Files.walk(Paths.get(Objects.requireNonNull(this.environment.getProperty("faq.path"))))) {
return this.materialManager.getResponseEntity(lang, paths);
}
}
}

View File

@@ -0,0 +1,45 @@
package eu.eudat.controllers;
import eu.eudat.logic.managers.MaterialManager;
import eu.eudat.logic.managers.MetricsManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Objects;
import java.util.stream.Stream;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/material/glossary/"})
public class GlossaryController {
private Environment environment;
private MaterialManager materialManager;
private final MetricsManager metricsManager;
@Autowired
public GlossaryController(Environment environment, MaterialManager materialManager, MetricsManager metricsManager) {
this.environment = environment;
this.materialManager = materialManager;
this.metricsManager = metricsManager;
}
@RequestMapping(path = "{lang}", method = RequestMethod.GET )
public ResponseEntity<byte[]> getGlossary(@PathVariable(name = "lang") String lang) throws IOException {
// long files = 0;
// try (Stream<Path> paths = Files.list(Paths.get(Objects.requireNonNull(this.environment.getProperty("glossary.path"))))) {
// files = paths.count();
// }
// metricsManager.calculateValue(MetricNames.LANGUAGES, (int) files, null);
try (Stream<Path> paths = Files.walk(Paths.get(Objects.requireNonNull(this.environment.getProperty("glossary.path"))))) {
return this.materialManager.getResponseEntity(lang, paths);
}
}
}

View File

@@ -0,0 +1,54 @@
package eu.eudat.controllers;
import eu.eudat.exceptions.datasetprofile.DatasetProfileNewVersionException;
import eu.eudat.logic.managers.DatasetProfileManager;
import eu.eudat.logic.security.claims.ClaimedAuthorities;
import eu.eudat.logic.services.ApiContext;
import eu.eudat.models.data.admin.composite.DatasetProfile;
import eu.eudat.models.data.helpers.responses.ResponseItem;
import eu.eudat.models.data.security.Principal;
import eu.eudat.types.ApiMessageCode;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import javax.transaction.Transactional;
import static eu.eudat.types.Authorities.ADMIN;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/management/"})
public class ManagementController extends BaseController {
private DatasetProfileManager datasetProfileManager;
@Autowired
public ManagementController(ApiContext apiContext, DatasetProfileManager datasetProfileManager){
super(apiContext);
this.datasetProfileManager = datasetProfileManager;
}
@Transactional
@RequestMapping(method = RequestMethod.POST, value = {"/addSemantics"})
public ResponseEntity addSemanticsInDatasetProfiles(@ClaimedAuthorities(claims = {ADMIN}) Principal principal) throws Exception {
try {
this.datasetProfileManager.addSemanticsInDatasetProfiles();
return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<Void>().status(ApiMessageCode.SUCCESS_MESSAGE));
} catch (Exception exception) {
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(new ResponseItem<Void>().status(ApiMessageCode.ERROR_MESSAGE).message(exception.getMessage()));
}
}
@Transactional
@RequestMapping(method = RequestMethod.POST, value = {"/addRdaInSemantics"})
public ResponseEntity addRdaInSemanticsInDatasetProfiles(@ClaimedAuthorities(claims = {ADMIN}) Principal principal) throws Exception {
try {
this.datasetProfileManager.addRdaInSemanticsInDatasetProfiles();
return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<Void>().status(ApiMessageCode.SUCCESS_MESSAGE));
} catch (Exception exception) {
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(new ResponseItem<Void>().status(ApiMessageCode.ERROR_MESSAGE).message(exception.getMessage()));
}
}
}

View File

@@ -0,0 +1,78 @@
package eu.eudat.controllers;
import eu.eudat.logic.proxy.config.configloaders.ConfigLoader;
import eu.eudat.logic.security.customproviders.ConfigurableProvider.entities.saml2.Saml2ConfigurableProvider;
import eu.eudat.logic.security.validators.configurableProvider.Saml2SSOUtils;
import eu.eudat.logic.services.ApiContext;
import eu.eudat.models.data.helpers.responses.ResponseItem;
import eu.eudat.types.ApiMessageCode;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.nio.charset.StandardCharsets;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/saml2/"})
public class Saml2MetadataController extends BaseController {
private final ConfigLoader configLoader;
@Autowired
public Saml2MetadataController(ApiContext apiContext, ConfigLoader configLoader) {
super(apiContext);
this.configLoader = configLoader;
}
@RequestMapping(method = RequestMethod.GET, value = {"metadata/{configurableProviderId}"})
public @ResponseBody
ResponseEntity getMetadata(@PathVariable String configurableProviderId) {
Saml2ConfigurableProvider saml2ConfigurableProvider = (Saml2ConfigurableProvider) this.configLoader.getConfigurableProviders().getProviders().stream()
.filter(prov -> prov.getConfigurableLoginId().equals(configurableProviderId))
.findFirst().orElse(null);
if (saml2ConfigurableProvider != null) {
try {
String metadataXml = Saml2SSOUtils.getMetadata(saml2ConfigurableProvider);
HttpHeaders responseHeaders = new HttpHeaders();
responseHeaders.setContentLength(metadataXml.length());
responseHeaders.setContentType(MediaType.APPLICATION_OCTET_STREAM);
responseHeaders.set("Content-Disposition", "attachment;filename=" + configurableProviderId + ".xml");
responseHeaders.set("Access-Control-Expose-Headers", "Content-Disposition");
responseHeaders.get("Access-Control-Expose-Headers").add("Content-Type");
return new ResponseEntity<>(metadataXml.getBytes(StandardCharsets.UTF_8),
responseHeaders,
HttpStatus.OK);
} catch (Exception e) {
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(new ResponseItem<String>().status(ApiMessageCode.ERROR_MESSAGE).message("Failed to fetch metadata."));
}
}
else {
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(new ResponseItem<String>().status(ApiMessageCode.ERROR_MESSAGE).message("Failed to fetch metadata."));
}
}
@RequestMapping(method = RequestMethod.GET, value = {"authnRequest/{configurableProviderId}"})
public @ResponseBody
ResponseEntity<ResponseItem<String>> getAuthnRequest(@PathVariable String configurableProviderId) {
Saml2ConfigurableProvider saml2ConfigurableProvider = (Saml2ConfigurableProvider) this.configLoader.getConfigurableProviders().getProviders().stream()
.filter(prov -> prov.getConfigurableLoginId().equals(configurableProviderId))
.findFirst().orElse(null);
if (saml2ConfigurableProvider != null) {
try {
String authnRequestXml = Saml2SSOUtils.getAuthnRequest(saml2ConfigurableProvider);
return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<String>().status(ApiMessageCode.SUCCESS_MESSAGE).message("Created").payload(authnRequestXml));
}
catch (Exception e) {
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(new ResponseItem<String>().status(ApiMessageCode.ERROR_MESSAGE).message("Failed to create authentication request."));
}
}
else {
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(new ResponseItem<String>().status(ApiMessageCode.ERROR_MESSAGE).message("Unknown provider."));
}
}
}

View File

@@ -0,0 +1,45 @@
package eu.eudat.controllers;
import eu.eudat.logic.managers.MaterialManager;
import eu.eudat.logic.managers.MetricsManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Objects;
import java.util.stream.Stream;
@RestController
@CrossOrigin
@RequestMapping(value = {"/api/material/termsofservice/"})
public class TermsOfServiceController {
private Environment environment;
private MaterialManager materialManager;
private final MetricsManager metricsManager;
@Autowired
public TermsOfServiceController(Environment environment, MaterialManager materialManager, MetricsManager metricsManager) {
this.environment = environment;
this.materialManager = materialManager;
this.metricsManager = metricsManager;
}
@RequestMapping(path = "{lang}", method = RequestMethod.GET )
public ResponseEntity<byte[]> getTermsOfService(@PathVariable(name = "lang") String lang) throws IOException {
// long files = 0;
// try (Stream<Path> paths = Files.list(Paths.get(Objects.requireNonNull(this.environment.getProperty("termsofservice.path"))))) {
// files = paths.count();
// }
// metricsManager.calculateValue(MetricNames.LANGUAGES, (int) files, null);
try (Stream<Path> paths = Files.walk(Paths.get(Objects.requireNonNull(this.environment.getProperty("termsofservice.path"))))) {
return this.materialManager.getResponseEntity(lang, paths);
}
}
}

View File

@@ -1,5 +1,6 @@
package eu.eudat.controllers;
import eu.eudat.logic.managers.MaterialManager;
import eu.eudat.logic.managers.MetricsManager;
import eu.eudat.logic.security.claims.ClaimedAuthorities;
import eu.eudat.models.data.helpers.responses.ResponseItem;
@@ -32,47 +33,26 @@ import static eu.eudat.types.Authorities.ADMIN;
public class UserGuideController {
private Environment environment;
private MaterialManager materialManager;
private final MetricsManager metricsManager;
@Autowired
public UserGuideController(Environment environment, MetricsManager metricsManager) {
public UserGuideController(Environment environment, MaterialManager materialManager, MetricsManager metricsManager) {
this.environment = environment;
this.materialManager = materialManager;
this.metricsManager = metricsManager;
}
@RequestMapping(path = "{lang}", method = RequestMethod.GET )
public ResponseEntity getUserGuide(@PathVariable(name = "lang") String lang) throws IOException {
public ResponseEntity<byte[]> getUserGuide(@PathVariable(name = "lang") String lang) throws IOException {
long files = 0;
try (Stream<Path> paths = Files.list(Paths.get(Objects.requireNonNull(this.environment.getProperty("userguide.path"))))) {
files = paths.count();
}
metricsManager.calculateValue(MetricNames.LANGUAGES, (int) files, null);
try (Stream<Path> paths = Files.walk(Paths.get(Objects.requireNonNull(this.environment.getProperty("userguide.path"))))) {
List<String> result = paths.filter(Files::isRegularFile)
.map(Path::toString).collect(Collectors.toList());
String fileName = result.stream().filter(guide -> guide.contains("_" + lang)).findFirst().orElse(null);
if (fileName == null) {
fileName = result.stream().filter(guide -> guide.contains("_en")).findFirst().get();
return this.materialManager.getResponseEntity(lang, paths);
}
InputStream is = new FileInputStream(fileName);
Path path = Paths.get(fileName);
HttpHeaders responseHeaders = new HttpHeaders();
responseHeaders.setContentLength(is.available());
responseHeaders.setContentType(MediaType.TEXT_HTML);
responseHeaders.set("Content-Disposition", "attachment;filename=" + path.getFileName().toString());
responseHeaders.set("Access-Control-Expose-Headers", "Content-Disposition");
responseHeaders.get("Access-Control-Expose-Headers").add("Content-Type");
byte[] content = new byte[is.available()];
is.read(content);
is.close();
return new ResponseEntity<>(content, responseHeaders, HttpStatus.OK);
}
}
@RequestMapping(value = "current", method = RequestMethod.POST)

View File

@@ -88,16 +88,6 @@ public class Users extends BaseController {
return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<DataTableData<UserListingModel>>().payload(dataTable).status(ApiMessageCode.NO_MESSAGE));
}
@RequestMapping(method = RequestMethod.GET, value = {"/hasDOIToken"}, consumes = "application/json", produces = "application/json")
public @ResponseBody
ResponseEntity<ResponseItem<Boolean>> hasDOIToken(Principal principal) throws NullEmailException {
try {
return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<Boolean>().payload(this.userManager.isDOITokenValid(principal)).status(ApiMessageCode.NO_MESSAGE));
} catch (NonValidTokenException | ExpiredTokenException | IOException e) {
return ResponseEntity.status(460).body(new ResponseItem<Boolean>().payload(false).status(ApiMessageCode.ERROR_MESSAGE).message(e.getMessage()));
}
}
@Transactional
@RequestMapping(method = RequestMethod.POST, value = {"/registerDOIToken"}, consumes = "application/json", produces = "application/json")
public @ResponseBody

View File

@@ -481,6 +481,7 @@ public class DataManagementPlanManager {
checkDmpValidationRules(newDmp);
}
UserInfo user = apiContext.getOperationsContext().getDatabaseRepository().getUserInfoDao().find(principal.getId());
newDmp.setCreator(user);
createOrganisationsIfTheyDontExist(newDmp, apiContext.getOperationsContext().getDatabaseRepository().getOrganisationDao());
createResearchersIfTheyDontExist(newDmp, apiContext.getOperationsContext().getDatabaseRepository().getResearcherDao(), user);
@@ -645,6 +646,7 @@ public class DataManagementPlanManager {
if (latestVersionDMP.get(0).getVersion().equals(oldDmp.getVersion())) {
DMP newDmp = dataManagementPlan.toDataModel();
UserInfo user = apiContext.getOperationsContext().getBuilderFactory().getBuilder(UserInfoBuilder.class).id(principal.getId()).build();
newDmp.setCreator(user);
createOrganisationsIfTheyDontExist(newDmp, databaseRepository.getOrganisationDao());
createResearchersIfTheyDontExist(newDmp, databaseRepository.getResearcherDao(), user);
@@ -705,6 +707,7 @@ public class DataManagementPlanManager {
DMP newDmp = dataManagementPlan.toDataModel();
UserInfo user = apiContext.getOperationsContext().getBuilderFactory().getBuilder(UserInfoBuilder.class).id(principal.getId()).build();
newDmp.setCreator(user);
createOrganisationsIfTheyDontExist(newDmp, databaseRepository.getOrganisationDao());
createResearchersIfTheyDontExist(newDmp, databaseRepository.getResearcherDao(), user);
@@ -1064,9 +1067,6 @@ public class DataManagementPlanManager {
});
UserInfo user = apiContext.getOperationsContext().getDatabaseRepository().getUserInfoDao().find(principal.getId());
sendNotification(dmp, user, NotificationType.DMP_PUBLISH);
// if (dmp.getDois() != null && !dmp.getDois().isEmpty()) {
// this.createZenodoDoi(dmp.getId(), principal, true);
// }
}
@Transactional
@@ -1456,6 +1456,40 @@ public class DataManagementPlanManager {
}
dmpElement.appendChild(dmpProfileElement);
Element dmpContactElement = xmlDoc.createElement("contact");
Element dmpContactName = xmlDoc.createElement("name");
Element dmpContactEmail = xmlDoc.createElement("email");
if(dmp.getCreator() != null){
dmpContactName.setTextContent(dmp.getCreator().getName());
dmpContactEmail.setTextContent(dmp.getCreator().getEmail());
}
else{
Iterator<UserDMP> users = dmp.getUsers().iterator();
if(users.hasNext()){
UserDMP creator = users.next();
dmpContactName.setTextContent(creator.getUser().getName());
dmpContactEmail.setTextContent(creator.getUser().getEmail());
}
}
dmpContactElement.appendChild(dmpContactName);
dmpContactElement.appendChild(dmpContactEmail);
dmpElement.appendChild(dmpContactElement);
Element dmpContributorsElement = xmlDoc.createElement("contributors");
Iterator<UserDMP> users = dmp.getUsers().iterator();
while(users.hasNext()){
Element dmpContributorElement = xmlDoc.createElement("contributor");
Element dmpContributorName = xmlDoc.createElement("name");
Element dmpContributorEmail= xmlDoc.createElement("email");
UserDMP contributor = users.next();
dmpContributorName.setTextContent(contributor.getUser().getName());
dmpContributorEmail.setTextContent(contributor.getUser().getEmail());
dmpContributorElement.appendChild(dmpContributorName);
dmpContributorElement.appendChild(dmpContributorEmail);
dmpContributorsElement.appendChild(dmpContributorElement);
}
dmpElement.appendChild(dmpContributorsElement);
// Funder.
Element funder = xmlDoc.createElement("funder");
Element funderLabel = xmlDoc.createElement("label");
@@ -1464,6 +1498,13 @@ public class DataManagementPlanManager {
funderId.setTextContent(dmp.getGrant().getFunder().getId().toString());
funder.appendChild(funderLabel);
funder.appendChild(funderId);
if(dmp.getGrant().getFunder().getReference() != null){
String referencePrefix = dmp.getGrant().getFunder().getReference().split(":")[0];
String shortReference = dmp.getGrant().getFunder().getReference().substring(referencePrefix.length() + 1);
Element funderReference = xmlDoc.createElement("reference");
funderReference.setTextContent(shortReference);
funder.appendChild(funderReference);
}
dmpElement.appendChild(funder);
// Grant.
Element grant = xmlDoc.createElement("grant");
@@ -1473,15 +1514,31 @@ public class DataManagementPlanManager {
grantId.setTextContent(dmp.getGrant().getId().toString());
grant.appendChild(grantLabel);
grant.appendChild(grantId);
if(dmp.getGrant().getReference() != null) {
String referencePrefix = dmp.getGrant().getReference().split(":")[0];
String shortReference = dmp.getGrant().getReference().substring(referencePrefix.length() + 1);
Element grantReference = xmlDoc.createElement("reference");
grantReference.setTextContent(shortReference);
grant.appendChild(grantReference);
}
dmpElement.appendChild(grant);
// Project.
Element project = xmlDoc.createElement("project");
Element projectLabel = xmlDoc.createElement("label");
Element projectId = xmlDoc.createElement("id");
projectLabel.setTextContent(dmp.getProject().getLabel());
Element projectLabel = xmlDoc.createElement("label");
Element projectDescription = xmlDoc.createElement("description");
Element projectStartDate = xmlDoc.createElement("start");
Element projectEndDate = xmlDoc.createElement("end");
projectId.setTextContent(dmp.getProject().getId().toString());
project.appendChild(projectLabel);
projectLabel.setTextContent(dmp.getProject().getLabel());
projectDescription.setTextContent(dmp.getProject().getDescription());
projectStartDate.setTextContent(dmp.getProject().getStartdate().toString());
projectEndDate.setTextContent(dmp.getProject().getEnddate().toString());
project.appendChild(projectId);
project.appendChild(projectLabel);
project.appendChild(projectDescription);
project.appendChild(projectStartDate);
project.appendChild(projectEndDate);
dmpElement.appendChild(project);
Element organisationsElement = xmlDoc.createElement("organisations");
@@ -1513,11 +1570,20 @@ public class DataManagementPlanManager {
for (Dataset dataset : datasets) {
Element datasetElement = xmlDoc.createElement("dataset");
Element datsetProfileElement = xmlDoc.createElement("profile");
datasetElement.setAttribute("name", dataset.getLabel());
Element datasetDescriptionElement = xmlDoc.createElement("description");
datasetElement.appendChild(datasetDescriptionElement);
datasetDescriptionElement.setTextContent(dataset.getDescription());
Element datsetProfileElement = xmlDoc.createElement("profile-id");
datasetElement.appendChild(datsetProfileElement);
datsetProfileElement.setTextContent(dataset.getProfile().getId().toString());
Element datsetProfileLabelElement = xmlDoc.createElement("profile-label");
datasetElement.appendChild(datsetProfileLabelElement);
datsetProfileLabelElement.setTextContent(dataset.getProfile().getLabel());
DatasetWizardModel datasetWizardModel = new DatasetWizardModel();
Map<String, Object> properties = new HashMap<>();
if (dataset.getProperties() != null) {
@@ -1535,12 +1601,18 @@ public class DataManagementPlanManager {
// Get DatasetProfiles from DMP to add to XML.
for (DatasetProfile datasetProfile : dmp.getAssociatedDmps()) {
Element profile = xmlDoc.createElement("profile");
Element profileLabel = xmlDoc.createElement("profilelabel");
profileLabel.setTextContent(datasetProfile.getLabel());
profile.appendChild(profileLabel);
Element profileId = xmlDoc.createElement("profileId");
profileId.setTextContent(datasetProfile.getId().toString());
profile.appendChild(profileId);
Element profileGroupId = xmlDoc.createElement("profileGroupId");
profileGroupId.setTextContent(datasetProfile.getGroupId().toString());
profile.appendChild(profileGroupId);
Element profileLabel = xmlDoc.createElement("profileLabel");
profileLabel.setTextContent(datasetProfile.getLabel());
profile.appendChild(profileLabel);
Element profileVersion = xmlDoc.createElement("profileVersion");
profileVersion.setTextContent(String.valueOf(datasetProfile.getVersion()));
profile.appendChild(profileVersion);
profiles.appendChild(profile);
}
dmpElement.appendChild(profiles);
@@ -1556,13 +1628,12 @@ public class DataManagementPlanManager {
return fileEnvelope;
}
public ResponseEntity<byte[]> getRDAJsonDocument(String id, Principal principal) throws Exception {
public FileEnvelope getRDAJsonDocument(String id, Principal principal) throws Exception {
eu.eudat.data.entities.DMP dmp = databaseRepository.getDmpDao().find(UUID.fromString(id));
if (!dmp.isPublic() && dmp.getUsers().stream().noneMatch(userInfo -> userInfo.getUser().getId() == principal.getId()))
throw new UnauthorisedException();
// RDAExportModel rdaExportModel = new RDAExportModel().fromDataModel(dmp, datasetManager, principal);
final Boolean isFinalized = dmp.getStatus() == DMP.DMPStatus.FINALISED.getValue();
final Boolean isPublic = dmp.isPublic();
final boolean isFinalized = dmp.getStatus() == DMP.DMPStatus.FINALISED.getValue();
final boolean isPublic = dmp.isPublic();
dmp.setDataset(dmp.getDataset().stream()
.filter(dataset -> dataset.getStatus() != Dataset.Status.DELETED.getValue() &&
dataset.getStatus() != Dataset.Status.CANCELED.getValue())
@@ -1570,15 +1641,12 @@ public class DataManagementPlanManager {
.collect(Collectors.toSet()));
String result = rdaManager.convertToRDA(dmp);
/*ObjectMapper mapper = new ObjectMapper();
mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);*/
String fileName = "DMP_" + dmp.getGrant().getLabel() + "_" + dmp.getVersion();//dmp.getLabel();
String fileName = "DMP_" + dmp.getGrant().getLabel() + "_" + dmp.getVersion();
fileName = fileName.replaceAll("[^a-zA-Z0-9+ ]", "").replace(" ", "_").replace(",", "_");
String uuid = UUID.randomUUID().toString();
File file = new File(this.environment.getProperty("temp.temp") + uuid + ".json");
OutputStream output = new FileOutputStream(file);
try {
// mapper.writeValue(file, rdaExportModel);
output.write(result.getBytes());
output.flush();
output.close();
@@ -1586,18 +1654,10 @@ public class DataManagementPlanManager {
logger.error(e.getMessage(), e);
}
InputStream resource = new FileInputStream(file);
HttpHeaders responseHeaders = new HttpHeaders();
responseHeaders.setContentLength(file.length());
responseHeaders.setContentType(MediaType.APPLICATION_OCTET_STREAM);
responseHeaders.set("Content-Disposition", "attachment;filename=" + fileName + ".json");
responseHeaders.set("Access-Control-Expose-Headers", "Content-Disposition");
responseHeaders.get("Access-Control-Expose-Headers").add("Content-Type");
byte[] content = org.apache.poi.util.IOUtils.toByteArray(resource);
resource.close();
Files.deleteIfExists(file.toPath());
return new ResponseEntity<>(content, responseHeaders, HttpStatus.OK);
FileEnvelope rdaJsonDocument = new FileEnvelope();
rdaJsonDocument.setFilename(fileName + ".json");
rdaJsonDocument.setFile(file);
return rdaJsonDocument;
}
public ResponseEntity<byte[]> getDocument(String id, String contentType, Principal principal, ConfigLoader configLoader) throws InstantiationException, IllegalAccessException, IOException {
@@ -2041,100 +2101,6 @@ public class DataManagementPlanManager {
return null;
}
private String getUnpublishedDOI(String DOI, String token, Integer version) {
try {
RestTemplate restTemplate = new RestTemplate();
HttpHeaders headers = new HttpHeaders();
headers.setAccept(Collections.singletonList(MediaType.APPLICATION_JSON));
headers.setContentType(MediaType.APPLICATION_JSON);
Map createResponse = null;
LinkedHashMap<String, String> links = null;
LinkedHashMap<String, String> metadata = null;
String listUrl = this.environment.getProperty("zenodo.url") + "deposit/depositions" + "?q=conceptdoi:\"" + DOI + "\"&access_token=" + token;
ResponseEntity<Map[]> listResponses = restTemplate.getForEntity(listUrl, Map[].class);
createResponse = listResponses.getBody()[0];
metadata = (LinkedHashMap<String, String>) createResponse.get("metadata");
links = (LinkedHashMap<String, String>) createResponse.get("links");
if (metadata.get("version").equals(version.toString())) {
return links.get("publish");
} else {
return null;
}
}catch (Exception e) {
logger.warn(e.getMessage(), e);
return null;
}
}
public String createZenodoDoi(UUID id, Principal principal) throws Exception {
return this.createZenodoDoi(id, principal, false);
}
public String createZenodoDoi(UUID id, Principal principal, boolean update) throws Exception {
DMP dmp = this.apiContext.getOperationsContext().getDatabaseRepository().getDmpDao().find(id);
if (!isUserOwnerOfDmp(dmp, principal))
throw new Exception("User is not authorized to invoke this action");
if (!dmp.getStatus().equals(DMP.DMPStatus.FINALISED.getValue()))
throw new Exception("DMP is not finalized");
/*if (dmp.getDoi() != null)
throw new Exception("DMP already has a DOI");*/
FileEnvelope file = getWordDocument(id.toString(), principal, configLoader);
String name = file.getFilename().substring(0, file.getFilename().length() - 5);
File pdfFile = PDFUtils.convertToPDF(file, environment);
String fileName = name + ".pdf";
ResponseEntity<byte[]> jsonFile;
try {
jsonFile = getRDAJsonDocument(id.toString(), principal);
} catch (Exception e) {
throw e;
}
String previousDOI = this.getPreviousDOI(dmp.getGroupId(), dmp.getId(), "Zenodo");
File supportingFilesZip = this.createSupportingFilesZip(dmp);
DMPDepositModel dmpDepositModel = DMPToDepositMapper.fromDMP(dmp, pdfFile, fileName, jsonFile, supportingFilesZip, previousDOI);
String zenodoToken = "";
try {
if (this.userManager.isDOITokenValid(principal)) {
zenodoToken = principal.getZenodoToken();
}
} catch (NonValidTokenException e) {
zenodoToken = this.environment.getProperty("zenodo.access_token");
}
String finalDoi = null;
for(RepositoryDeposit repo: this.repositoriesDeposit) { //temp
if(repo.getConfiguration().getRepositoryId().equals("Zenodo")) {
finalDoi = repo.deposit(dmpDepositModel, zenodoToken);
if (finalDoi != null) {
EntityDoi doiEntity = new EntityDoi();
doiEntity.setId(UUID.randomUUID());
doiEntity.setEntityType(EntityDoi.EntityType.DMP);
doiEntity.setDoi(finalDoi);
doiEntity.setRepositoryId("Zenodo");
Date now = new Date();
doiEntity.setCreatedAt(now);
doiEntity.setUpdatedAt(now);
doiEntity.setEntityId(dmp);
apiContext.getOperationsContext().getDatabaseRepository().getEntityDoiDao().createOrUpdate(doiEntity);
dmp.getDois().add(doiEntity);
apiContext.getOperationsContext().getDatabaseRepository().getDmpDao().createOrUpdate(dmp);
}
}
}
if(supportingFilesZip != null) {
Files.deleteIfExists(supportingFilesZip.toPath());
}
Files.deleteIfExists(pdfFile.toPath());
Files.deleteIfExists(file.getFile().toPath());
return finalDoi;
}
public Doi createDoi(DepositRequest depositRequest, Principal principal) throws Exception {
DMP dmp = this.apiContext.getOperationsContext().getDatabaseRepository().getDmpDao().find(UUID.fromString(depositRequest.getDmpId()));
if (!isUserOwnerOfDmp(dmp, principal))
@@ -2147,18 +2113,22 @@ public class DataManagementPlanManager {
FileEnvelope file = getWordDocument(depositRequest.getDmpId(), principal, configLoader);
String name = file.getFilename().substring(0, file.getFilename().length() - 5).replaceAll("[^a-zA-Z0-9_+ ]", "").replace(" ", "_").replace(",", "_");
File pdfFile = PDFUtils.convertToPDF(file, environment);
String fileName = name + ".pdf";
ResponseEntity<byte[]> jsonFile;
eu.eudat.depositinterface.models.FileEnvelope pdfEnvelope = new eu.eudat.depositinterface.models.FileEnvelope();
pdfEnvelope.setFile(pdfFile);
pdfEnvelope.setFilename(name + ".pdf");
eu.eudat.depositinterface.models.FileEnvelope rdaJsonFile = new eu.eudat.depositinterface.models.FileEnvelope();
try {
jsonFile = getRDAJsonDocument(depositRequest.getDmpId(), principal);
FileEnvelope rdaJsonDocument = getRDAJsonDocument(depositRequest.getDmpId(), principal);
rdaJsonFile.setFile(rdaJsonDocument.getFile());
rdaJsonFile.setFilename(rdaJsonDocument.getFilename());
} catch (Exception e) {
throw e;
logger.error(e.getMessage(), e);
}
String previousDOI = this.getPreviousDOI(dmp.getGroupId(), dmp.getId(), depositRequest.getRepositoryId());
File supportingFilesZip = this.createSupportingFilesZip(dmp);
DMPDepositModel dmpDepositModel = DMPToDepositMapper.fromDMP(dmp, pdfFile, fileName, jsonFile, supportingFilesZip, previousDOI);
DMPDepositModel dmpDepositModel = DMPToDepositMapper.fromDMP(dmp, pdfEnvelope, rdaJsonFile, supportingFilesZip, previousDOI);
Optional<RepositoryDeposit> repo = this.repositoriesDeposit.stream().filter(x -> x.getConfiguration().getRepositoryId().equals(depositRequest.getRepositoryId())).findFirst();
String finalDoi = repo.map(r -> {
@@ -2192,10 +2162,12 @@ public class DataManagementPlanManager {
if(supportingFilesZip != null) {
Files.deleteIfExists(supportingFilesZip.toPath());
}
Files.deleteIfExists(rdaJsonFile.getFile().toPath());
Files.deleteIfExists(pdfFile.toPath());
Files.deleteIfExists(file.getFile().toPath());
return doiModel;
}
private File createSupportingFilesZip(DMP dmp) throws IOException {

View File

@@ -1136,7 +1136,7 @@ public class DatasetManager {
List<Tag> tags = apiContext.getOperationsContext().getElasticRepository().getDatasetRepository().query(criteria).stream().map(eu.eudat.elastic.entities.Dataset::getTags).flatMap(Collection::stream).filter(StreamDistinctBy.distinctByKey(Tag::getId)).collect(Collectors.toList());
Set<JsonNode> tagNodes = new HashSet<>();
tagNodes.addAll(JsonSearcher.findNodes(propertiesJson, "renderStyle", "tags", true));
tagNodes.addAll(JsonSearcher.findNodes(propertiesJson, "rdaProperty", "dataset.keyword"));
tagNodes.addAll(JsonSearcher.findNodes(propertiesJson, "schematics", "rda.dataset.keyword"));
if(wizardModel.getTags() == null){
wizardModel.setTags(new ArrayList<>());
}

View File

@@ -10,13 +10,14 @@ import eu.eudat.data.query.items.item.datasetprofile.DatasetProfileAutocompleteR
import eu.eudat.data.query.items.table.datasetprofile.DatasetProfileTableRequestItem;
import eu.eudat.exceptions.datasetprofile.DatasetProfileNewVersionException;
import eu.eudat.logic.builders.model.models.DataTableDataBuilder;
import eu.eudat.logic.proxy.config.Semantic;
import eu.eudat.logic.proxy.config.configloaders.ConfigLoader;
import eu.eudat.logic.services.ApiContext;
import eu.eudat.logic.services.operations.DatabaseRepository;
import eu.eudat.logic.utilities.builders.XmlBuilder;
import eu.eudat.logic.utilities.documents.helpers.FileEnvelope;
import eu.eudat.logic.utilities.documents.xml.datasetProfileXml.ExportXmlBuilderDatasetProfile;
import eu.eudat.logic.utilities.documents.xml.datasetProfileXml.ImportXmlBuilderDatasetProfile;
import eu.eudat.logic.utilities.helpers.StreamDistinctBy;
import eu.eudat.models.data.components.commons.datafield.AutoCompleteData;
import eu.eudat.models.data.datasetprofile.DatasetProfileAutocompleteItem;
import eu.eudat.models.data.datasetprofile.DatasetProfileListingModel;
@@ -34,23 +35,27 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.http.*;
import org.springframework.http.client.ClientHttpRequestFactory;
import org.springframework.http.client.SimpleClientHttpRequestFactory;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.stereotype.Component;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.multipart.MultipartFile;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import javax.activation.MimetypesFileTypeMap;
import javax.transaction.Transactional;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.xpath.*;
import java.io.*;
import java.nio.file.Files;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
@@ -62,14 +67,16 @@ public class DatasetProfileManager {
private ApiContext apiContext;
private DatabaseRepository databaseRepository;
private Environment environment;
private ConfigLoader configLoader;
private final MetricsManager metricsManager;
@Autowired
public DatasetProfileManager(ApiContext apiContext, Environment environment, MetricsManager metricsManager) {
public DatasetProfileManager(ApiContext apiContext, Environment environment, ConfigLoader configLoader, MetricsManager metricsManager) {
this.apiContext = apiContext;
this.databaseRepository = apiContext.getOperationsContext().getDatabaseRepository();
this.environment = environment;
this.configLoader = configLoader;
this.metricsManager = metricsManager;
}
@@ -363,4 +370,79 @@ public class DatasetProfileManager {
}
}
public List<String> getSemantics(String query) {
List<Semantic> semantics = configLoader.getSemantics();
List<String> filteredSemantics = semantics.stream().map(Semantic::getName).collect(Collectors.toList());
if(query != null && !query.isEmpty()){
filteredSemantics = semantics.stream().filter(x -> x.getCategory().contains(query) || x.getName().contains(query)).map(Semantic::getName).collect(Collectors.toList());
}
return filteredSemantics;
}
public void addSemanticsInDatasetProfiles() throws XPathExpressionException {
List<DatasetProfile> ids = this.databaseRepository.getDatasetProfileDao().getAllIds();
for(DatasetProfile dp: ids){
DatasetProfile datasetProfile = this.databaseRepository.getDatasetProfileDao().find(dp.getId());
Document document = XmlBuilder.fromXml(datasetProfile.getDefinition());
XPathFactory xpathFactory = XPathFactory.newInstance();
XPath xpath = xpathFactory.newXPath();
XPathExpression expr = xpath.compile("//rdaCommonStandard");
NodeList rdaProperties = (NodeList) expr.evaluate(document, XPathConstants.NODESET);
for(int i = 0; i < rdaProperties.getLength(); i++){
Node rdaPropertyNode = rdaProperties.item(i);
String rdaProperty = rdaPropertyNode.getTextContent();
Element schematics = document.createElement("schematics");
Node fieldParent = rdaPropertyNode.getParentNode();
if(rdaProperty != null && !rdaProperty.isEmpty()){
Element schematic = document.createElement("schematic");
schematic.setTextContent("rda." + rdaProperty);
schematics.appendChild(schematic);
}
fieldParent.insertBefore(schematics, rdaPropertyNode);
fieldParent.removeChild(rdaPropertyNode);
}
this.updateDatasetProfileXml(document, datasetProfile);
}
}
public void addRdaInSemanticsInDatasetProfiles() throws XPathExpressionException {
List<DatasetProfile> ids = this.databaseRepository.getDatasetProfileDao().getAllIds();
for(DatasetProfile dp: ids){
DatasetProfile datasetProfile = this.databaseRepository.getDatasetProfileDao().find(dp.getId());
Document document = XmlBuilder.fromXml(datasetProfile.getDefinition());
XPathFactory xpathFactory = XPathFactory.newInstance();
XPath xpath = xpathFactory.newXPath();
XPathExpression expr = xpath.compile("//schematic");
NodeList schematics = (NodeList) expr.evaluate(document, XPathConstants.NODESET);
for (int i = 0; i < schematics.getLength(); i++) {
Node schematicNode = schematics.item(i);
String schematicRda = schematicNode.getTextContent();
if (schematicRda != null && !schematicRda.isEmpty() && !schematicRda.startsWith("rda.")) {
schematicNode.setTextContent("rda." + schematicRda);
}
}
this.updateDatasetProfileXml(document, datasetProfile);
}
}
private void updateDatasetProfileXml(Document document, DatasetProfile datasetProfile) {
try {
DOMSource domSource = new DOMSource(document);
StringWriter writer = new StringWriter();
StreamResult result = new StreamResult(writer);
TransformerFactory tf = TransformerFactory.newInstance();
Transformer transformer = tf.newTransformer();
transformer.setOutputProperty(OutputKeys.INDENT, "yes");
transformer.transform(domSource, result);
String newDefinition = writer.toString();
if(newDefinition != null){
datasetProfile.setDefinition(newDefinition);
this.databaseRepository.getDatasetProfileDao().createOrUpdate(datasetProfile);
}
}
catch(TransformerException ex) {
logger.error(ex.getMessage(), ex);
}
}
}

View File

@@ -0,0 +1,51 @@
package eu.eudat.logic.managers;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@Component
public class MaterialManager {
@Autowired
public MaterialManager(){}
public ResponseEntity<byte[]> getResponseEntity(String lang, Stream<Path> paths) throws IOException {
List<String> result = paths.filter(Files::isRegularFile)
.map(Path::toString).collect(Collectors.toList());
String fileName = result.stream().filter(about -> about.contains("_" + lang)).findFirst().orElse(null);
if (fileName == null) {
fileName = result.stream().filter(about -> about.contains("_en")).findFirst().get();
}
InputStream is = new FileInputStream(fileName);
Path path = Paths.get(fileName);
HttpHeaders responseHeaders = new HttpHeaders();
responseHeaders.setContentLength(is.available());
responseHeaders.setContentType(MediaType.TEXT_HTML);
responseHeaders.set("Content-Disposition", "attachment;filename=" + path.getFileName().toString());
responseHeaders.set("Access-Control-Expose-Headers", "Content-Disposition");
responseHeaders.get("Access-Control-Expose-Headers").add("Content-Type");
byte[] content = new byte[is.available()];
is.read(content);
is.close();
return new ResponseEntity<>(content, responseHeaders, HttpStatus.OK);
}
}

View File

@@ -0,0 +1,106 @@
package eu.eudat.logic.managers;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.data.entities.Credential;
import eu.eudat.data.entities.EmailConfirmation;
import eu.eudat.data.entities.UserInfo;
import eu.eudat.data.entities.UserToken;
import eu.eudat.exceptions.emailconfirmation.HasConfirmedEmailException;
import eu.eudat.exceptions.emailconfirmation.TokenExpiredException;
import eu.eudat.logic.builders.entity.UserTokenBuilder;
import eu.eudat.logic.services.ApiContext;
import eu.eudat.logic.services.operations.DatabaseRepository;
import eu.eudat.models.data.security.Principal;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.transaction.Transactional;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.util.*;
import java.util.stream.Collectors;
@Component
public class UnlinkEmailConfirmationManager {
private static Logger logger = LoggerFactory.getLogger(UnlinkEmailConfirmationManager.class);
private ApiContext apiContext;
private DatabaseRepository databaseRepository;
@Autowired
public UnlinkEmailConfirmationManager(ApiContext apiContext) {
this.apiContext = apiContext;
this.databaseRepository = apiContext.getOperationsContext().getDatabaseRepository();
}
@Transactional
public void confirmEmail(String token) throws TokenExpiredException, HasConfirmedEmailException {
EmailConfirmation loginConfirmationEmail = apiContext.getOperationsContext()
.getDatabaseRepository().getLoginConfirmationEmailDao().asQueryable()
.where((builder, root) -> builder.equal(root.get("token"), UUID.fromString(token))).getSingle();
if (loginConfirmationEmail.getExpiresAt().compareTo(new Date()) < 0)
throw new TokenExpiredException("Token has expired.");
if(loginConfirmationEmail.getIsConfirmed())
throw new HasConfirmedEmailException("Email is already confirmed.");
// UserInfo userAskingForUnlink = databaseRepository.getUserInfoDao().asQueryable()
// .where((builder, root) -> builder.equal(root.get("id"), loginConfirmationEmail.getUserId())).getSingle();
try {
Map<String, Object> map = new ObjectMapper().readValue(loginConfirmationEmail.getData(), new TypeReference<Map<String, Object>>() {});
String emailTobeUnlinked = (String) map.get("email");
Integer provider = Integer.valueOf((String) map.get("provider"));
unlinkUser(emailTobeUnlinked, provider);
loginConfirmationEmail.setIsConfirmed(true);
databaseRepository.getLoginConfirmationEmailDao().createOrUpdate(loginConfirmationEmail);
}
catch (Exception e) {
logger.error(e.getMessage(), e);
}
}
@Transactional
private void unlinkUser(String emailTobeUnlinked, Integer provider){
Credential credential = databaseRepository.getCredentialDao().asQueryable()
.where((builder, root) -> builder.and(builder.equal(root.get("email"), emailTobeUnlinked), builder.equal(root.get("provider"), provider))).getSingle();
if(credential != null) {
UserInfo userTobeUnlinked = databaseRepository.getUserInfoDao().asQueryable()
.where((builder, root) -> builder.and(builder.equal(root.get("userStatus"), 1), builder.equal(root.get("name"), credential.getPublicValue()))).getSingle();
userTobeUnlinked.setEmail(emailTobeUnlinked);
userTobeUnlinked.setUserStatus((short) 0);
databaseRepository.getUserInfoDao().createOrUpdate(userTobeUnlinked);
credential.setUserInfo(userTobeUnlinked);
databaseRepository.getCredentialDao().createOrUpdate(credential);
UserToken userToken = this.apiContext.getOperationsContext().getBuilderFactory().getBuilder(UserTokenBuilder.class)
.token(UUID.randomUUID()).user(userTobeUnlinked)
.expiresAt(Timestamp.valueOf(LocalDateTime.now().plusDays(10))).issuedAt(new Date())
.build();
apiContext.getOperationsContext().getDatabaseRepository().getUserTokenDao().createOrUpdate(userToken);
}
}
public void sendConfirmationEmail(String email, Principal principal, UUID userId, Integer provider) {
UserInfo user = apiContext.getOperationsContext().getDatabaseRepository().getUserInfoDao().find(principal.getId());
if (user.getEmail() != null && !user.getEmail().equals(email)) {
apiContext.getUtilitiesService().getConfirmationEmailService().createUnlinkConfirmationEmail(
databaseRepository.getLoginConfirmationEmailDao(),
apiContext.getUtilitiesService().getMailService(),
email,
userId,
principal,
provider
);
}
}
}

View File

@@ -159,31 +159,10 @@ public class UserManager {
return dataTableData;
}
public Boolean isDOITokenValid(Principal principal) throws NonValidTokenException, ExpiredTokenException, IOException {
if (principal.getZenodoToken() != null && !principal.getZenodoToken().isEmpty()) {
if (Instant.now().isBefore(principal.getZenodoDuration())) {
return true;
}
try {
this.updateDOIToken(ZenodoAccessType.REFRESH_TOKEN, principal.getZenodoRefresh(), this.environment.getProperty("zenodo.login.redirect_uri"), principal);
return true;
}catch (Exception e) {
this.deleteDOIToken(principal);
throw new ExpiredTokenException("Zenodo Token is expired.");
}
}
throw new NonValidTokenException("This account has no Zenodo Token");
}
public void registerDOIToken(DOIRequest doiRequest, Principal principal) throws IOException {
this.updateDOIToken(ZenodoAccessType.AUTHORIZATION_CODE, doiRequest.getZenodoRequest().getCode(), doiRequest.getRedirectUri(), principal);
}
private void updateDOIToken(ZenodoAccessType accessType, String code, String redirectUri, Principal principal) throws IOException {
ZenodoResponseToken responseToken = this.zenodoCustomProvider.getAccessToken(accessType, code
, this.environment.getProperty("zenodo.login.client_id")
, this.environment.getProperty("zenodo.login.client_secret")
, redirectUri);
ZenodoResponseToken responseToken = this.zenodoCustomProvider.getAccessToken(ZenodoAccessType.AUTHORIZATION_CODE,
doiRequest.getZenodoRequest().getCode(), this.environment.getProperty("zenodo.login.client_id"),
this.environment.getProperty("zenodo.login.client_secret"), doiRequest.getRedirectUri());
Map<String, Object> settings = new HashMap<>();
settings.put("zenodoEmail", responseToken.getEmail());
settings.put("zenodoRefresh", responseToken.getRefreshToken());

View File

@ -112,17 +112,17 @@ public class PrefillingMapper {
throw e;
}
} else {
List<JsonNode> nodes = JsonSearcher.findNodes(parentNode, "rdaProperty", prefillingMapping.getMaDmpTarget());
List<JsonNode> nodes = JsonSearcher.findNodes(parentNode, "schematics", "rda." + prefillingMapping.getMaDmpTarget());
// zenodo prefilling customizations
if(type.equals("zenodo")){
if(prefillingMapping.getMaDmpTarget().equals("dataset.distribution.data_access")){
if(parsedValue != null && parsedValue.equals("open")){
List<JsonNode> issuedNodes = JsonSearcher.findNodes(parentNode, "rdaProperty", "dataset.issued");
List<JsonNode> issuedNodes = JsonSearcher.findNodes(parentNode, "schematics", "rda.dataset.issued");
if(!issuedNodes.isEmpty()){
String issuedIdNode = issuedNodes.get(0).get("id").asText();
String issuedValue = (String) properties.get(issuedIdNode);
List<JsonNode> licStartDateNodes = JsonSearcher.findNodes(parentNode, "rdaProperty", "dataset.distribution.license.start_date");
List<JsonNode> licStartDateNodes = JsonSearcher.findNodes(parentNode, "schematics", "rda.dataset.distribution.license.start_date");
for (JsonNode licStartDateNode : licStartDateNodes) {
String licStartDateId = licStartDateNode.get(0) != null ? licStartDateNode.get(0).get("id").asText() : licStartDateNode.get("id").asText();
properties.put(licStartDateId, issuedValue);

View File

@ -8,6 +8,8 @@ import javax.xml.bind.annotation.XmlElement;
public class DataFieldsUrlConfiguration {
private String id;
private String name;
private String pid;
private String pidTypeField;
private String uri;
private String description;
private String source;
@ -36,6 +38,23 @@ public class DataFieldsUrlConfiguration {
this.name = name;
}
public String getPid() {
return pid;
}
@XmlElement(name = "pid")
public void setPid(String pid) {
this.pid = pid;
}
public String getPidTypeField() {
return pidTypeField;
}
@XmlElement(name = "pidTypeField")
public void setPidTypeField(String pidTypeField) {
this.pidTypeField = pidTypeField;
}
public String getUri() {
return uri;

View File

@ -0,0 +1,25 @@
package eu.eudat.logic.proxy.config;
import com.fasterxml.jackson.annotation.JsonProperty;
public class Semantic {
@JsonProperty("category")
private String category;
@JsonProperty("name")
private String name;
public String getCategory() {
return category;
}
public void setCategory(String category) {
this.category = category;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}

View File

@ -1,6 +1,7 @@
package eu.eudat.logic.proxy.config.configloaders;
import eu.eudat.logic.proxy.config.ExternalUrls;
import eu.eudat.logic.proxy.config.Semantic;
import eu.eudat.logic.security.customproviders.ConfigurableProvider.entities.ConfigurableProviders;
import org.apache.poi.xwpf.usermodel.XWPFDocument;
@ -9,7 +10,7 @@ import java.util.Map;
public interface ConfigLoader {
ExternalUrls getExternalUrls();
List<String> getRdaProperties();
List<Semantic> getSemantics();
XWPFDocument getDocument();
XWPFDocument getDatasetDocument();
ConfigurableProviders getConfigurableProviders();

View File

@ -1,8 +1,10 @@
package eu.eudat.logic.proxy.config.configloaders;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.logic.proxy.config.ExternalUrls;
import eu.eudat.logic.proxy.config.Semantic;
import eu.eudat.logic.security.customproviders.ConfigurableProvider.entities.ConfigurableProviders;
import org.apache.poi.xwpf.usermodel.XWPFDocument;
import org.slf4j.Logger;
@ -35,7 +37,7 @@ public class DefaultConfigLoader implements ConfigLoader {
private static final ObjectMapper mapper = new ObjectMapper();
private ExternalUrls externalUrls;
private List<String> rdaProperties;
private List<Semantic> semantics;
private XWPFDocument document;
private XWPFDocument datasetDocument;
private ConfigurableProviders configurableProviders;
@ -64,24 +66,17 @@ public class DefaultConfigLoader implements ConfigLoader {
}
}
private void setRdaProperties() {
String filePath = environment.getProperty("configuration.rda");
private void setSemantics() {
String filePath = environment.getProperty("configuration.semantics");
logger.info("Loaded also config file: " + filePath);
BufferedReader reader;
List<String> rdaList = new LinkedList<>();
if (filePath != null) {
try {
reader = new BufferedReader(new InputStreamReader(getStreamFromPath(filePath)));
String line = reader.readLine();
while (line != null) {
rdaList.add(line);
line = reader.readLine();
semantics = mapper.readValue(getStreamFromPath(filePath), new TypeReference<List<Semantic>>(){});
}
reader.close();
} catch (IOException | NullPointerException e) {
catch (IOException e) {
logger.error(e.getMessage(), e);
}
rdaProperties = rdaList;
}
}
private void setDocument() {
@ -171,12 +166,12 @@ public class DefaultConfigLoader implements ConfigLoader {
return externalUrls;
}
public List<String> getRdaProperties() {
if (rdaProperties == null) {
rdaProperties = new ArrayList<>();
this.setRdaProperties();
public List<Semantic> getSemantics() {
if (semantics == null) {
semantics = new ArrayList<>();
this.setSemantics();
}
return rdaProperties;
return semantics;
}
public XWPFDocument getDocument() {

View File

@ -467,7 +467,14 @@ public class RemoteFetcher {
}
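// Maps a configured external field name to the internal result key; when a dedicated pid field is configured, the id field is exposed as "originalId" so the pid mapping can take over.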
private String transformKey(DataUrlConfiguration dataUrlConfiguration, String key) {
if (dataUrlConfiguration.getFieldsUrlConfiguration().getId() != null && key.equals(dataUrlConfiguration.getFieldsUrlConfiguration().getId().replace("'",""))) return "pid";
if (dataUrlConfiguration.getFieldsUrlConfiguration().getId() != null && key.equals(dataUrlConfiguration.getFieldsUrlConfiguration().getId().replace("'",""))) {
if(dataUrlConfiguration.getFieldsUrlConfiguration().getPid() == null)
return "pid";
else
return "originalId";
}
if (dataUrlConfiguration.getFieldsUrlConfiguration().getPid() != null && key.equals("pid")) return "pid";
if (dataUrlConfiguration.getFieldsUrlConfiguration().getPidTypeField() != null && key.equals("pidTypeField")) return "pidTypeField";
if (dataUrlConfiguration.getFieldsUrlConfiguration().getDescription() != null && key.equals(dataUrlConfiguration.getFieldsUrlConfiguration().getDescription().replace("'",""))) return "description";
if (dataUrlConfiguration.getFieldsUrlConfiguration().getUri() != null && key.equals(dataUrlConfiguration.getFieldsUrlConfiguration().getUri().replace("'",""))) return "uri";
if (dataUrlConfiguration.getFieldsUrlConfiguration().getName() != null && key.equals(dataUrlConfiguration.getFieldsUrlConfiguration().getName().replace("'",""))) return "name";

View File

@ -1,11 +1,15 @@
package eu.eudat.logic.proxy.fetching;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.jayway.jsonpath.Configuration;
import com.jayway.jsonpath.DocumentContext;
import com.jayway.jsonpath.JsonPath;
import com.jayway.jsonpath.spi.json.JacksonJsonProvider;
import eu.eudat.logic.proxy.config.DataUrlConfiguration;
import eu.eudat.logic.proxy.config.ExternalUrlCriteria;
import eu.eudat.logic.proxy.fetching.entities.Results;
import io.swagger.models.auth.In;
import net.minidev.json.JSONArray;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -17,6 +21,7 @@ import java.util.stream.Collectors;
public class RemoteFetcherUtils {
private final static Logger logger = LoggerFactory.getLogger(RemoteFetcherUtils.class);
private static final ObjectMapper mapper = new ObjectMapper();
public static Results getFromJson(DocumentContext jsonContext, DataUrlConfiguration jsonDataPath) {
return new Results(parseData(jsonContext, jsonDataPath),
@ -60,11 +65,38 @@ public class RemoteFetcherUtils {
try {
String value = ((String) getterMethod.invoke(jsonDataPath.getFieldsUrlConfiguration()));
if (value != null) {
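// pid/pidTypeField mappings use a two-part dotted path: the first segment selects a nested map (or the first element of a list), the second the property read from it.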
if (field.getName().equals("pid") || field.getName().equals("pidTypeField")) {
String pid = null;
Object pidObj = stringObjectMap.get(value.split("\\.")[0]);
if(pidObj != null){
if(pidObj instanceof Map){
pid = ((Map<String, String>) pidObj).get(value.split("\\.")[1]);
}
else if(pidObj instanceof List){
Object o = ((List<Map<String,?>>) pidObj).get(0).get(value.split("\\.")[1]);
if(o instanceof String){
pid = (String)o;
}
else if(o instanceof Integer){
pid = String.valueOf(o);
}
}
}
if(pid != null) {
if ((field.getName().equals("pid"))){
parsedData.get(parsedData.size() - 1).put("pid", pid);
}
else{
parsedData.get(parsedData.size() - 1).put("pidTypeField", pid);
}
}
} else {
value = value.replace("'", "");
if (stringObjectMap.containsKey(value)) {
parsedData.get(parsedData.size() - 1).put(field.getName().equals("types") ? "tags" : value, normalizeValue(stringObjectMap.get(value), (field.getName().equals("types") || field.getName().equals("uri"))));
}
}
}
} catch (IllegalAccessException | InvocationTargetException e) {
logger.error(e.getLocalizedMessage(), e);
}
@ -84,8 +116,16 @@ public class RemoteFetcherUtils {
} else {
for (Object o : jarr) {
if ((o instanceof Map) && ((Map) o).containsKey("content")) {
try {
return ((Map<String, String>) o).get("content");
}
catch (ClassCastException e){
if(((Map<?, ?>) o).get("content") instanceof Integer) {
return String.valueOf(((Map<?, ?>) o).get("content"));
}
return null;
}
}
}
}
} else if (value instanceof Map) {

View File

@ -0,0 +1,68 @@
package eu.eudat.logic.security.customproviders.ConfigurableProvider.entities.saml2;
import com.fasterxml.jackson.annotation.JsonValue;
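// Keystore-backed certificate details (alias, key password, keystore path/password, keystore format) used for SAML signing and encryption.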
public class CertificateInfo {
public enum KeyFormat {
JKS("JKS"), PKCS12("PKCS12");
private String type;
KeyFormat(String type) {
this.type = type;
}
@JsonValue
public String getType() { return type; }
public static KeyFormat fromType(String type) {
for (KeyFormat t: KeyFormat.values()) {
if (type.equals(t.getType())) {
return t;
}
}
throw new IllegalArgumentException("Unsupported Keystore format " + type);
}
}
private String alias;
private String password;
private String keystorePath;
private String keystorePassword;
private KeyFormat keyFormat;
public String getAlias() {
return alias;
}
public void setAlias(String alias) {
this.alias = alias;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public String getKeystorePath() {
return keystorePath;
}
public void setKeystorePath(String keystorePath) {
this.keystorePath = keystorePath;
}
public String getKeystorePassword() {
return keystorePassword;
}
public void setKeystorePassword(String keystorePassword) {
this.keystorePassword = keystorePassword;
}
public KeyFormat getKeyFormat() {
return keyFormat;
}
public void setKeyFormat(KeyFormat keyFormat) {
this.keyFormat = keyFormat;
}
}

View File

@ -47,49 +47,24 @@ public class Saml2ConfigurableProvider extends ConfigurableProvider {
}
}
public enum KeyFormat {
JKS("JKS"), PKCS12("PKCS12");
private String type;
KeyFormat(String type) {
this.type = type;
}
@JsonValue
public String getType() { return type; }
public static KeyFormat fromType(String type) {
for (KeyFormat t: KeyFormat.values()) {
if (type.equals(t.getType())) {
return t;
}
}
throw new IllegalArgumentException("Unsupported Keystore format " + type);
}
}
private String spEntityId;
private String idpEntityId;
private String idpUrl;
private String idpArtifactUrl;
private String idpMetadataUrl;
private boolean assertionEncrypted;
private KeyFormat keyFormat;
private String keyAlias;
private String credentialPath;
private String archivePassword;
private String keyPassword;
private CertificateInfo encryptionCert;
private CertificateInfo signingCert;
private boolean responseSigned;
private boolean assertionSigned;
private boolean signatureRequired;
private String signatureKeyAlias;
private String signaturePath;
private String signatureKeyStorePassword;
private String signatureKeyPassword;
private SAML2UsingFormat usingFormat;
private Map<String, SAML2AttributeType> attributeTypes;
private Map<String, String> configurableUserFromAttributes;
private String binding;
private String assertionConsumerServiceUrl;
private boolean wantAssertionsSigned;
private boolean authnRequestsSigned;
public String getSpEntityId() {
return spEntityId;
@ -133,39 +108,18 @@ public class Saml2ConfigurableProvider extends ConfigurableProvider {
this.assertionEncrypted = assertionEncrypted;
}
public KeyFormat getKeyFormat() {
return keyFormat;
public CertificateInfo getEncryptionCert() {
return encryptionCert;
}
public void setKeyFormat(KeyFormat keyFormat) {
this.keyFormat = keyFormat;
public void setEncryptionCert(CertificateInfo encryptionCert) {
this.encryptionCert = encryptionCert;
}
public String getKeyAlias() {
return keyAlias;
public CertificateInfo getSigningCert() {
return signingCert;
}
public void setKeyAlias(String keyAlias) {
this.keyAlias = keyAlias;
}
public String getCredentialPath() {
return credentialPath;
}
public void setCredentialPath(String credentialPath) {
this.credentialPath = credentialPath;
}
public String getArchivePassword() {
return archivePassword;
}
public void setArchivePassword(String archivePassword) {
this.archivePassword = archivePassword;
}
public String getKeyPassword() {
return keyPassword;
}
public void setKeyPassword(String keyPassword) {
this.keyPassword = keyPassword;
public void setSigningCert(CertificateInfo signingCert) {
this.signingCert = signingCert;
}
public boolean isResponseSigned() {
@ -189,34 +143,6 @@ public class Saml2ConfigurableProvider extends ConfigurableProvider {
this.signatureRequired = signatureRequired;
}
public String getSignatureKeyAlias() {
return signatureKeyAlias;
}
public void setSignatureKeyAlias(String signatureKeyAlias) {
this.signatureKeyAlias = signatureKeyAlias;
}
public String getSignaturePath() {
return signaturePath;
}
public void setSignaturePath(String signaturePath) {
this.signaturePath = signaturePath;
}
public String getSignatureKeyStorePassword() {
return signatureKeyStorePassword;
}
public void setSignatureKeyStorePassword(String signatureKeyStorePassword) {
this.signatureKeyStorePassword = signatureKeyStorePassword;
}
public String getSignatureKeyPassword() {
return signatureKeyPassword;
}
public void setSignatureKeyPassword(String signatureKeyPassword) {
this.signatureKeyPassword = signatureKeyPassword;
}
public SAML2UsingFormat getUsingFormat() {
return usingFormat;
}
@ -252,4 +178,17 @@ public class Saml2ConfigurableProvider extends ConfigurableProvider {
this.assertionConsumerServiceUrl = assertionConsumerServiceUrl;
}
public boolean isWantAssertionsSigned() {
return wantAssertionsSigned;
}
public void setWantAssertionsSigned(boolean wantAssertionsSigned) {
this.wantAssertionsSigned = wantAssertionsSigned;
}
public boolean isAuthnRequestsSigned() {
return authnRequestsSigned;
}
public void setAuthnRequestsSigned(boolean authnRequestsSigned) {
this.authnRequestsSigned = authnRequestsSigned;
}
}

View File

@ -1,29 +1,46 @@
package eu.eudat.logic.security.repositorydeposit.mapper;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.data.entities.*;
import eu.eudat.depositinterface.models.*;
import org.springframework.http.ResponseEntity;
import eu.eudat.logic.utilities.builders.XmlBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import javax.xml.xpath.*;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
public class DMPToDepositMapper {
public static DMPDepositModel fromDMP(DMP entity, File pdfFile, String fileName, ResponseEntity<byte[]> jsonFile, File supportingFilesZip, String previousDOI) {
private static final Logger logger = LoggerFactory.getLogger(DMPToDepositMapper.class);
private static final ObjectMapper mapper = new ObjectMapper();
public static DMPDepositModel fromDMP(DMP entity, FileEnvelope pdfFile, FileEnvelope jsonFile, File supportingFilesZip, String previousDOI) {
DMPDepositModel deposit = new DMPDepositModel();
deposit.setId(entity.getId());
deposit.setVersion(entity.getVersion());
deposit.setLabel(entity.getLabel());
deposit.setDescription(entity.getDescription());
deposit.setPublic(entity.isPublic());
deposit.setDatasets(entity.getDataset().stream().map(DMPToDepositMapper::fromDataset).collect(Collectors.toList()));
deposit.setUsers(entity.getUsers().stream().map(DMPToDepositMapper::fromUserDMP).collect(Collectors.toSet()));
deposit.setOrganisations(entity.getOrganisations().stream().map(DMPToDepositMapper::fromOrganisation).collect(Collectors.toSet()));
deposit.setResearchers(entity.getResearchers().stream().map(DMPToDepositMapper::fromResearcher).collect(Collectors.toSet()));
deposit.setGrant(fromGrant(entity.getGrant()));
deposit.setPdfFile(pdfFile);
deposit.setPdfFileName(fileName);
deposit.setRdaJson(jsonFile);
deposit.setRdaJsonFile(jsonFile);
deposit.setSupportingFilesZip(supportingFilesZip);
deposit.setPreviousDOI(previousDOI);
@ -31,6 +48,70 @@ public class DMPToDepositMapper {
return deposit;
}
private static DatasetDepositModel fromDataset(Dataset entity){
DatasetDepositModel deposit = new DatasetDepositModel();
deposit.setLabel(entity.getLabel());
deposit.setDescription(entity.getDescription());
deposit.setProfileDefinition(entity.getProfile().getDefinition());
deposit.setProperties(entity.getProperties());
deposit.setFields(fromDefinitionAndProperties(deposit.getProfileDefinition(), deposit.getProperties()));
return deposit;
}
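// Flattens the dataset profile definition: for each <schematics> element it collects the schematic values, the answered value of the owning field, its render style and whether it accepts multiple values.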
private static List<DatasetFieldsDepositModel> fromDefinitionAndProperties(String definition, String properties){
List<DatasetFieldsDepositModel> deposit = new ArrayList<>();
try {
Map<String, Object> datasetAnswers = mapper.readValue(properties, HashMap.class);
Document document = XmlBuilder.fromXml(definition);
XPathFactory xpathFactory = XPathFactory.newInstance();
XPath xpath = xpathFactory.newXPath();
XPathExpression expr = xpath.compile("//schematics");
NodeList schematics = (NodeList) expr.evaluate(document, XPathConstants.NODESET);
for (int i = 0; i < schematics.getLength(); i++) {
Node schematicsNode = schematics.item(i);
NodeList schematicsList = schematicsNode.getChildNodes();
DatasetFieldsDepositModel fieldDeposit = new DatasetFieldsDepositModel();
List<String> schematicsDeposit = new ArrayList<>();
if(schematicsList != null){
for(int j = 0; j < schematicsList.getLength(); j++){
Node schematic = schematicsList.item(j);
if(schematic.getTextContent().matches(".*\\w+.*")) {
schematicsDeposit.add(schematic.getTextContent());
}
}
}
fieldDeposit.setSchematics(schematicsDeposit);
String fieldId = schematicsNode.getParentNode().getAttributes().getNamedItem("id").getNodeValue();
Object value = datasetAnswers.get(fieldId);
fieldDeposit.setValue(value);
Element field = (Element) schematicsNode.getParentNode();
Element viewStyle = (Element) field.getElementsByTagName("viewStyle").item(0);
String renderStyle = viewStyle.getAttribute("renderstyle");
fieldDeposit.setRenderStyleType(renderStyle);
Element data = (Element) field.getElementsByTagName("data").item(0);
String multipleSelection = data.getAttribute("multiList");
String multipleAutoComplete = data.getAttribute("multiAutoComplete");
if(!multipleSelection.isEmpty()){
fieldDeposit.setMultiple(Boolean.parseBoolean(multipleSelection));
}
else if(!multipleAutoComplete.isEmpty()){
fieldDeposit.setMultiple(Boolean.parseBoolean(multipleAutoComplete));
}
else{
fieldDeposit.setMultiple(false);
}
deposit.add(fieldDeposit);
}
}
catch (XPathExpressionException | JsonProcessingException ex){
logger.error(ex.getMessage(), ex);
return null;
}
return deposit;
}
private static UserDMPDepositModel fromUserDMP(UserDMP entity){
UserDMPDepositModel deposit = new UserDMPDepositModel();
deposit.setUser(fromUserInfo(entity.getUser()));

View File

@ -1,6 +1,8 @@
package eu.eudat.logic.security.validators.configurableProvider;
import eu.eudat.logic.security.customproviders.ConfigurableProvider.entities.saml2.CertificateInfo;
import eu.eudat.logic.security.customproviders.ConfigurableProvider.entities.saml2.Saml2ConfigurableProvider;
import eu.eudat.logic.utilities.builders.XmlBuilder;
import jakarta.xml.soap.*;
import net.shibboleth.utilities.java.support.component.ComponentInitializationException;
import net.shibboleth.utilities.java.support.resolver.CriteriaSet;
@ -32,6 +34,7 @@ import org.opensaml.core.xml.config.XMLObjectProviderRegistry;
import org.opensaml.core.xml.io.*;
import org.opensaml.core.xml.schema.*;
import org.opensaml.saml.common.SAMLObject;
import org.opensaml.saml.common.SAMLObjectContentReference;
import org.opensaml.saml.common.SAMLVersion;
import org.opensaml.saml.common.xml.SAMLConstants;
import org.opensaml.saml.criterion.EntityRoleCriterion;
@ -40,7 +43,7 @@ import org.opensaml.saml.metadata.resolver.impl.HTTPMetadataResolver;
import org.opensaml.saml.metadata.resolver.impl.PredicateRoleDescriptorResolver;
import org.opensaml.saml.saml2.core.*;
import org.opensaml.saml.saml2.encryption.Decrypter;
import org.opensaml.saml.saml2.metadata.IDPSSODescriptor;
import org.opensaml.saml.saml2.metadata.*;
import org.opensaml.saml.security.impl.MetadataCredentialResolver;
import org.opensaml.security.credential.Credential;
import org.opensaml.security.credential.CredentialSupport;
@ -55,10 +58,14 @@ import org.opensaml.xml.util.Base64;
import org.opensaml.xmlsec.config.impl.DefaultSecurityConfigurationBootstrap;
import org.opensaml.xmlsec.encryption.EncryptedKey;
import org.opensaml.xmlsec.keyinfo.KeyInfoCredentialResolver;
import org.opensaml.xmlsec.keyinfo.KeyInfoGenerator;
import org.opensaml.xmlsec.keyinfo.impl.StaticKeyInfoCredentialResolver;
import org.opensaml.xmlsec.keyinfo.impl.X509KeyInfoGeneratorFactory;
import org.opensaml.xmlsec.signature.KeyInfo;
import org.opensaml.xmlsec.signature.Signature;
import org.opensaml.xmlsec.signature.X509Data;
import org.opensaml.xmlsec.signature.impl.SignatureBuilder;
import org.opensaml.xmlsec.signature.support.SignatureConstants;
import org.opensaml.xmlsec.signature.support.SignatureValidator;
import org.opensaml.xmlsec.signature.support.Signer;
import org.slf4j.Logger;
@ -85,6 +92,7 @@ import java.net.UnknownHostException;
import java.nio.charset.StandardCharsets;
import java.security.*;
import java.security.cert.CertificateEncodingException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.time.Instant;
import java.util.*;
@ -264,7 +272,7 @@ public class Saml2SSOUtils {
ArtifactResolve artifactResolve = createArtifactResolveObject(samlArtReceived, saml2Provider.getSpEntityId());
if (saml2Provider.isSignatureRequired()) {
signArtifactResolveReq(artifactResolve, saml2Provider);
signArtifactResolveReq(artifactResolve, saml2Provider.getSigningCert());
}
return artifactResolve;
@ -290,14 +298,14 @@ public class Saml2SSOUtils {
}
private static void signArtifactResolveReq(ArtifactResolve artifactResolve, Saml2ConfigurableProvider saml2Provider) throws Exception {
private static void signArtifactResolveReq(ArtifactResolve artifactResolve, CertificateInfo signingCertificateInfo) throws Exception {
try {
KeyStore ks = KeyStore.getInstance("JKS");
String archivePassword = saml2Provider.getSignatureKeyStorePassword();
String archivePassword = signingCertificateInfo.getKeystorePassword();
char[] pwdArray = (archivePassword != null) ? archivePassword.toCharArray() : "changeit".toCharArray();
ks.load(new FileInputStream(saml2Provider.getSignaturePath()), pwdArray);
X509Credential cred = new KeyStoreX509CredentialAdapter(ks, saml2Provider.getSignatureKeyAlias(), saml2Provider.getSignatureKeyPassword().toCharArray());
ks.load(new FileInputStream(signingCertificateInfo.getKeystorePath()), pwdArray);
X509Credential cred = new KeyStoreX509CredentialAdapter(ks, signingCertificateInfo.getAlias(), signingCertificateInfo.getPassword().toCharArray());
Signature signature = setSignatureRaw(XMLSignature.ALGO_ID_SIGNATURE_RSA, cred);
artifactResolve.setSignature(signature);
@ -541,7 +549,7 @@ public class Saml2SSOUtils {
if (!CollectionUtils.isEmpty(encryptedAssertions)) {
encryptedAssertion = encryptedAssertions.get(0);
try {
assertion = getDecryptedAssertion(encryptedAssertion, saml2Provider);
assertion = getDecryptedAssertion(encryptedAssertion, saml2Provider.getEncryptionCert());
} catch (Exception e) {
throw new Exception("Unable to decrypt the SAML2 Assertion");
}
@ -601,15 +609,15 @@ public class Saml2SSOUtils {
}
private static Assertion getDecryptedAssertion(EncryptedAssertion encryptedAssertion, Saml2ConfigurableProvider saml2Provider) throws Exception {
private static Assertion getDecryptedAssertion(EncryptedAssertion encryptedAssertion, CertificateInfo encryptionCertificateInfo) throws Exception {
try {
KeyStore ks = (saml2Provider.getKeyFormat().getType().equals("JKS")) ? KeyStore.getInstance("JKS") : KeyStore.getInstance("PKCS12");
String archivePassword = saml2Provider.getArchivePassword();
KeyStore ks = (encryptionCertificateInfo.getKeyFormat().getType().equals("JKS")) ? KeyStore.getInstance("JKS") : KeyStore.getInstance("PKCS12");
String archivePassword = encryptionCertificateInfo.getKeystorePassword();
char[] pwdArray = (archivePassword != null) ? archivePassword.toCharArray() : "changeit".toCharArray();
ks.load(new FileInputStream(saml2Provider.getCredentialPath()), pwdArray);
X509Certificate cert = (X509Certificate)ks.getCertificate(saml2Provider.getKeyAlias());
PrivateKey pk = (PrivateKey) ks.getKey(saml2Provider.getKeyAlias(), saml2Provider.getKeyPassword().toCharArray());
ks.load(new FileInputStream(encryptionCertificateInfo.getKeystorePath()), pwdArray);
X509Certificate cert = (X509Certificate)ks.getCertificate(encryptionCertificateInfo.getAlias());
PrivateKey pk = (PrivateKey) ks.getKey(encryptionCertificateInfo.getAlias(), encryptionCertificateInfo.getPassword().toCharArray());
KeyInfoCredentialResolver keyResolver = new StaticKeyInfoCredentialResolver(
new BasicX509Credential(cert, pk));
EncryptedKey key = encryptedAssertion.getEncryptedData().getKeyInfo().getEncryptedKeys().get(0);
@ -685,4 +693,186 @@ public class Saml2SSOUtils {
}
}
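// Loads the certificate and private key described by the CertificateInfo from its JKS or PKCS12 keystore and wraps them in a BasicX509Credential.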
private static Credential getCredential(CertificateInfo certificateInfo) throws KeyStoreException, IOException, CertificateException, NoSuchAlgorithmException, UnrecoverableKeyException {
KeyStore ks = (certificateInfo.getKeyFormat().getType().equals("JKS")) ? KeyStore.getInstance("JKS") : KeyStore.getInstance("PKCS12");
String archivePassword = certificateInfo.getKeystorePassword();
char[] pwdArray = (archivePassword != null) ? archivePassword.toCharArray() : "changeit".toCharArray();
ks.load(new FileInputStream(certificateInfo.getKeystorePath()), pwdArray);
X509Certificate cert = (X509Certificate)ks.getCertificate(certificateInfo.getAlias());
PrivateKey pk = (PrivateKey) ks.getKey(certificateInfo.getAlias(), certificateInfo.getPassword().toCharArray());
return new BasicX509Credential(cert, pk);
}
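// Builds the SP metadata document: entity id, optional encryption and signing key descriptors, a transient NameID format and the assertion consumer service for the configured binding.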
public static String getMetadata(Saml2ConfigurableProvider provider) throws Exception {
EntityDescriptor spEntityDescriptor = (EntityDescriptor) buildXMLObject(EntityDescriptor.DEFAULT_ELEMENT_NAME);
spEntityDescriptor.setEntityID(provider.getSpEntityId());
SPSSODescriptor spSSODescriptor = (SPSSODescriptor) buildXMLObject(SPSSODescriptor.DEFAULT_ELEMENT_NAME);
spSSODescriptor.setWantAssertionsSigned(provider.isWantAssertionsSigned());
spSSODescriptor.setAuthnRequestsSigned(provider.isAuthnRequestsSigned());
X509KeyInfoGeneratorFactory keyInfoGeneratorFactory = new X509KeyInfoGeneratorFactory();
keyInfoGeneratorFactory.setEmitEntityCertificate(true);
KeyInfoGenerator keyInfoGenerator = keyInfoGeneratorFactory.newInstance();
if (provider.isAssertionEncrypted()) {
KeyDescriptor encKeyDescriptor = (KeyDescriptor) buildXMLObject(KeyDescriptor.DEFAULT_ELEMENT_NAME);
encKeyDescriptor.setUse(UsageType.ENCRYPTION); //Set usage
// Generating key info. The element will contain the public key. The key is used by the IDP to encrypt data
try {
encKeyDescriptor.setKeyInfo(keyInfoGenerator.generate(getCredential(provider.getEncryptionCert())));
} catch (SecurityException e) {
logger.error(e.getMessage(), e);
}
spSSODescriptor.getKeyDescriptors().add(encKeyDescriptor);
}
if (provider.isWantAssertionsSigned()) {
KeyDescriptor signKeyDescriptor = (KeyDescriptor) buildXMLObject(KeyDescriptor.DEFAULT_ELEMENT_NAME);
signKeyDescriptor.setUse(UsageType.SIGNING); //Set usage
// Generating key info. The element will contain the public key. The key is used by the IDP to verify signatures
try {
signKeyDescriptor.setKeyInfo(keyInfoGenerator.generate(getCredential(provider.getSigningCert())));
} catch (SecurityException e) {
logger.error(e.getMessage(), e);
}
spSSODescriptor.getKeyDescriptors().add(signKeyDescriptor);
}
NameIDFormat nameIDFormat = (NameIDFormat) buildXMLObject(NameIDFormat.DEFAULT_ELEMENT_NAME);
nameIDFormat.setFormat("urn:oasis:names:tc:SAML:2.0:nameid-format:transient");
spSSODescriptor.getNameIDFormats().add(nameIDFormat);
AssertionConsumerService assertionConsumerService = (AssertionConsumerService) buildXMLObject(AssertionConsumerService.DEFAULT_ELEMENT_NAME);
assertionConsumerService.setIndex(0);
switch (provider.getBinding()) {
case "Redirect":
assertionConsumerService.setBinding(SAMLConstants.SAML2_REDIRECT_BINDING_URI);
break;
case "Artifact":
assertionConsumerService.setBinding(SAMLConstants.SAML2_ARTIFACT_BINDING_URI);
break;
case "Post":
assertionConsumerService.setBinding(SAMLConstants.SAML2_POST_BINDING_URI);
break;
}
assertionConsumerService.setLocation(provider.getAssertionConsumerServiceUrl());
spSSODescriptor.getAssertionConsumerServices().add(assertionConsumerService);
spSSODescriptor.addSupportedProtocol(SAMLConstants.SAML20P_NS);
spEntityDescriptor.getRoleDescriptors().add(spSSODescriptor);
String metadataXML = null;
try {
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
DocumentBuilder builder = factory.newDocumentBuilder();
Document document = builder.newDocument();
Marshaller out = registry.getMarshallerFactory().getMarshaller(spEntityDescriptor);
out.marshall(spEntityDescriptor, document);
metadataXML = XmlBuilder.generateXml(document);
}
catch (MarshallingException | ParserConfigurationException e) {
logger.error(e.getMessage(), e);
}
return metadataXML;
}
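// Marshals an AuthnRequest to XML; when authnRequestsSigned is enabled the request is signed with the signing certificate after marshalling.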
public static String getAuthnRequest(Saml2ConfigurableProvider provider) throws Exception {
AuthnRequest authnRequest = buildAuthnRequest(provider);
String authnRequestXml = null;
DocumentBuilder builder;
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
try {
Signature signature = (Signature) buildXMLObject(Signature.DEFAULT_ELEMENT_NAME);
if(provider.isAuthnRequestsSigned()){
Credential credential = getCredential(provider.getSigningCert());
signature.setSigningCredential(credential);
signature.setSignatureAlgorithm(SignatureConstants.ALGO_ID_SIGNATURE_RSA_SHA256);
signature.setCanonicalizationAlgorithm(SignatureConstants.ALGO_ID_C14N_EXCL_OMIT_COMMENTS);
X509KeyInfoGeneratorFactory keyInfoGeneratorFactory = new X509KeyInfoGeneratorFactory();
keyInfoGeneratorFactory.setEmitEntityCertificate(true);
KeyInfoGenerator keyInfoGenerator = keyInfoGeneratorFactory.newInstance();
signature.setKeyInfo(keyInfoGenerator.generate(getCredential(provider.getSigningCert())));
authnRequest.setSignature(signature);
}
builder = factory.newDocumentBuilder();
Document document = builder.newDocument();
Marshaller out = registry.getMarshallerFactory().getMarshaller(authnRequest);
out.marshall(authnRequest, document);
if(provider.isAuthnRequestsSigned()) {
Signer.signObject(signature);
}
authnRequestXml = XmlBuilder.generateXml(document);
}
catch (MarshallingException | ParserConfigurationException e) {
logger.error(e.getMessage(), e);
}
return authnRequestXml;
}
private static AuthnRequest buildAuthnRequest(Saml2ConfigurableProvider provider) throws Exception {
AuthnRequest authnRequest = (AuthnRequest) buildXMLObject(AuthnRequest.DEFAULT_ELEMENT_NAME);
authnRequest.setIssueInstant(Instant.now());
authnRequest.setDestination(provider.getIdpUrl());
switch (provider.getBinding()) {
case "Redirect":
authnRequest.setProtocolBinding(SAMLConstants.SAML2_REDIRECT_BINDING_URI);
break;
case "Artifact":
authnRequest.setProtocolBinding(SAMLConstants.SAML2_ARTIFACT_BINDING_URI);
break;
case "Post":
authnRequest.setProtocolBinding(SAMLConstants.SAML2_POST_BINDING_URI);
break;
}
authnRequest.setAssertionConsumerServiceURL(provider.getAssertionConsumerServiceUrl());
authnRequest.setID('_' + UUID.randomUUID().toString());
authnRequest.setIssuer(buildIssuer(provider.getSpEntityId()));
authnRequest.setNameIDPolicy(buildNameIdPolicy());
return authnRequest;
}
private static NameIDPolicy buildNameIdPolicy() throws Exception {
NameIDPolicy nameIDPolicy = (NameIDPolicy) buildXMLObject(NameIDPolicy.DEFAULT_ELEMENT_NAME);
nameIDPolicy.setAllowCreate(true);
nameIDPolicy.setFormat(NameIDType.TRANSIENT);
return nameIDPolicy;
}
private static Issuer buildIssuer(String spEntityId) throws Exception {
Issuer issuer = (Issuer) buildXMLObject(Issuer.DEFAULT_ELEMENT_NAME);
issuer.setValue(spEntityId);
return issuer;
}
}

View File

@ -12,7 +12,11 @@ public interface ConfirmationEmailService {
public void createMergeConfirmationEmail(EmailConfirmationDao loginConfirmationEmailDao, MailService mailService, String email, UUID userId, Principal principal, Integer provider);
public void createUnlinkConfirmationEmail(EmailConfirmationDao loginConfirmationEmailDao, MailService mailService, String email, UUID userId, Principal principal, Integer provider);
public CompletableFuture sentConfirmationEmail(EmailConfirmation confirmationEmail, MailService mailService);
public CompletableFuture sentMergeConfirmationEmail(EmailConfirmation confirmationEmail, MailService mailService, String userName);
public CompletableFuture sentUnlinkConfirmationEmail(EmailConfirmation confirmationEmail, MailService mailService);
}

View File

@ -1,6 +1,7 @@
package eu.eudat.logic.services.utilities;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.data.dao.entities.EmailConfirmationDao;
import eu.eudat.data.entities.EmailConfirmation;
@ -84,6 +85,30 @@ public class ConfirmationEmailServiceImpl implements ConfirmationEmailService {
});
}
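// Reads the address to be unlinked from the confirmation payload and sends the unlink e-mail asynchronously.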
@Override
public CompletableFuture sentUnlinkConfirmationEmail(EmailConfirmation confirmationEmail, MailService mailService) {
String email = null;
try {
Map<String, Object> map = new ObjectMapper().readValue(confirmationEmail.getData(), new TypeReference<Map<String, Object>>() {});
email = (String) map.get("email");
}
catch (JsonProcessingException e){
logger.error(e.getMessage(), e);
}
String finalEmail = email;
return CompletableFuture.runAsync(() -> {
SimpleMail mail = new SimpleMail();
mail.setSubject(environment.getProperty("conf_email.subject"));
mail.setContent(createUnlinkContent(confirmationEmail.getToken(), mailService, finalEmail));
mail.setTo(confirmationEmail.getEmail());
try {
mailService.sendSimpleMail(mail);
} catch (Exception ex) {
logger.error(ex.getMessage(), ex);
}
});
}
private String createMergeContent(UUID confirmationToken, MailService mailService, String userName) {
String content = mailService.getMailTemplateContent(this.environment.getProperty("email.merge"));
content = content.replace("{userName}", userName);
@ -94,6 +119,16 @@ public class ConfirmationEmailServiceImpl implements ConfirmationEmailService {
return content;
}
private String createUnlinkContent(UUID confirmationToken, MailService mailService, String email) {
String content = mailService.getMailTemplateContent(this.environment.getProperty("email.unlink"));
content = content.replace("{confirmationToken}", confirmationToken.toString());
content = content.replace("{expiration_time}", secondsToTime(Integer.parseInt(this.environment.getProperty("conf_email.expiration_time_seconds"))));
content = content.replace("{host}", this.environment.getProperty("dmp.domain"));
content = content.replace("{email}", email);
return content;
}
private String secondsToTime(int seconds) {
int sec = seconds % 60;
int hour = seconds / 60;
@ -128,4 +163,30 @@ public class ConfirmationEmailServiceImpl implements ConfirmationEmailService {
sentMergeConfirmationEmail(confirmationEmail, mailService, principal.getName());
}
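// Creates an unlink confirmation record whose JSON payload carries the e-mail and provider to unlink, then dispatches the confirmation e-mail.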
@Override
public void createUnlinkConfirmationEmail(EmailConfirmationDao loginConfirmationEmailDao, MailService mailService,
String email, UUID userId, Principal principal, Integer provider) {
EmailConfirmation confirmationEmail = new EmailConfirmation();
confirmationEmail.setEmail(principal.getEmail());
confirmationEmail.setExpiresAt(Date
.from(new Date()
.toInstant()
.plusSeconds(Long.parseLong(this.environment.getProperty("conf_email.expiration_time_seconds")))
)
);
confirmationEmail.setUserId(userId);
try {
Map<String, Object> map = new HashMap<>();
map.put("email", email);
map.put("provider", provider.toString());
confirmationEmail.setData(new ObjectMapper().writeValueAsString(map));
} catch (JsonProcessingException e) {
logger.error(e.getMessage(), e);
}
confirmationEmail.setIsConfirmed(false);
confirmationEmail.setToken(UUID.randomUUID());
confirmationEmail = loginConfirmationEmailDao.createOrUpdate(confirmationEmail);
sentUnlinkConfirmationEmail(confirmationEmail, mailService);
}
}

View File

@ -86,6 +86,11 @@ public class ExportXmlBuilder {
composite.appendChild(title);
}
composite.appendChild(createFields(compositeField.getFields(), visibilityRuleService, element));
if(compositeField.getHasCommentField()){
Element comment = element.createElement("comment");
comment.setTextContent(compositeField.getCommentFieldValue());
composite.appendChild(comment);
}
elementComposites.appendChild(composite);
}

View File

@ -164,10 +164,14 @@ public class ExportXmlBuilderDatasetProfile {
numbering.setTextContent(field.getNumbering());
elementField.appendChild(numbering);
}
if (field.getRdaProperty() != null) {
Element rdaProperty = element.createElement("rdaProperty");
rdaProperty.setTextContent(field.getRdaProperty());
elementField.appendChild(rdaProperty);
if (field.getSchematics() != null) {
Element schematics = element.createElement("schematics");
field.getSchematics().forEach(schematic -> {
Element schematicChild = element.createElement("schematic");
schematicChild.setTextContent(schematic);
schematics.appendChild(schematicChild);
});
elementField.appendChild(schematics);
}
if (field.getValidations() != null) {
Element validations = element.createElement("validations");

View File

@ -29,7 +29,7 @@ public class Field {
private Object data;
private String rdaProperty;
private Schematics schematics;
@XmlAttribute(name = "id")
public String getId() {
@ -103,16 +103,15 @@ public class Field {
this.data = data;
}
@XmlElement(name = "rdaProperty")
public String getRdaProperty() {
return rdaProperty;
@XmlElement(name = "schematics")
public Schematics getSchematics() {
return schematics;
}
public void setRdaProperty(String rdaProperty) {
this.rdaProperty = rdaProperty;
public void setSchematics(Schematics schematics) {
this.schematics = schematics;
}
public eu.eudat.models.data.admin.components.datasetprofile.Field toAdminCompositeModelSection() {
eu.eudat.models.data.admin.components.datasetprofile.Field fieldEntity =new eu.eudat.models.data.admin.components.datasetprofile.Field();
fieldEntity.setId(this.id);
@ -131,7 +130,14 @@ public class Field {
if (data != null) {
fieldEntity.setData(data.toMap((Element) this.data));
}
fieldEntity.setRdaCommonStandard(this.rdaProperty);
List<String> schematicsList = new LinkedList<>();
if (this.schematics != null && this.schematics.getSchematics() != null) {
for (Schematic schematic : this.schematics.getSchematics()) {
if (schematic != null && schematic.getSchematic() != null && !schematic.getSchematic().isEmpty())
schematicsList.add(schematic.getSchematic());
}
}
fieldEntity.setSchematics(schematicsList);
return fieldEntity;
}
}

View File

@ -0,0 +1,19 @@
package eu.eudat.logic.utilities.documents.xml.datasetProfileXml.datasetProfileModel.Fields;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlValue;
@XmlRootElement(name = "schematic")
public class Schematic {
private String schematic;
@XmlValue
public String getSchematic() {
return schematic;
}
public void setSchematic(String schematic) {
this.schematic = schematic;
}
}

View File

@ -0,0 +1,20 @@
package eu.eudat.logic.utilities.documents.xml.datasetProfileXml.datasetProfileModel.Fields;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import java.util.List;
@XmlRootElement(name = "schematics")
public class Schematics {
private List<Schematic> schematics;
@XmlElement(name = "schematic")
public List<Schematic> getSchematics() {
return schematics;
}
public void setSchematics(List<Schematic> schematics) {
this.schematics = schematics;
}
}

View File

@ -29,6 +29,18 @@ public class JsonSearcher {
}
found++;
}
else if(node.get(fieldName).isArray()){
for(JsonNode item: node.get(fieldName)){
if(item.asText().equals(value) || item.asText().startsWith(value)){
if (parent) {
nodes.add(root);
} else {
nodes.add(node);
}
found++;
}
}
}
}
}

View File

@ -45,7 +45,7 @@ public class Field implements ViewStyleDefinition<eu.eudat.models.data.entities.
private String id;
private Integer ordinal;
private String rdaCommonStandard;
private List<String> schematics;
private String value;
private ViewStyle viewStyle;
private String datatype;
@ -70,11 +70,12 @@ public class Field implements ViewStyleDefinition<eu.eudat.models.data.entities.
this.ordinal = ordinal;
}
public String getRdaCommonStandard() {
return rdaCommonStandard;
public List<String> getSchematics() {
return schematics;
}
public void setRdaCommonStandard(String rdaCommonStandard) {
this.rdaCommonStandard = rdaCommonStandard;
public void setSchematics(List<String> schematics) {
this.schematics = schematics;
}
public String getValue() {
@ -152,7 +153,7 @@ public class Field implements ViewStyleDefinition<eu.eudat.models.data.entities.
field.setVisible(this.visible);
field.setDefaultValue(this.defaultValue);
field.setValidations(this.validations);
field.setRdaCommonStandard(this.rdaCommonStandard);
field.setSchematics(this.schematics);
field.setExport(this.export);
return field;
}
@ -166,7 +167,7 @@ public class Field implements ViewStyleDefinition<eu.eudat.models.data.entities.
this.visible = item.getVisible();
this.defaultValue = item.getDefaultValue();
this.validations = item.getValidations();
this.rdaCommonStandard = item.getRdaCommonStandard();
this.schematics = item.getSchematics();
this.export = item.getExport();
}

View File

@ -18,7 +18,7 @@ import java.util.List;
public class Field implements DatabaseViewStyleDefinition, XmlSerializable<Field> {
private String id;
private int ordinal;
private String rdaCommonStandard;
private List<String> schematics;
private String numbering;
private ViewStyle viewStyle;
private DefaultValue defaultValue;
@ -42,11 +42,11 @@ public class Field implements DatabaseViewStyleDefinition, XmlSerializable<Field
this.ordinal = ordinal;
}
public String getRdaCommonStandard() {
return rdaCommonStandard;
public List<String> getSchematics() {
return schematics;
}
public void setRdaCommonStandard(String rdaCommonStandard) {
this.rdaCommonStandard = rdaCommonStandard;
public void setSchematics(List<String> schematics) {
this.schematics = schematics;
}
public ViewStyle getViewStyle() {
@ -105,8 +105,14 @@ public class Field implements DatabaseViewStyleDefinition, XmlSerializable<Field
rootElement.setAttribute("id", this.id);
rootElement.setAttribute("ordinal", "" + this.ordinal);
Element rdaCommonStandard = doc.createElement("rdaCommonStandard");
rdaCommonStandard.setTextContent(this.rdaCommonStandard);
Element schematics = doc.createElement("schematics");
if (this.schematics != null) {
for (String s : this.schematics) {
Element schematic = doc.createElement("schematic");
schematic.setTextContent(s);
schematics.appendChild(schematic);
}
}
Element viewStyle = doc.createElement("viewStyle");
viewStyle.setAttribute("renderstyle", this.viewStyle.getRenderStyle());
@ -128,7 +134,7 @@ public class Field implements DatabaseViewStyleDefinition, XmlSerializable<Field
Element numbering = doc.createElement("numbering");
numbering.setTextContent(this.numbering);
rootElement.appendChild(rdaCommonStandard);
rootElement.appendChild(schematics);
rootElement.appendChild(numbering);
rootElement.appendChild(validations);
rootElement.appendChild(defaultValue);
@ -158,8 +164,17 @@ public class Field implements DatabaseViewStyleDefinition, XmlSerializable<Field
Element numbering = XmlBuilder.getNodeFromListByTagName(element.getChildNodes(), "numbering");
if (numbering != null) this.numbering = numbering.getTextContent();
Element rdaCommonStandard = XmlBuilder.getNodeFromListByTagName(element.getChildNodes(), "rdaCommonStandard");
if (rdaCommonStandard != null) this.rdaCommonStandard = rdaCommonStandard.getTextContent();
this.schematics = new LinkedList<>();
Element schematics = (Element) XmlBuilder.getNodeFromListByTagName(element.getChildNodes(), "schematics");
if(schematics != null){
NodeList schematicElements = schematics.getChildNodes();
for (int temp = 0; temp < schematicElements.getLength(); temp++) {
Node schematicElement = schematicElements.item(temp);
if (schematicElement.getNodeType() == Node.ELEMENT_NODE) {
this.schematics.add(schematicElement.getTextContent());
}
}
}
Element dataElement = (Element) XmlBuilder.getNodeFromListByTagName(element.getChildNodes(), "data");

View File

@ -18,6 +18,7 @@ public class ExternalDatasetListingModel implements DataModel<ExternalDataset, E
private String info;
private ExternalDatasetType type;
private String pid;
private String pidTypeField;
private String uri;
private String tag; // Api fetching the data
private String source; // Actual harvested source
@ -85,6 +86,13 @@ public class ExternalDatasetListingModel implements DataModel<ExternalDataset, E
this.pid = pid;
}
public String getPidTypeField() {
return pidTypeField;
}
public void setPidTypeField(String pidTypeField) {
this.pidTypeField = pidTypeField;
}
public String getUri() {
return uri;
}

View File

@ -14,6 +14,7 @@ public class PublicationModel implements DataModel<DataRepository, PublicationMo
private List<String> ids;
private String name;
private String pid;
private String pidTypeField;
private String abbreviation;
private String uri;
private Date created;
@ -100,6 +101,14 @@ public class PublicationModel implements DataModel<DataRepository, PublicationMo
this.pid = pid;
}
public String getPidTypeField() {
return pidTypeField;
}
public void setPidTypeField(String pidTypeField) {
this.pidTypeField = pidTypeField;
}
public String getName() {
return name;
}

View File

@ -39,7 +39,7 @@ public class Field implements Comparable, PropertiesModelBuilder, ViewStyleDefin
private List<Field> multiplicityItems;
private List<eu.eudat.models.data.admin.components.datasetprofile.Field.ValidationType> validations;
private Visibility visible;
private String rdaProperty;
private List<String> schematics;
private Boolean export;
@ -154,12 +154,12 @@ public class Field implements Comparable, PropertiesModelBuilder, ViewStyleDefin
this.numbering = numbering;
}
public String getRdaProperty() {
return rdaProperty;
public List<String> getSchematics() {
return schematics;
}
public void setRdaProperty(String rdaProperty) {
this.rdaProperty = rdaProperty;
public void setSchematics(List<String> schematics) {
this.schematics = schematics;
}
public Boolean getExport() {
@ -181,7 +181,7 @@ public class Field implements Comparable, PropertiesModelBuilder, ViewStyleDefin
newField.defaultValue = this.defaultValue;
newField.data = this.data;
newField.validations = this.validations;
newField.rdaProperty = this.rdaProperty;
newField.schematics = this.schematics;
newField.numbering = "mult" + index + "_" + this.numbering;
newField.export = this.export;
return newField;
@ -196,7 +196,7 @@ public class Field implements Comparable, PropertiesModelBuilder, ViewStyleDefin
field.setDefaultValue(this.defaultValue);
field.setVisible(this.visible);
field.setValidations(this.validations);
field.setRdaCommonStandard(this.rdaProperty);
field.setSchematics(this.schematics);
field.setExport(this.export);
return field;
}
@ -211,7 +211,7 @@ public class Field implements Comparable, PropertiesModelBuilder, ViewStyleDefin
this.defaultValue = item.getDefaultValue();
this.visible = item.getVisible();
this.validations = item.getValidations();
this.rdaProperty = item.getRdaCommonStandard();
this.schematics = item.getSchematics();
this.export = item.getExport();
}

View File

@ -0,0 +1,30 @@
package eu.eudat.models.data.userinfo;
import java.util.UUID;
public class UserUnlinkRequestModel {
private UUID userId;
private String email;
private Integer provider;
public UUID getUserId() {
return userId;
}
public void setUserId(UUID userId) {
this.userId = userId;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public Integer getProvider() {
return provider;
}
public void setProvider(Integer provider) {
this.provider = provider;
}
}

View File

@ -31,7 +31,19 @@ public class CostRDAMapper {
public static List<Cost> toRDAList(List<JsonNode> nodes) throws JsonProcessingException {
Map<String, Cost> rdaMap = new HashMap<>();
for(JsonNode node: nodes){
String rdaProperty = node.get("rdaProperty").asText();
String rdaProperty = "";
JsonNode schematics = node.get("schematics");
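// resolve the RDA property from the first schematic under rda.dmp.cost; nodes without a schematics array are skipped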
if(schematics.isArray()){
for(JsonNode schematic: schematics){
if(schematic.asText().startsWith("rda.dmp.cost")){
rdaProperty = schematic.asText();
break;
}
}
}
else{
continue;
}
String rdaValue = node.get("value").asText();
if(rdaValue == null || (rdaValue.isEmpty() && !node.get("value").isArray())){
continue;

View File

@ -27,7 +27,19 @@ public class DatasetIdRDAMapper {
public static DatasetId toRDA(List<JsonNode> nodes) {
DatasetId data = new DatasetId();
for (JsonNode node: nodes) {
String rdaProperty = node.get("rdaProperty").asText();
String rdaProperty = "";
JsonNode schematics = node.get("schematics");
if(schematics.isArray()){
for(JsonNode schematic: schematics){
if(schematic.asText().startsWith("rda.dataset.dataset_id")){
rdaProperty = schematic.asText();
break;
}
}
}
else{
continue;
}
String rdaValue = node.get("value").asText();
if(rdaValue == null || rdaValue.isEmpty()){
continue;
@ -77,11 +89,14 @@ public class DatasetIdRDAMapper {
public static Map<String, String> toProperties(DatasetId rda, JsonNode node) {
Map<String, String> properties = new HashMap<>();
List<JsonNode> idNodes = JsonSearcher.findNodes(node, "rdaProperty", "dataset.dataset_id");
List<JsonNode> idNodes = JsonSearcher.findNodes(node, "schematics", "rda.dataset.dataset_id");
for (JsonNode idNode: idNodes) {
for (DatasetIdProperties datasetIdProperties : DatasetIdProperties.values()) {
if (idNode.get("rdaProperty").asText().endsWith(datasetIdProperties.getName())) {
JsonNode schematics = idNode.get("schematics");
if(schematics.isArray()){
for(JsonNode schematic: schematics){
if(schematic.asText().endsWith(datasetIdProperties.getName())){
switch (datasetIdProperties) {
case IDENTIFIER:
properties.put(idNode.get("id").asText(), rda.getIdentifier());
@ -90,7 +105,9 @@ public class DatasetIdRDAMapper {
properties.put(idNode.get("id").asText(), rda.getType().value());
break;
}
break;
}
}
}
}
}

View File

@ -59,32 +59,32 @@ public class DatasetRDAMapper {
ObjectMapper mapper = new ObjectMapper();
String datasetDescriptionJson = mapper.writeValueAsString(datasetWizardModel.getDatasetProfileDefinition());
JsonNode datasetDescriptionObj = mapper.readTree(datasetDescriptionJson);
List<JsonNode> idNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.dataset_id");
List<JsonNode> idNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.dataset_id");
if (!idNodes.isEmpty()) {
rda.setDatasetId(DatasetIdRDAMapper.toRDA(idNodes));
}
if (rda.getDatasetId() == null) {
rda.setDatasetId(new DatasetId(dataset.getId().toString(), DatasetId.Type.OTHER));
}
List<JsonNode> typeNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.type");
List<JsonNode> typeNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.type");
if (!typeNodes.isEmpty() && !typeNodes.get(0).get("value").asText().isEmpty()) {
rda.setType(typeNodes.get(0).get("value").asText());
} else {
rda.setType("DMP Dataset");
}
List<JsonNode> languageNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.language");
List<JsonNode> languageNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.language");
if (!languageNodes.isEmpty() && !languageNodes.get(0).get("value").asText().isEmpty()) {
rda.setLanguage(Language.fromValue(languageNodes.get(0).get("value").asText()));
} else {
rda.setLanguage(LanguageRDAMapper.mapLanguageIsoToRDAIso(dataset.getProfile().getLanguage()));
}
List<JsonNode> metadataNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.metadata");
List<JsonNode> metadataNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.metadata");
if (!metadataNodes.isEmpty()) {
rda.setMetadata(MetadataRDAMapper.toRDAList(metadataNodes));
}else{
rda.setMetadata(new ArrayList<>());
}
List<JsonNode> qaNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.data_quality_assurance");
List<JsonNode> qaNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.data_quality_assurance");
if (!qaNodes.isEmpty()) {
/*rda.setDataQualityAssurance(qaNodes.stream().map(qaNode -> qaNode.get("value").asText()).collect(Collectors.toList()));
for (int i = 0; i < qaNodes.size(); i++) {
@ -115,17 +115,17 @@ public class DatasetRDAMapper {
}else{
rda.setDataQualityAssurance(new ArrayList<>());
}
List<JsonNode> preservationNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.preservation_statement");
List<JsonNode> preservationNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.preservation_statement");
if (!preservationNodes.isEmpty() && !preservationNodes.get(0).get("value").asText().isEmpty()) {
rda.setPreservationStatement(preservationNodes.get(0).get("value").asText());
}
List<JsonNode> distributionNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.distribution");
List<JsonNode> distributionNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.distribution");
if (!distributionNodes.isEmpty()) {
rda.setDistribution(DistributionRDAMapper.toRDAList(distributionNodes));
}else{
rda.setDistribution(new ArrayList<>());
}
List<JsonNode> keywordNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.keyword");
List<JsonNode> keywordNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.keyword");
if (!keywordNodes.isEmpty()) {
rda.setKeyword(keywordNodes.stream().map(keywordNode -> {
JsonNode value = keywordNode.get("value");
@ -142,7 +142,7 @@ public class DatasetRDAMapper {
List<String> tags = apiContext.getOperationsContext().getElasticRepository().getDatasetRepository().findDocument(dataset.getId().toString()).getTags().stream().map(Tag::getName).collect(Collectors.toList());
rda.setKeyword(tags);
}
List<JsonNode> personalDataNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.personal_data");
List<JsonNode> personalDataNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.personal_data");
if (!personalDataNodes.isEmpty()) {
try{
rda.setPersonalData(personalDataNodes.stream().map(personalDataNode -> Dataset.PersonalData.fromValue(personalDataNode.get("value").asText())).findFirst().get());
@ -152,13 +152,13 @@ public class DatasetRDAMapper {
} else {
rda.setPersonalData(Dataset.PersonalData.UNKNOWN);
}
List<JsonNode> securityAndPrivacyNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.security_and_privacy");
List<JsonNode> securityAndPrivacyNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.security_and_privacy");
if (!securityAndPrivacyNodes.isEmpty()) {
rda.setSecurityAndPrivacy(SecurityAndPrivacyRDAMapper.toRDAList(securityAndPrivacyNodes));
}else{
rda.setSecurityAndPrivacy(new ArrayList<>());
}
List<JsonNode> sensitiveDataNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.sensitive_data");
List<JsonNode> sensitiveDataNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.sensitive_data");
if (!sensitiveDataNodes.isEmpty()) {
try{
rda.setSensitiveData(sensitiveDataNodes.stream().map(sensitiveDataNode -> Dataset.SensitiveData.fromValue(sensitiveDataNode.get("value").asText())).findFirst().get());
@ -168,35 +168,47 @@ public class DatasetRDAMapper {
} else {
rda.setSensitiveData(Dataset.SensitiveData.UNKNOWN);
}
List<JsonNode> technicalResourceNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.technical_resource");
List<JsonNode> technicalResourceNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.technical_resource");
if (!technicalResourceNodes.isEmpty()) {
rda.setTechnicalResource(TechnicalResourceRDAMapper.toRDAList(technicalResourceNodes));
}else{
rda.setTechnicalResource(new ArrayList<>());
}
List<JsonNode> issuedNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.issued");
List<JsonNode> issuedNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.issued");
if (!issuedNodes.isEmpty() && !issuedNodes.get(0).get("value").asText().isEmpty()) {
rda.setIssued(issuedNodes.get(0).get("value").asText());
}
List<JsonNode> contributorNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dmp.contributor");
List<JsonNode> contributorNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dmp.contributor");
if (!contributorNodes.isEmpty()) {
dmp.getContributor().addAll(contributorNodes.stream().map(contributorNode -> {
JsonNode value = contributorNode.get("value");
if (value.isArray()) {
return StreamSupport.stream(value.spliterator(), false).map(node -> ContributorRDAMapper.toRDA(node.asText())).collect(Collectors.toList());
} else {
return Collections.singletonList(new Contributor()); // return null kalutera
return Collections.singletonList(new Contributor());
}
}).flatMap(Collection::stream).collect(Collectors.toList()));
}
List<JsonNode> costNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dmp.cost");
List<JsonNode> costNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dmp.cost");
if (!costNodes.isEmpty()) {
dmp.getCost().addAll(CostRDAMapper.toRDAList(costNodes));
}
List<JsonNode> ethicsNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dmp.ethical_issues");
List<JsonNode> ethicsNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dmp.ethical_issues");
if (!ethicsNodes.isEmpty()) {
for(JsonNode node: ethicsNodes){
String rdaProperty = node.get("rdaProperty").asText();
String rdaProperty = "";
JsonNode schematics = node.get("schematics");
if(schematics.isArray()){
for(JsonNode schematic: schematics){
if(schematic.asText().startsWith("rda.dmp.ethical_issues")){
rdaProperty = schematic.asText();
break;
}
}
}
else{
continue;
}
String rdaValue = node.get("value").asText();
if(rdaValue == null || rdaValue.isEmpty()){
continue;
@ -276,12 +288,12 @@ public class DatasetRDAMapper {
String datasetDescriptionJson = mapper.writeValueAsString(datasetWizardModel.getDatasetProfileDefinition());
JsonNode datasetDescriptionObj = mapper.readTree(datasetDescriptionJson);
List<JsonNode> typeNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.type");
List<JsonNode> typeNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.type");
if (!typeNodes.isEmpty()) {
properties.put(typeNodes.get(0).get("id").asText(), rda.getType());
}
List<JsonNode> languageNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.language");
List<JsonNode> languageNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.language");
if (!languageNodes.isEmpty() && rda.getLanguage() != null) {
properties.put(languageNodes.get(0).get("id").asText(), rda.getLanguage().value());
}
@ -298,7 +310,7 @@ public class DatasetRDAMapper {
for (int i = 0; i < qaIds.size(); i++) {
properties.put(qaIds.get(i), rda.getDataQualityAssurance().get(i));
}*/
List<JsonNode> qaNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.data_quality_assurance");
List<JsonNode> qaNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.data_quality_assurance");
if (!qaNodes.isEmpty() && rda.getDataQualityAssurance() != null && !rda.getDataQualityAssurance().isEmpty()) {
ObjectMapper m = new ObjectMapper();
List<String> qas = new ArrayList<>(rda.getDataQualityAssurance());
@ -310,12 +322,12 @@ public class DatasetRDAMapper {
}
}
List<JsonNode> preservationNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.preservation_statement");
List<JsonNode> preservationNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.preservation_statement");
if (!preservationNodes.isEmpty()) {
properties.put(preservationNodes.get(0).get("id").asText(), rda.getPreservationStatement());
}
List<JsonNode> issuedNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.issued");
List<JsonNode> issuedNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.issued");
if (!issuedNodes.isEmpty()) {
properties.put(issuedNodes.get(0).get("id").asText(), rda.getIssued());
}
@ -351,7 +363,7 @@ public class DatasetRDAMapper {
}
}
List<JsonNode> personalDataNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.personal_data");
List<JsonNode> personalDataNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.personal_data");
if (!personalDataNodes.isEmpty()) {
properties.put(personalDataNodes.get(0).get("id").asText(), rda.getPersonalData().value());
}
@ -360,7 +372,7 @@ public class DatasetRDAMapper {
properties.putAll(SecurityAndPrivacyRDAMapper.toProperties(rda.getSecurityAndPrivacy()));
}
List<JsonNode> sensitiveDataNodes = JsonSearcher.findNodes(datasetDescriptionObj, "rdaProperty", "dataset.sensitive_data");
List<JsonNode> sensitiveDataNodes = JsonSearcher.findNodes(datasetDescriptionObj, "schematics", "rda.dataset.sensitive_data");
if (!sensitiveDataNodes.isEmpty()) {
properties.put(sensitiveDataNodes.get(0).get("id").asText(), rda.getSensitiveData().value());
}
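The mapper above now resolves every field through the "schematics" array instead of the single "rdaProperty" string. The JsonSearcher implementation is not part of this diff, so the following is only a minimal sketch of a prefix-aware finder consistent with the call sites above; the traversal details and the standalone class name are assumptions.

import com.fasterxml.jackson.databind.JsonNode;
import java.util.ArrayList;
import java.util.List;

public class JsonSearcherSketch {
    // Collect every object node whose fieldName array holds an entry starting with prefix,
    // e.g. findNodes(root, "schematics", "rda.dataset.type").
    public static List<JsonNode> findNodes(JsonNode root, String fieldName, String prefix) {
        List<JsonNode> matches = new ArrayList<>();
        collect(root, fieldName, prefix, matches);
        return matches;
    }

    private static void collect(JsonNode node, String fieldName, String prefix, List<JsonNode> matches) {
        if (node.isObject()) {
            JsonNode field = node.get(fieldName);
            if (field != null && field.isArray()) {
                for (JsonNode entry : field) {
                    if (entry.asText().startsWith(prefix)) {
                        matches.add(node);
                        break;
                    }
                }
            }
        }
        // Recurse into both object members and array elements.
        for (JsonNode child : node) {
            collect(child, fieldName, prefix, matches);
        }
    }
}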

View File

@ -24,7 +24,10 @@ public class DistributionRDAMapper {
Map<String, Distribution> rdaMap = new HashMap<>();
for (JsonNode node: nodes) {
String rdaProperty = node.get("rdaProperty").asText();
String rdaProperty = getRdaDistributionProperty(node);
if(rdaProperty.isEmpty()){
continue;
}
String rdaValue = node.get("value").asText();
//if(rdaValue == null || rdaValue.isEmpty()){
if(rdaValue == null || (rdaValue.isEmpty() && !node.get("value").isArray())){
@ -79,7 +82,16 @@ public class DistributionRDAMapper {
rda.setAdditionalProperty(ImportPropertyName.BYTE_SIZE.getName(), node.get("id").asText());
break;
case LICENSE:
List<JsonNode> licenseNodes = nodes.stream().filter(lnode -> lnode.get("rdaProperty").asText().toLowerCase().contains("license")).collect(Collectors.toList());
List<JsonNode> licenseNodes = nodes.stream().filter(lnode -> {
if(lnode.get("schematics").isArray()){
for(JsonNode schematic: lnode.get("schematics")){
if(schematic.asText().startsWith("rda.dataset.distribution.license")){
return true;
}
}
}
return false;
}).collect(Collectors.toList());
License license = LicenseRDAMapper.toRDA(licenseNodes);
rda.setLicense(license != null? Collections.singletonList(license): new ArrayList<>());
break;
@ -182,11 +194,14 @@ public class DistributionRDAMapper {
public static Map<String, String> toProperties(Distribution rda, JsonNode root) {
Map<String, String> properties = new HashMap<>();
List<JsonNode> distributionNodes = JsonSearcher.findNodes(root, "rdaProperty", "dataset.distribution");
List<JsonNode> distributionNodes = JsonSearcher.findNodes(root, "schematics", "rda.dataset.distribution");
for (JsonNode distributionNode: distributionNodes) {
for (ExportPropertyName exportPropertyName: ExportPropertyName.values()) {
if (distributionNode.get("rdaProperty").asText().contains(exportPropertyName.getName())) {
JsonNode schematics = distributionNode.get("schematics");
if(schematics.isArray()){
for(JsonNode schematic: schematics){
if(schematic.asText().contains(exportPropertyName.getName())){
switch (exportPropertyName) {
case ACCESS_URL:
properties.put(distributionNode.get("id").asText(), rda.getAccessUrl());
@ -248,6 +263,9 @@ public class DistributionRDAMapper {
}
break;
}
break;
}
}
}
}
}
@ -258,7 +276,10 @@ public class DistributionRDAMapper {
public static Distribution toRDA(List<JsonNode> nodes) {
Distribution rda = new Distribution();
for (JsonNode node: nodes) {
String rdaProperty = node.get("rdaProperty").asText();
String rdaProperty = getRdaDistributionProperty(node);
if(rdaProperty.isEmpty()){
continue;
}
String rdaValue = node.get("value").asText();
for (ExportPropertyName exportPropertyName: ExportPropertyName.values()) {
if (rdaProperty.contains(exportPropertyName.getName())) {
@ -288,11 +309,29 @@ public class DistributionRDAMapper {
rda.setFormat(Collections.singletonList(rdaValue));
break;
case LICENSE:
List<JsonNode> licenseNodes = nodes.stream().filter(lnode -> lnode.get("rdaProperty").asText().toLowerCase().contains("license")).collect(Collectors.toList());
List<JsonNode> licenseNodes = nodes.stream().filter(lnode -> {
if(lnode.get("schematics").isArray()){
for(JsonNode schematic: lnode.get("schematics")){
if(schematic.asText().startsWith("rda.dataset.distribution.license")){
return true;
}
}
}
return false;
}).collect(Collectors.toList());
rda.setLicense(Collections.singletonList(LicenseRDAMapper.toRDA(licenseNodes)));
break;
case HOST:
List<JsonNode> hostNodes = nodes.stream().filter(lnode -> lnode.get("rdaProperty").asText().toLowerCase().contains("host")).collect(Collectors.toList());
List<JsonNode> hostNodes = nodes.stream().filter(lnode -> {
if(lnode.get("schematics").isArray()){
for(JsonNode schematic: lnode.get("schematics")){
if(schematic.asText().startsWith("rda.dataset.distribution.host")){
return true;
}
}
}
return false;
}).collect(Collectors.toList());
rda.setHost(HostRDAMapper.toRDA(hostNodes, "0"));
break;
}
@ -333,6 +372,20 @@ public class DistributionRDAMapper {
return rda;
}
private static String getRdaDistributionProperty(JsonNode node) {
String rdaProperty = "";
JsonNode schematics = node.get("schematics");
if(schematics.isArray()){
for(JsonNode schematic: schematics){
if(schematic.asText().startsWith("rda.dataset.distribution")){
rdaProperty = schematic.asText();
break;
}
}
}
return rdaProperty;
}
private static Distribution getRelative( Map<String, Distribution> rdaMap, String numbering) {
return rdaMap.entrySet().stream().filter(entry -> MyStringUtils.getFirstDifference(entry.getKey(), numbering) > 0)
.max(Comparator.comparingInt(entry -> MyStringUtils.getFirstDifference(entry.getKey(), numbering))).map(Map.Entry::getValue).orElse(new Distribution());

View File

@ -19,7 +19,19 @@ public class HostRDAMapper {
public static Host toRDA(List<JsonNode> nodes, String numbering) {
Host rda = new Host();
for (JsonNode node: nodes) {
String rdaProperty = node.get("rdaProperty").asText();
String rdaProperty = "";
JsonNode schematics = node.get("schematics");
if(schematics.isArray()){
for(JsonNode schematic: schematics){
if(schematic.asText().startsWith("rda.dataset.distribution.host")){
rdaProperty = schematic.asText();
break;
}
}
}
else{
continue;
}
if (rdaProperty.contains("host")) {
int firstDiff = MyStringUtils.getFirstDifference(numbering, node.get("numbering").asText());
if (firstDiff == -1 || firstDiff >= 2) {

View File

@ -17,7 +17,19 @@ public class LicenseRDAMapper {
public static License toRDA(List<JsonNode> nodes) {
License rda = new License();
for (JsonNode node: nodes) {
String rdaProperty = node.get("rdaProperty").asText();
String rdaProperty = "";
JsonNode schematics = node.get("schematics");
if(schematics.isArray()){
for(JsonNode schematic: schematics){
if(schematic.asText().startsWith("rda.dataset.distribution.license")){
rdaProperty = schematic.asText();
break;
}
}
}
else{
continue;
}
String value = node.get("value").asText();
if(value == null || value.isEmpty()){
continue;
@ -78,11 +90,14 @@ public class LicenseRDAMapper {
public static Map<String, String> toProperties(License rda, JsonNode root) {
Map<String, String> properties = new HashMap<>();
List<JsonNode> licenseNodes = JsonSearcher.findNodes(root, "rdaProperty", "dataset.distribution.license");
List<JsonNode> licenseNodes = JsonSearcher.findNodes(root, "schematics", "rda.dataset.distribution.license");
for (JsonNode licenseNode: licenseNodes) {
for (LicenceProperties licenceProperty: LicenceProperties.values()) {
if (licenseNode.get("rdaProperty").asText().endsWith(licenceProperty.getName())) {
JsonNode schematics = licenseNode.get("schematics");
if(schematics.isArray()) {
for (JsonNode schematic : schematics) {
if (schematic.asText().endsWith(licenceProperty.getName())) {
switch (licenceProperty) {
case LICENSE_REF:
if (rda.getLicenseRef() != null) {
@ -94,6 +109,9 @@ public class LicenseRDAMapper {
break;
}
}
break;
}
}
}
}

View File

@ -20,7 +20,19 @@ public class MetadataRDAMapper {
Map<String, String> rdaMap = new HashMap<>();
List<Metadatum> rdas = new ArrayList<>();
for (JsonNode node: nodes) {
String rdaProperty = node.get("rdaProperty").asText();
String rdaProperty = "";
JsonNode schematics = node.get("schematics");
if(schematics.isArray()){
for(JsonNode schematic: schematics){
if(schematic.asText().startsWith("rda.dataset.metadata")){
rdaProperty = schematic.asText();
break;
}
}
}
else{
continue;
}
JsonNode rdaValue = node.get("value");
for (PropertyName propertyName: PropertyName.values()) {
@ -119,7 +131,16 @@ public class MetadataRDAMapper {
public static Metadatum toRDA(JsonNode node) {
Metadatum rda = new Metadatum();
String rdaProperty = node.get("rdaProperty").asText();
String rdaProperty = "";
JsonNode schematics = node.get("schematics");
if(schematics.isArray()){
for(JsonNode schematic: schematics){
if(schematic.asText().startsWith("rda.dataset.metadata")){
rdaProperty = schematic.asText();
break;
}
}
}
JsonNode rdaValue = node.get("value");
if (rdaProperty.contains("metadata_standard_id")) {
if (rdaValue instanceof ArrayNode) {

View File

@ -16,7 +16,19 @@ public class SecurityAndPrivacyRDAMapper {
Map<String, SecurityAndPrivacy> rdaMap = new HashMap<>();
for (JsonNode node: nodes) {
String rdaProperty = node.get("rdaProperty").asText();
String rdaProperty = "";
JsonNode schematics = node.get("schematics");
if(schematics.isArray()){
for(JsonNode schematic: schematics){
if(schematic.asText().startsWith("rda.dataset.security_and_privacy")){
rdaProperty = schematic.asText();
break;
}
}
}
else{
continue;
}
String rdaValue = node.get("value").asText();
if(rdaValue == null || rdaValue.isEmpty()){
continue;
@ -69,8 +81,17 @@ public class SecurityAndPrivacyRDAMapper {
public static SecurityAndPrivacy toRDA(JsonNode node) {
SecurityAndPrivacy rda = new SecurityAndPrivacy();
String rdaProperty = node.get("rdaProperty").asText();
String value =node.get("value").asText();
String rdaProperty = "";
JsonNode schematics = node.get("schematics");
if(schematics.isArray()){
for(JsonNode schematic: schematics){
if(schematic.asText().startsWith("rda.dataset.security_and_privacy")){
rdaProperty = schematic.asText();
break;
}
}
}
String value = node.get("value").asText();
if (rdaProperty.contains("description")) {
rda.setDescription(value);

View File

@ -16,7 +16,19 @@ public class TechnicalResourceRDAMapper {
Map<String, TechnicalResource> rdaMap = new HashMap<>();
for (JsonNode node: nodes) {
String rdaProperty = node.get("rdaProperty").asText();
String rdaProperty = "";
JsonNode schematics = node.get("schematics");
if(schematics.isArray()){
for(JsonNode schematic: schematics){
if(schematic.asText().startsWith("rda.dataset.technical_resource")){
rdaProperty = schematic.asText();
break;
}
}
}
else{
continue;
}
String rdaValue = node.get("value").asText();
if(rdaValue == null || rdaValue.isEmpty()){
continue;
@ -70,7 +82,16 @@ public class TechnicalResourceRDAMapper {
public static TechnicalResource toRDA(JsonNode node) {
TechnicalResource rda = new TechnicalResource();
String rdaProperty = node.get("rdaProperty").asText();
String rdaProperty = "";
JsonNode schematics = node.get("schematics");
if(schematics.isArray()){
for(JsonNode schematic: schematics){
if(schematic.asText().startsWith("rda.dataset.technical_resource")){
rdaProperty = schematic.asText();
break;
}
}
}
String value = node.get("value").asText();
if (rdaProperty.contains("description")) {
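The same "first schematic starting with a prefix" loop recurs in DistributionRDAMapper, HostRDAMapper, LicenseRDAMapper, MetadataRDAMapper, SecurityAndPrivacyRDAMapper and TechnicalResourceRDAMapper above. A shared helper along the following lines could absorb that repetition; this is only a sketch, not code from the commit, and the class name is hypothetical.

import com.fasterxml.jackson.databind.JsonNode;

public final class SchematicsUtils {
    private SchematicsUtils() {
    }

    // Return the first entry of the node's "schematics" array that starts with the given
    // prefix (e.g. "rda.dataset.technical_resource"), or an empty string when the field is
    // missing, is not an array, or contains no matching entry.
    public static String firstSchematic(JsonNode node, String prefix) {
        JsonNode schematics = node.get("schematics");
        if (schematics == null || !schematics.isArray()) {
            return "";
        }
        for (JsonNode schematic : schematics) {
            if (schematic.asText().startsWith(prefix)) {
                return schematic.asText();
            }
        }
        return "";
    }
}

Each mapper loop would then reduce to String rdaProperty = SchematicsUtils.firstSchematic(node, "rda.dataset.technical_resource"); followed by a continue (or fallthrough) when the result is empty, matching the behaviour of the inlined loops.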

View File

@ -47,6 +47,7 @@ public class PublicDmpsDocumentation extends BaseController {
"4.6. version: integer, version of dmp\n" +
"4.7. groupId: uuid, group id in which dmp belongs\n" +
"4.8. users: list of UserInfoPublicModel, user who collaborated on the dmp\n" +
"4.9. researchers: list of ResearcherPublicModel, researchers involved in the dmp\n" +
"4.9. finalizedAt: date, finalization date\n" +
"4.10. publishedAt: date, publication date\n";
private static final String getPagedResponseExample = "{\n" +
@ -94,13 +95,16 @@ public class PublicDmpsDocumentation extends BaseController {
" 1. periodStart: date, dmps created date greater than periodStart\n" +
" 2. periodEnd: date, dmps created date less than periodEnd\n" +
" 3. grants: list of uuids, dmps with the corresponding grants\n" +
" 4. funders: list of uuids, dmps with the corresponding funders\n" +
" 5. datasetTemplates: list of uuids, dataset templates which are described in the dmps\n" +
" 6. dmpOrganisations: list of strings, dmps belonging to these organisations\n" +
" 7. collaborators: list of uuids, user who collaborated on the creation/modification of dmps\n" +
" 8. allVersions: boolean, if dmps should be fetched with all their versions\n" +
" 9. groupIds: list of uuids, in which groups the dmps are\n" +
"10. like: string, dmps fetched have this string matched in their label or description\n";
" 4. grantsLike: list of strings, dmps fetched having their grant matching any of the strings provided\n" +
" 5. funders: list of uuids, dmps with the corresponding funders\n" +
" 6. fundersLike: list of strings, dmps fetched having their funders matching any of the strings provided\n" +
" 7. datasetTemplates: list of uuids, dataset templates which are described in the dmps\n" +
" 8. dmpOrganisations: list of strings, dmps belonging to these organisations\n" +
" 9. collaborators: list of uuids, user who collaborated on the creation/modification of dmps\n" +
"10. collaboratorsLike: list of strings, dmps fetched having their collaborators matching any of the strings provided\n" +
"11. allVersions: boolean, if dmps should be fetched with all their versions\n" +
"12. groupIds: list of uuids, in which groups the dmps are\n" +
"13. like: string, dmps fetched have this string matched in their label or description\n";
private static final String getPagedRequestParamDescription = "The fieldsGroup is a string which indicates if the returned objects would have all their properties\n" +
"There are two available values: 1) listing and 2) autocomplete\n" +
"**listing**: returns objects with all their properties completed\n" +

View File

@ -15,14 +15,20 @@ public class DataManagementPlanPublicCriteria extends Criteria<DMP> {
private Date periodEnd;
@ApiModelProperty(value = "grants", name = "grants", dataType = "List<UUID>", example = "[]")
private List<UUID> grants;
@ApiModelProperty(value = "grantsLike", name = "grantsLike", dataType = "List<String>", example = "[]")
private List<String> grantsLike;
@ApiModelProperty(value = "funders", name = "funders", dataType = "List<UUID>", example = "[]")
private List<UUID> funders;
@ApiModelProperty(value = "fundersLike", name = "fundersLike", dataType = "List<String>", example = "[]")
private List<String> fundersLike;
@ApiModelProperty(value = "datasetTemplates", name = "datasetTemplates", dataType = "List<UUID>", example = "[]")
private List<UUID> datasetTemplates;
@ApiModelProperty(value = "dmpOrganisations", name = "dmpOrganisations", dataType = "List<String>", example = "[]")
private List<String> dmpOrganisations;
@ApiModelProperty(value = "collaborators", name = "collaborators", dataType = "List<UUID>", example = "[]")
private List<UUID> collaborators;
@ApiModelProperty(value = "collaboratorsLike", name = "collaboratorsLike", dataType = "List<String>", example = "[]")
private List<String> collaboratorsLike;
@ApiModelProperty(value = "allVersions", name = "allVersions", dataType = "Boolean", example = "false")
private boolean allVersions;
@ApiModelProperty(value = "groupIds", name = "groupIds", dataType = "List<UUID>", example = "[]")
@ -49,6 +55,13 @@ public class DataManagementPlanPublicCriteria extends Criteria<DMP> {
this.grants = grants;
}
public List<String> getGrantsLike() {
return grantsLike;
}
public void setGrantsLike(List<String> grantsLike) {
this.grantsLike = grantsLike;
}
public List<UUID> getFunders() {
return funders;
}
@ -56,6 +69,13 @@ public class DataManagementPlanPublicCriteria extends Criteria<DMP> {
this.funders = funders;
}
public List<String> getFundersLike() {
return fundersLike;
}
public void setFundersLike(List<String> fundersLike) {
this.fundersLike = fundersLike;
}
public List<UUID> getDatasetTemplates() {
return datasetTemplates;
}
@ -77,6 +97,13 @@ public class DataManagementPlanPublicCriteria extends Criteria<DMP> {
this.collaborators = collaborators;
}
public List<String> getCollaboratorsLike() {
return collaboratorsLike;
}
public void setCollaboratorsLike(List<String> collaboratorsLike) {
this.collaboratorsLike = collaboratorsLike;
}
public boolean getAllVersions() {
return allVersions;
}

View File

@ -57,8 +57,6 @@ public class DataManagementPlanPublicManager {
dataTable.setTotalCount(count);
});
CompletableFuture.allOf(itemsFuture, countFuture).join();
if(dataTable.getTotalCount() > dmpTableRequest.getLength())
dataTable.setTotalCount((long)dmpTableRequest.getLength());
return dataTable;
}

View File

@ -113,11 +113,7 @@ public class DatasetPublicManager {
select(this::mapPublicModel);
dataTable.setData(datasetLists.stream().filter(Objects::nonNull).collect(Collectors.toList()));
if(count <= datasetTableRequest.getLength())
dataTable.setTotalCount(count);
else
dataTable.setTotalCount((long)datasetTableRequest.getLength());
//CompletableFuture.allOf(countFuture).join();
return dataTable;
}

View File

@ -3,12 +3,10 @@ package eu.eudat.publicapi.models.listingmodels;
import eu.eudat.data.entities.DMP;
import eu.eudat.data.entities.Grant;
import eu.eudat.models.DataModel;
import eu.eudat.publicapi.models.researcher.ResearcherPublicModel;
import eu.eudat.publicapi.models.user.UserInfoPublicModel;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import java.util.*;
import java.util.stream.Collectors;
public class DataManagementPlanPublicListingModel implements DataModel<DMP, DataManagementPlanPublicListingModel> {
@ -20,6 +18,7 @@ public class DataManagementPlanPublicListingModel implements DataModel<DMP, Data
private int version;
private UUID groupId;
private List<UserInfoPublicModel> users;
private List<ResearcherPublicModel> researchers;
private Date finalizedAt;
private Date publishedAt;
@ -79,6 +78,13 @@ public class DataManagementPlanPublicListingModel implements DataModel<DMP, Data
this.users = users;
}
public List<ResearcherPublicModel> getResearchers() {
return researchers;
}
public void setResearchers(List<ResearcherPublicModel> researchers) {
this.researchers = researchers;
}
public Date getFinalizedAt() {
return finalizedAt;
}
@ -125,9 +131,11 @@ public class DataManagementPlanPublicListingModel implements DataModel<DMP, Data
this.modifiedAt = entity.getModified();
try {
this.users = entity.getUsers() != null ? entity.getUsers().stream().map(x -> new UserInfoPublicModel().fromDataModel(x)).collect(Collectors.toList()) : new ArrayList<>();
this.researchers = entity.getResearchers() != null ? entity.getResearchers().stream().map(x -> new ResearcherPublicModel().fromDataModel(x)).collect(Collectors.toList()) : new ArrayList<>();
}
catch(Exception ex){
this.users = new ArrayList<>();
this.researchers = new ArrayList<>();
}
this.finalizedAt = entity.getFinalizedAt();
this.publishedAt = entity.getPublishedAt();
@ -151,11 +159,12 @@ public class DataManagementPlanPublicListingModel implements DataModel<DMP, Data
grant.setLabel(this.getGrant());
entity.setGrant(grant);
entity.setUsers(this.getUsers().stream().map(UserInfoPublicModel::toDataModel).collect(Collectors.toSet()));
entity.setResearchers(this.getResearchers().stream().map(ResearcherPublicModel::toDataModel).collect(Collectors.toSet()));
return entity;
}
@Override
public String getHint() {
return "dataManagementPlanListingModel";
return "fullyDetailed";
}
}

View File

@ -8,9 +8,9 @@ import eu.eudat.queryable.QueryableList;
import eu.eudat.queryable.types.FieldSelectionType;
import eu.eudat.queryable.types.SelectionField;
import java.util.Arrays;
import java.util.Date;
import java.util.UUID;
import javax.persistence.criteria.Predicate;
import java.util.*;
import java.util.stream.Collectors;
public class DataManagmentPlanPublicTableRequest extends TableQuery<DataManagementPlanPublicCriteria, DMP, UUID> {
@ -27,8 +27,36 @@ public class DataManagmentPlanPublicTableRequest extends TableQuery<DataManageme
query.where((builder, root) -> builder.lessThan(root.get("created"), this.getCriteria().getPeriodEnd()));
if (this.getCriteria().getGrants() != null && !this.getCriteria().getGrants().isEmpty())
query.where(((builder, root) -> root.get("grant").get("id").in(this.getCriteria().getGrants())));
if (this.getCriteria().getGrantsLike() != null && !this.getCriteria().getGrantsLike().isEmpty()) {
query.where(((builder, root) -> {
List<Predicate> predicates = new ArrayList<>();
for(String grantLike: this.getCriteria().getGrantsLike()){
String pattern = "%" + grantLike.toUpperCase() + "%";
predicates.add(builder.like(builder.upper(root.get("grant").get("label")), pattern));
predicates.add(builder.like(builder.upper(root.get("grant").get("abbreviation")), pattern));
predicates.add(builder.like(builder.upper(root.get("grant").get("reference")), pattern));
predicates.add(builder.like(builder.upper(root.get("grant").get("definition")), pattern));
predicates.add(builder.like(builder.upper(root.get("grant").get("description")), pattern));
}
return builder.or(predicates.toArray(new Predicate[0]));
}
));
}
if (this.getCriteria().getFunders() != null && !this.getCriteria().getFunders().isEmpty())
query.where(((builder, root) -> root.get("grant").get("funder").get("id").in(this.getCriteria().getFunders())));
if (this.getCriteria().getFundersLike() != null && !this.getCriteria().getFundersLike().isEmpty()) {
query.where(((builder, root) -> {
List<Predicate> predicates = new ArrayList<>();
for(String funderLike: this.getCriteria().getFundersLike()){
String pattern = "%" + funderLike.toUpperCase() + "%";
predicates.add(builder.like(builder.upper(root.get("grant").get("funder").get("label")), pattern));
predicates.add(builder.like(builder.upper(root.get("grant").get("funder").get("reference")), pattern));
predicates.add(builder.like(builder.upper(root.get("grant").get("funder").get("definition")), pattern));
}
return builder.or(predicates.toArray(new Predicate[0]));
}
));
}
//query.where((builder, root) -> builder.lessThan(root.get("grant").get("enddate"), new Date())); // GrantStateType.FINISHED
query.where((builder, root) ->
@ -41,6 +69,19 @@ public class DataManagmentPlanPublicTableRequest extends TableQuery<DataManageme
query.where(((builder, root) -> root.join("organisations").get("reference").in(this.getCriteria().getDmpOrganisations())));
if (this.getCriteria().getCollaborators() != null && !this.getCriteria().getCollaborators().isEmpty())
query.where(((builder, root) -> root.join("researchers").get("id").in(this.getCriteria().getCollaborators())));
if (this.getCriteria().getCollaboratorsLike() != null && !this.getCriteria().getCollaboratorsLike().isEmpty()) {
query.where(((builder, root) -> {
List<Predicate> predicates = new ArrayList<>();
for(String collaboratorLike: this.getCriteria().getCollaboratorsLike()){
String pattern = "%" + collaboratorLike.toUpperCase() + "%";
predicates.add(builder.like(builder.upper(root.join("researchers").get("label")), pattern));
predicates.add(builder.like(builder.upper(root.join("researchers").get("uri")), pattern));
predicates.add(builder.like(builder.upper(root.join("researchers").get("primaryEmail")), pattern));
}
return builder.or(predicates.toArray(new Predicate[0]));
}
));
}
if (!this.getCriteria().getAllVersions()) {
query.initSubQuery(String.class).where((builder, root) -> builder.equal(root.get("version"),
query.<String>subQueryMax((builder1, externalRoot, nestedRoot) -> builder1.and(builder1.equal(externalRoot.get("groupId"),
@ -49,6 +90,7 @@ public class DataManagmentPlanPublicTableRequest extends TableQuery<DataManageme
if (this.getCriteria().getGroupIds() != null && !this.getCriteria().getGroupIds().isEmpty()) {
query.where((builder, root) -> root.get("groupId").in(this.getCriteria().getGroupIds()));
}
query.where((builder, root) -> builder.notEqual(root.get("status"), DMP.DMPStatus.DELETED.getValue()));
return query;
}

View File

@ -1,70 +0,0 @@
[
{
"Funder": "Australian Research Council",
"DOI": "10.13039/501100000923"
},
{
"Funder": "Austrian Science Fund",
"DOI": "10.13039/501100002428"
},
{
"Funder": "European Commission",
"DOI": "10.13039/501100000780"
},
{
"Funder": "European Environment Agency",
"DOI": "10.13039/501100000806"
},
{
"Funder": "Academy of Finland",
"DOI": "10.13039/501100002341"
},
{
"Funder": "Hrvatska Zaklada za Znanost",
"DOI": "10.13039/501100004488"
},
{
"Funder": "Fundação para a Ciência e a Tecnologia",
"DOI": "10.13039/501100001871"
},
{
"Funder": "Ministarstvo Prosvete, Nauke i Tehnološkog Razvoja",
"DOI": "10.13039/501100004564"
},
{
"Funder": "Ministarstvo Znanosti, Obrazovanja i Sporta",
"DOI": "10.13039/501100006588"
},
{
"Funder": "National Health and Medical Research Council",
"DOI": "10.13039/501100000925"
},
{
"Funder": "National Institutes of Health",
"DOI": "10.13039/100000002"
},
{
"Funder": "National Science Foundation",
"DOI": "10.13039/100000001"
},
{
"Funder": "Nederlandse Organisatie voor Wetenschappelijk Onderzoek",
"DOI": "10.13039/501100003246"
},
{
"Funder": "Research Councils",
"DOI": "10.13039/501100000690"
},
{
"Funder": "Schweizerischer Nationalfonds zur Förderung der wissenschaftlichen Forschung",
"DOI": "10.13039/501100001711"
},
{
"Funder": "Science Foundation Ireland",
"DOI": "10.13039/501100001602"
},
{
"Funder": "Wellcome Trust",
"DOI": "10.13039/100004440"
}
]

View File

@ -0,0 +1,458 @@
[
{
"category": "rda",
"name": "rda.dataset.data_quality_assurance"
},
{
"category": "rda",
"name": "rda.dataset.distribution.access_url"
},
{
"category": "rda",
"name": "rda.dataset.distribution.available_until"
},
{
"category": "rda",
"name": "rda.dataset.distribution.byte_size"
},
{
"category": "rda",
"name": "rda.dataset.distribution.data_access"
},
{
"category": "rda",
"name": "rda.dataset.distribution.description"
},
{
"category": "rda",
"name": "rda.dataset.distribution.download_url"
},
{
"category": "rda",
"name": "rda.dataset.distribution.format"
},
{
"category": "rda",
"name": "rda.dataset.distribution.host.availability"
},
{
"category": "rda",
"name": "rda.dataset.distribution.host.backup_frequency"
},
{
"category": "rda",
"name": "rda.dataset.distribution.host.backup_type"
},
{
"category": "rda",
"name": "rda.dataset.distribution.host.certified_with"
},
{
"category": "rda",
"name": "rda.dataset.distribution.host.description"
},
{
"category": "rda",
"name": "rda.dataset.distribution.host.geo_location"
},
{
"category": "rda",
"name": "rda.dataset.distribution.host.pid_system"
},
{
"category": "rda",
"name": "rda.dataset.distribution.host.storage_type"
},
{
"category": "rda",
"name": "rda.dataset.distribution.host.supports_versioning"
},
{
"category": "rda",
"name": "rda.dataset.distribution.host.title"
},
{
"category": "rda",
"name": "rda.dataset.distribution.host.url"
},
{
"category": "rda",
"name": "rda.dataset.distribution.license.license_ref"
},
{
"category": "rda",
"name": "rda.dataset.distribution.license.start_date"
},
{
"category": "rda",
"name": "rda.dataset.distribution.title"
},
{
"category": "rda",
"name": "rda.dataset.keyword"
},
{
"category": "rda",
"name": "rda.dataset.language"
},
{
"category": "rda",
"name": "rda.dataset.metadata.description"
},
{
"category": "rda",
"name": "rda.dataset.metadata.language"
},
{
"category": "rda",
"name": "rda.dataset.metadata.metadata_standard_id"
},
{
"category": "rda",
"name": "rda.dataset.metadata.metadata_standard_id.identifier"
},
{
"category": "rda",
"name": "rda.dataset.metadata.metadata_standard_id.type"
},
{
"category": "rda",
"name": "rda.dataset.personal_data"
},
{
"category": "rda",
"name": "rda.dataset.preservation_statement"
},
{
"category": "rda",
"name": "rda.dataset.security_and_privacy"
},
{
"category": "rda",
"name": "rda.dataset.security_and_privacy.description"
},
{
"category": "rda",
"name": "rda.dataset.security_and_privacy.title"
},
{
"category": "rda",
"name": "rda.dataset.sensitive_data"
},
{
"category": "rda",
"name": "rda.dataset.technical_resource.description"
},
{
"category": "rda",
"name": "rda.dataset.technical_resource.name"
},
{
"category": "rda",
"name": "rda.dataset.title"
},
{
"category": "rda",
"name": "rda.dataset.type"
},
{
"category": "rda",
"name": "rda.dataset.issued"
},
{
"category": "rda",
"name": "rda.dataset.dataset_id"
},
{
"category": "rda",
"name": "rda.dataset.dataset_id.identifier"
},
{
"category": "rda",
"name": "rda.dataset.dataset_id.type"
},
{
"category": "rda",
"name": "rda.dataset.description"
},
{
"category": "rda",
"name": "rda.dmp.contact"
},
{
"category": "rda",
"name": "rda.dmp.contact.contact_id.identifier"
},
{
"category": "rda",
"name": "rda.dmp.contact.contact_id.type"
},
{
"category": "rda",
"name": "rda.dmp.contact.mbox"
},
{
"category": "rda",
"name": "rda.dmp.contact.name"
},
{
"category": "rda",
"name": "rda.dmp.contributor"
},
{
"category": "rda",
"name": "rda.dmp.contributor.contributor_id.identifier"
},
{
"category": "rda",
"name": "rda.dmp.contributor.contributor_id.type"
},
{
"category": "rda",
"name": "rda.dmp.contributor.mbox"
},
{
"category": "rda",
"name": "rda.dmp.contributor.name"
},
{
"category": "rda",
"name": "rda.dmp.contributor.role"
},
{
"category": "rda",
"name": "rda.dmp.cost"
},
{
"category": "rda",
"name": "rda.dmp.cost.currency_code"
},
{
"category": "rda",
"name": "rda.dmp.cost.description"
},
{
"category": "rda",
"name": "rda.dmp.cost.title"
},
{
"category": "rda",
"name": "rda.dmp.cost.value"
},
{
"category": "rda",
"name": "rda.dmp.created"
},
{
"category": "rda",
"name": "rda.dmp.description"
},
{
"category": "rda",
"name": "rda.dmp.dmp_id"
},
{
"category": "rda",
"name": "rda.dmp.dmp_id.identifier"
},
{
"category": "rda",
"name": "rda.dmp.dmp_id.type"
},
{
"category": "rda",
"name": "rda.dmp.ethical_issues_description"
},
{
"category": "rda",
"name": "rda.dmp.ethical_issues_exist"
},
{
"category": "rda",
"name": "rda.dmp.ethical_issues_report"
},
{
"category": "rda",
"name": "rda.dmp.language"
},
{
"category": "rda",
"name": "rda.dmp.modified"
},
{
"category": "rda",
"name": "rda.dmp.project"
},
{
"category": "rda",
"name": "rda.dmp.project.description"
},
{
"category": "rda",
"name": "rda.dmp.project.end"
},
{
"category": "rda",
"name": "rda.dmp.project.funding"
},
{
"category": "rda",
"name": "rda.dmp.project.funding.funder_id.identifier"
},
{
"category": "rda",
"name": "rda.dmp.project.funding.funder_id.type"
},
{
"category": "rda",
"name": "rda.dmp.project.funding.funding_status"
},
{
"category": "rda",
"name": "rda.dmp.project.funding.grant_id.identifier"
},
{
"category": "rda",
"name": "rda.dmp.project.funding.grant_id.type"
},
{
"category": "rda",
"name": "rda.dmp.project.start"
},
{
"category": "rda",
"name": "rda.dmp.dmp.project.title"
},
{
"category": "rda",
"name": "rda.dmp.title"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isCitedBy"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.cites"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isSupplementTo"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isSupplementedBy"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isContinuedBy"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.continues"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isDescribedBy"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.describes"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.hasMetadata"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isMetadataFor"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isNewVersionOf"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isPreviousVersionOf"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isPartOf"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.hasPart"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isReferencedBy"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.references"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isDocumentedBy"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.documents"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isCompiledBy"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.compiles"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isVariantFormOf"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isOriginalFormof"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isIdenticalTo"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isAlternateIdentifier"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isReviewedBy"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.reviews"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isDerivedFrom"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isSourceOf"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.requires"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isRequiredBy"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.isObsoletedBy"
},
{
"category": "zenodo",
"name": "zenodo.related_identifiers.obsoletes"
}
]
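The file above pairs each semantic name with a category ("rda" or "zenodo"). How the application consumes it is not shown in this diff beyond the configuration.semantics property added further down, so the following is only a minimal deserialization sketch, assuming Jackson and a hypothetical Semantic holder class.

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;

class Semantic {
    public String category; // "rda" or "zenodo"
    public String name;     // e.g. "rda.dataset.title"
}

class SemanticsLoader {
    // Parse the Semantics.json content into a flat list of entries.
    static List<Semantic> load(InputStream json) throws IOException {
        return new ObjectMapper().readValue(json, new TypeReference<List<Semantic>>() {});
    }
}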

View File

@ -23,16 +23,16 @@ pdf.converter.url=http://localhost:3000/
####################CONFIGURATION FILES OVERRIDES CONFIGURATIONS##########
configuration.externalUrls=externalUrls/ExternalUrls.xml
configuration.rda=RDACommonStandards.txt
configuration.semantics=Semantics.json
configuration.h2020template=documents/h2020.docx
configuration.h2020datasettemplate=documents/h2020_dataset.docx
configuration.configurable_login_providers=configurableLoginProviders.json
configuration.doi_funder=DOI_Funder.json
####################EMAIL FILE TEMPLATES OVERRIDES CONFIGURATIONS##########
email.invite=classpath:templates/email/email.html
email.confirmation=classpath:templates/email/emailConfirmation.html
email.merge=classpath:templates/email/emailMergeConfirmation.html
email.unlink=classpath:templates/email/emailUnlinkConfirmation.html
#############FACEBOOK LOGIN CONFIGURATIONS#########
facebook.login.clientId=
@ -78,8 +78,6 @@ conf_email.expiration_time_seconds=14400
conf_email.subject=OpenDMP email confirmation
#############ZENODO CONFIGURATIONS#########
zenodo.url=https://sandbox.zenodo.org/api/
zenodo.access_token=
zenodo.login.access_token_url=https://sandbox.zenodo.org/oauth/token
zenodo.login.client_id=
zenodo.login.client_secret=

View File

@ -23,16 +23,15 @@ pdf.converter.url=http://opendmp-pdf:3000/
####################CONFIGURATION FILES OVERRIDES CONFIGURATIONS##########
configuration.externalUrls=externalUrls/ExternalUrls.xml
configuration.rda=RDACommonStandards.txt
configuration.h2020template=documents/h2020.docx
configuration.h2020datasettemplate=documents/h2020_dataset.docx
configuration.configurable_login_providers=configurableLoginProviders.json
configuration.doi_funder=DOI_Funder.json
####################EMAIL FILE TEMPLATES OVERRIDES CONFIGURATIONS##########
email.invite=classpath:templates/email/email.html
email.confirmation=classpath:templates/email/emailConfirmation.html
email.merge=classpath:templates/email/emailMergeConfirmation.html
email.unlink=classpath:templates/email/emailUnlinkConfirmation.html
email.dataset.template=classpath:templates/email/emailAdmin.html
####################INVITATION MAIL CONFIGURATIONS##############
@ -100,14 +99,11 @@ conf_email.expiration_time_seconds=14400
conf_email.subject=OpenDMP email confirmation
#############ZENODO CONFIGURATIONS#########
zenodo.url=https://sandbox.zenodo.org/api/
zenodo.access_token=
zenodo.login.access_token_url=https://sandbox.zenodo.org/oauth/token
zenodo.login.client_id=
zenodo.login.client_secret=
zenodo.login.redirect_uri=http://localhost:8080/login/external/zenodo
#############CONTACT EMAIL CONFIGURATIONS#########
contact_email.mail=
logging.config=classpath:logging/logback-${spring.profiles.active}.xml

View File

@ -17,7 +17,6 @@ pdf.converter.url=http://docsbox-web/
####################CONFIGURATION FILES OVERRIDES CONFIGURATIONS##########
configuration.externalUrls=externalUrls/ExternalUrls.xml
configuration.rda=RDACommonStandards.txt
configuration.h2020template=documents/h2020.docx
configuration.h2020datasettemplate=documents/h2020_dataset.docx
configuration.configurable_login_providers=ConfigurableLoginProviders.json

View File

@ -49,16 +49,16 @@ elasticsearch.certKey=
####################CONFIGURATION FILES OVERRIDES CONFIGURATIONS##########
configuration.externalUrls=externalUrls/ExternalUrls.xml
configuration.rda=RDACommonStandards.txt
configuration.semantics=Semantics.json
configuration.h2020template=documents/h2020.docx
configuration.h2020datasettemplate=documents/h2020_dataset.docx
configuration.configurable_login_providers=configurableLoginProviders.json
configuration.doi_funder=DOI_Funder.json
####################EMAIL FILE TEMPLATES OVERRIDES CONFIGURATIONS##########
email.invite=file:templates/email/email.html
email.confirmation=file:templates/email/emailConfirmation.html
email.merge=file:templates/email/emailMergeConfirmation.html
email.unlink=classpath:templates/email/emailUnlinkConfirmation.html
email.dataset.template=file:templates/email/emailAdmin.html
#############LOGIN CONFIGURATIONS#########
@ -111,7 +111,19 @@ database.lock-fail-interval=120000
##########################MISC##########################################
#############USER GUIDE#########
userguide.path=user-guide/
userguide.path=dmp-backend/web/src/main/resources/material/user-guide
#############ABOUT#########
about.path=dmp-backend/web/src/main/resources/material/about
#############TERMS OF SERVICE#########
termsofservice.path=dmp-backend/web/src/main/resources/material/terms-of-service
#############GLOSSARY#########
glossary.path=dmp-backend/web/src/main/resources/material/glossary
#############FAQ#########
faq.path=dmp-backend/web/src/main/resources/material/faq
#############NOTIFICATION#########
notification.rateInterval=30000

View File

@ -30,6 +30,43 @@
"type": "saml2",
"configurableLoginId": "",
"name": ""
"name": "",
"spEntityId": "",
"idpEntityId": "",
"idpUrl": "",
"idpMetadataUrl": "",
"idpArtifactUrl": "",
"binding": "Redirect",
"logoUrl": "",
"responseSigned": true,
"assertionSigned": true,
"assertionEncrypted": true,
"encryptionCert" : {
"alias": "",
"password": "",
"keystorePath": "encryptionkeystore.jks",
"keystorePassword": "",
"keyFormat": "JKS"
},
"signingCert" : {
"alias": "",
"password": "",
"keystorePath": "signingkeystore.jks",
"keystorePassword": "",
"keyFormat": "JKS"
},
"assertionConsumerServiceUrl": "",
"wantAssertionsSigned": true,
"authnRequestsSigned": true,
"usingFormat": "friendly_name",
"configurableUserFromAttributes": {
"email": "email",
"name": "givenName"
},
"attributeTypes": {
"email": "XSString",
"givenName": "XSString"
}
}
]
}

View File

@ -772,6 +772,8 @@ but not
<path>$['results'][*]['result']['metadata']['oaf:entity']['oaf:result']</path>
<fields>
<id>'originalId'</id>
<pid>pid.content</pid>
<pidTypeField>pid.classid</pidTypeField>
<name>'title'</name>
<count>'count'</count>
</fields>
@ -1017,6 +1019,8 @@ but not
<path>$['results'][*]['result']['metadata']['oaf:entity']['oaf:organization']</path>
<fields>
<id>'originalId'</id>
<pid>pid.content</pid>
<pidTypeField>pid.classid</pidTypeField>
<name>'legalname'</name>
<count>'count'</count>
</fields>
@ -1098,6 +1102,8 @@ but not
<fields>
<id>'originalId'</id>
<name>'title'</name>
<pid>pid.content</pid>
<pidTypeField>pid.classid</pidTypeField>
<count>'count'</count>
</fields>
</data>

View File

@ -0,0 +1,76 @@
<html>
<head>
<meta content="text/html; charset=UTF-8" http-equiv="content-type">
<meta name="viewport" content="width=device-width, initial-scale=1">
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha3/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-KK94CHFLLe+nY2dmCWGMq91rCGa5gtU4mk92HdvYe+M/SXH301p5ILy+dN9+nJOZ" crossorigin="anonymous">
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Roboto:wght@300&display=swap" rel="stylesheet">
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.3/jquery.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha3/dist/js/bootstrap.bundle.min.js" integrity="sha384-ENjdO4Dr2bkBIFxQpeoTz1HIcje39Wm4jDKdf19U8gI4ddQ3GYNS7NTKfAdVQSZe" crossorigin="anonymous"></script>
<style type="text/css">
h1 {
text-align: center;
margin: 2rem 0 1rem 0;
font-size: 3.3125rem;
line-height: 1.15;
font-weight: 300;
color: rgba(0,0,0,.87);
}
@media (min-width: 576px) {
.container {
max-width:540px
}
}
@media (min-width: 768px) {
.container {
max-width:720px
}
}
@media (min-width: 992px) {
.container {
max-width:960px
}
}
@media (min-width: 1244px) {
.container {
max-width:1204px!important
}
}
body {
font-family: Roboto,Helvetica,Arial,sans-serif;
font-size: 1rem;
font-weight: 300;
line-height: 1.5;
color: #212121;
text-align: left;
background: transparent;
}
</style>
</head>
<body>
<div class="container">
<div class="row">
<div class="col-md-12">
<h1>About</h1>
</div>
</div>
<div class="row">
<div class="col-md-12">
<p>ARGOS is an online tool in support of automated processes for creating, managing, sharing and linking DMPs with the research artifacts they correspond to. It is the joint effort of OpenAIRE and EUDAT to deliver an open platform for Data Management Planning that addresses FAIR and Open best practices and assumes no barriers for its use and adoption. It does so by applying common standards for machine-actionable DMPs as defined by the global research data community of RDA and by communicating and consulting with researchers, research communities and funders to better reflect their needs.
<br /><br />ARGOS provides a flexible environment and an easy interface for users to navigate and use.</p>
</div>
</div>
</div>
</body>
</html>

View File

@ -0,0 +1,749 @@
<html>
<head>
<meta content="text/html; charset=UTF-8" http-equiv="content-type">
<meta name="viewport" content="width=device-width, initial-scale=1">
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha3/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-KK94CHFLLe+nY2dmCWGMq91rCGa5gtU4mk92HdvYe+M/SXH301p5ILy+dN9+nJOZ" crossorigin="anonymous">
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Roboto:wght@300;500&display=swap" rel="stylesheet">
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.3/jquery.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha3/dist/js/bootstrap.bundle.min.js" integrity="sha384-ENjdO4Dr2bkBIFxQpeoTz1HIcje39Wm4jDKdf19U8gI4ddQ3GYNS7NTKfAdVQSZe" crossorigin="anonymous"></script>
<style type="text/css">
h3 {
font-size: 1.5625rem;
margin: 20px 0 10px;
line-height: 1.4em;
font-weight: 300;
}
h4 {
font-size: 1rem;
line-height: 1em;
font-weight: 500;
}
p {
text-align: left;
font-family: Roboto,sans-serif;
letter-spacing: 0;
color: #212121;
}
a {
text-decoration: none;
background-color: transparent;
}
a:hover {
color: #2e75b6;
}
p a {
color: #23bcba;
}
ul {
padding-left: 2.5rem;
}
.container-fluid {
margin: 0;
padding-left: 0;
}
.col-md-12 {
padding-right: 25px;
}
body {
font-family: Roboto,Helvetica,Arial,sans-serif;
font-size: 1rem;
font-weight: 300;
line-height: 1.5;
color: #212121;
text-align: left;
background: transparent;
}
</style>
</head>
<body>
<div class="container-fluid">
<div class="row">
<div class="col-md-12">
<h3>About ARGOS</h3>
<h4>What is ARGOS?</h4>
<p>Argos is an open and collaborative platform developed by <a href="https://www.openaire.eu/"
target="_blank">OpenAIRE</a> to facilitate
Research Data Management (RDM) activities concerning the implementation of Data
Management Plans. It uses OpenAIRE guides created by the <a
href="https://www.openaire.eu/task-forces-in-openaire-advance" target="_blank">RDM Task Force</a> to
familiarize users with basic RDM concepts and guide them throughout the process of
writing DMPs. It also utilises the OpenAIRE pool of services and inferred sources to
make DMPs more dynamic in use and easier to be completed and published. Argos is
based on the OpenDMP <a href="https://code-repo.d4science.org/MaDgiK-CITE/argos/src/branch/master"
target="_blank">open source software</a> and is available through the <a
href="http://catalogue.openaire.eu/" target="_blank">OpenAIRE
Service catalogue</a> and the <a
href="https://marketplace.eosc-portal.eu/services/argos?fromc=data-management"
target="_blank">EOSC</a>.</p>
<br />
<h4>Is Argos designed for one specific funder, e.g. the EC/Horizon Europe?</h4>
<p>
Argos is a flexible tool, designed to accommodate the policies and Research Data
Management (RDM) needs of all research-performing and research-funding organisations.
It already supports templates for different authorities.
These templates are created by Admin users in Argos.
In addition, we are currently working to provide non-administrative users with the capability
to modify templates according to their own needs.
</p>
<br />
<h4>Why use Argos?</h4>
<p>Argos is easy to use and navigate around. It familiarises users with the DMP process
and provides guidance on basic RDM concepts so that users find useful resources to
learn from without having to leave the Argos environment. Users can invite their
colleagues and collaboratively work on completing a DMP. Moreover, Argos is an
integral part of the OpenAIRE ecosystem and the <a href="https://zenodo.org/record/2643199#.X3HL0WgzY2x"
target="_blank">Research
Graph</a>. Argos integrates
other services of the ecosystem to enable contextualisation of information, which is
especially useful when data are re-used, for example to understand how/ if they can
be repurposed.</p>
<br />
<h4>Who is Argos for?</h4>
<p>Argos is designed as a tool for inclusive use by researchers, students, funders,
research communities and institutions. It can be used in the context of research
projects to comply with funders' RDM requirements, as a tool in support of
literacy programmes in academia or can be independently deployed to meet given
stakeholder demands. Also, it is available in native languages, thanks to the help
of OpenAIRE NOADs, which strengthens common understanding of all researchers
involved in the DMP writing process.
By using Argos, researchers and students are able to create their DMPs in
collaboration with other colleagues, learn basic RDM concepts throughout the process
and publish DMPs as outputs in an open and FAIR manner, among other things by
assigning DOIs and licenses and by maintaining DMPs as living documents through
versioning.
At the same time, Argos can be configured and deployed by funders, institutions and
research communities. They can plug in their own services and/ or make use of
the underlying OpenAIRE services that Argos is built with, ad hoc.</p>
<br />
<h4>Using Argos</h4>
<p>
Argos consists of two main functionalities: DMPs and Datasets.
Argos can be used for:
<br /><br /><span style="padding:20px;">
A. Viewing/ consulting publicly released DMPs and Datasets or Projects
corresponding to
DMPs
</span><br /><br />
Argos offers options for publishing DMPs in two modes, private or public. To view
public DMPs and Datasets, there is no need to log in to the platform.
<br /><br /><span style="padding:20px;">
B. Writing and publishing a DMP
</span><br /><br />
Argos helps researchers comply with mandates that may be attached to their grant
proposal/ project funding. They can therefore choose from the most suitable to their
needs template from the Datasets collection and proceed with answering the
corresponding questions. Once finalized, researchers can assign a DOI to their DMP,
publish and eventually cite it.
<br /><br /><span style="padding:20px;">
C. Practicing on writing DMPs and Dataset Descriptions
</span><br /><br />
Argos may be used for educational purposes. The process of Data Management Planning
reflects the data management lifecycle, hence the tool can be used in response to
global RDM training demands. Examples may refer to embedding DMPs and DMP tools in
specific curricula or be embedded in library instruction sessions to familiarize
researchers and students with the processes of RDM and DMP.
</p>
<br />
<h4>Can I exploit ARGOS DMPs?</h4>
<p>
Of course. If you want to compare DMPs or analyse DMP data, then we advise you to export the records in .xml.
This schema is the most complete as it includes all information held in a DMP: information provided by the Admin
when structuring the template and input provided by researchers when completing their DMPs.
</p>
<br />
<h3>Manage Account</h3>
<h4>Log in and out of Argos</h4>
<p>
You can log in to Argos by selecting one of the providers from the Login page. Argos
does not require Sign Up.
</p>
<br />
<h4>Create an administrator account</h4>
<p>
If you are interested in becoming an administrator in Argos and benefiting from extra
features relevant to creating tailored templates, please email <a href="mailto:argos@openaire.eu"
target="_blank">argos@openaire.eu</a> .
</p>
<br />
<h4>
Switch from administrator account
</h4>
<p>
There is no need to switch from your administrator account to use Argos. The only
difference between regular users' and administrators' profiles in Argos is an extra
set of tools at the bottom of the main toolbar that is positioned on the left-hand
side.
</p>
<br />
<h4>
Change your email
</h4>
<p>
Argos does not have Sign Up. To change email, please see “Switch between accounts”.
Alternatively, you can add more email addresses to your user account by selecting
the “Add alternative email” option in your profile.
</p>
<br />
<h4>
Switch between accounts
</h4>
<p>
You can switch between email accounts by logging in with different providers from the
Login page. The change depends on whether you have used different email addresses to
sign up with those providers. If only one email address is used
for all providers offered by Argos, then no change is expected. You can always add
new email accounts from the “Add alternative email” option in your profile
page.
</p>
<br />
<h4>
Delete your account
</h4>
<p>
If you want to delete your Argos profile, please email <a href="mailto:argos@openaire.eu"
target="_blank">argos@openaire.eu</a> .
</p>
<br />
<h3>
Accounts access and safety
</h3>
<h4>
How can I access my account and edit my profile?
</h4>
<p>
You can access your profile page and make desired edits by clicking on the avatar
at the very top of the toolbar located on the right-hand side.
</p>
<br />
<h4>
Can't log in to ARGOS
</h4>
<p>
Please try using a different provider from the Login page and contact us at:
<a href="mailto:argos@openaire.eu" target="_blank">argos@openaire.eu</a> .
</p>
<br />
<h4>
Accessing Argos
</h4>
<p>
If you are reading this right now, you probably know the answer already! One way to
access Argos is through the <a href="http://catalogue.openaire.eu/" target="_blank">OpenAIRE Service
catalogue</a>. Another way is through the
<a href="https://marketplace.eosc-portal.eu/services/argos?fromc=data-management" target="_blank">EOSC
Catalogue</a>. But you can always find Argos at
argos.openaire.eu.
To access Argos software, please visit
<a href="https://code-repo.d4science.org/MaDgiK-CITE/argos/src/branch/master"
target="_blank">https://code-repo.d4science.org/MaDgiK-CITE/argos/src/branch/master</a>
.
</p>
<br />
<h3>Argos User Roles</h3>
<h4>
Who is the author of a DMP?
</h4>
<p>
The authors of a DMP are everyone who contributes to writing it. Both Argos owners and
Argos members are DMP authors. Researchers, however, are not DMP authors by default.
</p>
<br />
<h4>
What is the difference between owners and
members?
</h4>
<p>
The Argos DMP owner is the person who initiates the DMP. People who are invited to join the
DMP process are members who contribute to writing the DMP. DMP owners have extra
editing rights and they are the ones to finalize the DMP process. Members can view
and edit DMPs and Datasets, but cannot perform further actions for their validation
or finalization.
</p>
<br />
<h4>
What is the role of a researcher in Argos?
</h4>
<p>
Researchers in Argos are project contributors and usually those who own or have
managed data described in respective DMPs.
</p>
<br />
<h4>
Can a researcher be a DMP author?
</h4>
<p>
Of course! This depends on whether the researcher has also been involved in the DMP
writing process.
</p>
<br />
<h4>
What does an Admin user do?
</h4>
<p>
Not everyone can become an Admin user in Argos. This happens upon request at
<a href="mailto:argos@openaire.eu" target="_blank">argos@openaire.eu</a>. Admin users are able to create
their own tailored templates from
a specialised editor, configure their own APIs and integrate services with Argos in
collaboration with, and with the support of, the Argos development team. Fees may apply
according to the type of request.
</p>
<br />
<h3>Creating DMPs</h3>
<h4>
I can't find my project in the list. What should
I do?
</h4>
<p>
DMPs that are created as part of a project proposal are not included in Argos.
Only accepted project proposals are listed in the platform. If you can't find your
project in the list (drop-down menu), please use the “Insert manually”
functionality.
</p>
<br />
<h4>
I can't find my grant in the list. What should I
do?
</h4>
<p>
If you can't find your grant in the list (drop-down menu), please use the “Insert
manually” functionality.
</p>
<br />
<h4>
How do I edit and design my own DMP
template?
</h4>
<p>
You have to be an Admin user to design your own template in Argos. To learn more
about Admin users, check “What does an Admin user do?”.
</p>
<br />
<h4>
Can I create my own templates in Argos?
</h4>
<p>
Yes, you can, provided that you are an Admin user. To learn more about Admin users,
check “What does an Admin user do?”.
</p>
<br />
<h4>
What is the difference between “Save”, “Save &
Close”, “Save & Add New”?
</h4>
<div>
<p>They all perform the same action, but the difference lies in where you are directed
after you have saved your DMP or Dataset.</p>
<ul>
<li>
When choosing Save, information that you have added in the editor is kept
and you
can continue adding more from the same page you were working on.
</li>
<li>
When choosing Save & Close, information that you have added is kept, but the
editor's window closes and you are redirected to your dashboard.
</li>
<li>
[only for datasets] When choosing Save & Add New, information that you have
added is
kept, and you are redirected to another editor to start a new dataset.
</li>
</ul>
</div>
<br />
<h4>
Can I modify things once I have finalized
them?
</h4>
<p>
Yes, you can, as long as you haven't assigned a DOI to your DMP. You just select
“Undo Finalization”.
</p>
<br />
<h4>
How do I invite collaborators?
</h4>
<p>
You may use the “Invite” button to share DMPs with your colleagues and start working
on them together.
</p>
<br />
<h4>
Can scientists collaborate on the same DMP even though they may belong to different institutions (e.g. a hospital, a university, etc., collaborating on a project) and the dataset also "belongs" to different institutions?
</h4>
<p>
Of course. Argos supports collaborations across diverse teams. There are two most frequent ways that can address this question:
<br /><br /><span style="padding:20px;">
A. Everyone works on the same DMP, but on different dataset descriptions
</span><br /><br />
In this case, each organisation makes its own dataset description(s) in a single DMP.
That means that the manager (i.e. the person responsible for the DMP activity) creates a DMP in ARGOS
and shares it with everyone. If the DMP is shared with co-ownership rights,
then those people will be able to edit it and add their dataset descriptions at any time during the project.
If there is a need to control the editing rights of the people writing the DMP, then the manager can create the dataset description(s)
and share these each time with the team members that are responsible for adding input for the specified datasets.
<br /><br /><span style="padding:20px;">
B. Everyone works on their own DMP and content is later merged into one single DMP
</span><br /><br />
In this case, each organisation might work on their own DMP for the same project.
At some point, you need to decide which DMP is going to be the core one for the work you perform, and share co-ownership
between the managers of all DMPs so that they can copy all dataset descriptions from their DMPs into this single DMP document.
</p>
<br />
<h4>
How do I create an identical DMP or Dataset as a
copy?
</h4>
<p>
DMPs and Datasets can be cloned and used in different research contexts.
Existing DMPs that are similar to new ones can be cloned, renamed and
then edited according to the new project's data requirements.
Existing Datasets can be cloned and used in new DMPs that reuse data described
in their context.
</p>
<br />
<h4>
What is the DMP version? How is it set?
</h4>
<p>
Versioning in Argos is both an internal and an external process. That means that
versioning happens both in the Argos environment when editing the DMP, and outside
of Argos when a DMP output is published in Zenodo. At every stage of the DMP
lifecycle, users have the option of keeping versions of the DMPs they are editing.
In Argos, users can create new versions of their DMPs by selecting the “Start New
Version” option to keep track of the evolution of their DMP throughout the writing
process. When published, versioning is associated with a DOI. Published DMPs are
automatically versioned every time a newer version of the same output is uploaded in
Zenodo.
</p>
<br />
<h3>
DMPs and Datasets
</h3>
<h4>
What is the DMP?
</h4>
<p>
A DMP in Argos consists of vital information about the research project on behalf of
which the DMP is created and of more in-depth information about the management,
handling and curation of datasets collected, produced or reused during the research
lifetime. A DMP in Argos accommodates documentation of more than one dataset. That
way, datasets can be described separately, following
different templates per type of dataset or research community concerned each time,
and can also be copied and used in multiple DMPs. Datasets are then bundled up
in a DMP and can be shared more broadly. Special attention is given to the handling
of data that are being re-used via OpenAIRE APIs.
</p>
<br />
<h4>
How do I find which Dataset template to use?
</h4>
<p>
This depends on the reason why you are creating a DMP in the first place. If it is
for compliance with funders', institutions' or research communities' RDM
policies, then you may select the dataset template of that particular stakeholder.
If you are creating a DMP for training purposes, you may select and work on any
template from the Argos collection.
</p>
<br />
<h4>
How do I create my own Dataset template?
</h4>
<p>
Currently, it is not possible for all Argos users to create dataset templates of
their own, so they have to work on predefined templates. Admin users have additional
rights to edit Dataset templates according to tailored needs. This is
expected to change in the near future. To learn more about Admin users, check “What
does an Admin user do?”.
</p>
<br />
<h4>
Can I create smaller versions of a template for project proposals?
</h4>
<p>
Yes, it is possible in Argos to create short versions of templates that can be used
for grant proposals, such as for Horizon Europe.
If you are interested in working with us to create this short version of any ARGOS template,
please contact us: <a href="mailto:argos@openaire.eu" target="_blank">argos@openaire.eu</a>.
</p>
<br />
<h4>
Can I customise an existing template (e.g. for a specific institution)?
</h4>
<p>
Yes, you can. In the current version, this is possible for Admin users who have their own on-premises or cloud deployment.
Please note that this is subject to change in the near future, as we are working on a feature that will allow all users
to customise (remove/add/extend) specific questions on the template they are working on.
</p>
<br />
<h4>
What is a Dataset?
</h4>
<p>
A Dataset in Argos is an editor with predefined questions that supports the creation of
descriptions of how data are / have been handled, managed and curated throughout the
research data lifecycle. The editor holds a collection of Dataset templates, each one
with different sets of predefined questions as per funders', institutions' and research
communities' RDM policy requirements. Researchers and students can choose the
template that corresponds to their RDM needs in order to get funding or get their
degree, respectively. A DMP in Argos may consist of one or more datasets.
</p>
<br />
<h4>
Why do I need more than one Dataset?
</h4>
<p>
You don't necessarily need to have many Datasets in a DMP. However, you might be
producing a plethora of data during your research that are diverse in type and/ or
scope of collection/ re-use, thus presenting diverse management needs. Argos gives
you the flexibility to describe all data produced and/ or re-used in your research
separately. That way it is easy to perform the mapping of information provided in a
DMP to the respective data types or data collections they correspond to. Equally,
reuse of particular datasets in different DMPs is easier. For the latter, please
check “How do I create an identical DMP or Dataset as a copy?”.
</p>
<br />
<h4>
It is not very clear to me when one should choose to add a dataset or to describe several "data products" in the same description.
</h4>
<p>
This is something that has to be tackled conceptually by the author of the DMP.
If those "products" have their own lifetime and rules (IPR, access rights, etc.), they should be described as different datasets.
Alternative formats should not be treated as different datasets, unless they have other differences due to the format, too.
But, for instance, if you have datasets in CSV and JSON formats under the same terms, they could be seen as one dataset description in the DMP.
</p>
<br />
<h4>
Can I add to my DMP information about datasets published on Zenodo?
</h4>
<p>
Argos offers a search of Zenodo for prefilling the DMP you are working on with dataset metadata.
This search has been developed according to the rules set by Zenodo
and therefore has the same behaviour as when you are using the search bar on the Zenodo interface.
However, we understand the need to be able to find records by their PID,
hence we introduced some changes and now support searching DOIs from the Argos interface.
</p>
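<p>
    For readers curious how such a DOI lookup can work, here is a minimal sketch against the
    public Zenodo REST API (a search on its records endpoint). This only illustrates the general
    idea under that assumption; it is not necessarily the exact call Argos performs.
</p>
<pre>
// Hedged sketch: look up a Zenodo record by DOI to prefill dataset metadata.
async function findZenodoRecordByDoi(doi: string) {
    const query = encodeURIComponent('doi:"' + doi + '"');
    const response = await fetch('https://zenodo.org/api/records?q=' + query);
    if (!response.ok) {
        throw new Error('Zenodo search failed with status ' + response.status);
    }
    const body = await response.json();
    // Zenodo search responses nest the matching records under hits.hits.
    const record = body.hits.hits[0];
    return record ? { title: record.metadata.title, doi: record.doi } : null;
}
</pre>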
<br />
<h4>
Is it possible to describe a dataset that is not yet in a repository?
</h4>
<p>
Of course! You can choose to manually describe your dataset, e.g. for a dataset you are planning to produce,
instead of pre-filling the template with available metadata from a dataset that has already been shared and preserved in a repository.
</p>
<br />
<h4>
What are public DMPs and Datasets?
</h4>
<p>
Public DMPs and Public Datasets are collections of openly available Argos outputs.
That means that DMP owners and members are making their DMP and/or Dataset outputs
available to all Argos and non-Argos users who might want to consult or re-use them
under the framework provided by the assigned DMP license. Please also check “Is all
the information I create visible by default?”.
</p>
<br />
<h4>
Is all information I create visible by
default?
</h4>
<p>
No, it is not. You can choose how your DMP is displayed in Argos from the
“Visibility” option. Choosing Public will immediately locate your DMP in the “Public
DMPs” collection and make it available to all Argos and non-Argos users.
Choosing Private will keep the DMP visible only to you and to the people invited to
edit the DMP in collaboration with you. Private DMPs are not publicly displayed to
other users.
</p>
<br />
<h4>
What is the difference between the DMP and the dataset export?
</h4>
<p>
The DMP export contains all vital information for a DMP, including funding and dataset details,
while the dataset export is a subset of the DMP export containing information only about a dataset described in the DMP.
Both DMP and Dataset exports are available in .pdf, .docx and .xml.
In addition, the DMP export is available in the RDA .json format
to increase the interoperability of DMPs exchanged through ARGOS.
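<p>
    As an illustration of working with the RDA .json export, here is a minimal sketch that lists
    the datasets described in a DMP. The field names (a top-level <code>dmp</code> object with a
    <code>dataset</code> array) are assumed from the RDA DMP Common Standard; please verify them
    against an actual export before relying on them.
</p>
<pre>
// Hedged sketch: summarise an RDA-formatted DMP export.
// Field names are assumed from the RDA DMP Common Standard, not taken from an ARGOS file.
interface RdaDmpExport {
    dmp: {
        title: string;
        dataset?: { title: string }[];
    };
}

function summariseRdaExport(json: string): string[] {
    const exported = JSON.parse(json) as RdaDmpExport;
    const datasets = exported.dmp.dataset ?? [];
    return ['DMP: ' + exported.dmp.title].concat(
        datasets.map(d => ' - dataset: ' + d.title)
    );
}
</pre>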
<br />
<h4>
Is there a storage allowance limitation for the
DMPs and Dataset files?
</h4>
<p>
No, there is no storage limit or fee for either DMP or Dataset files stored in Argos.
</p>
<br />
<h3>
Publishing DMPs
</h3>
<h4>
Is it possible to publish DMPs in different repositories (so not Zenodo)?
</h4>
<p>
Yes, it is possible.
But, to have different repositories attached to the system, you will need your own on-premises or cloud deployment.
We are already working on that for DSpace and Dataverse repositories.
</p>
<br />
<h4>
Do you know that Zenodo has empty DMPs from ARGOS?
</h4>
<p>
Yes, we are aware of that.
Argos has no control over the DMPs that you generate and publish and thus cannot be held accountable for empty DMPs.
Please remember that, as on all other occasions where you publish content, you should do so responsibly.
If you have any questions regarding publishing DMPs, don't hesitate to contact us at <a href="mailto:argos@openaire.eu" target="_blank">argos@openaire.eu</a>.
</p>
<br />
<h4>
Once I upload a final version of a DMP to Zenodo, do I need to update this first final version from Zenodo or from Argos?
</h4>
<p>
Both options are possible, depending on how you deposited the DMP in the first place.
If you deposited your DMP with a token (i.e. from the ARGOS account on Zenodo),
then you won't have editing rights on the Zenodo record, but you will still be able to make changes
in ARGOS by starting and depositing a new version of the published DMP.
However, if you deposited your DMP using your own account on Zenodo (i.e. logged in to Zenodo with your own credentials),
then you are also able to make minor changes, e.g. to the title of the DMP, directly from the Zenodo interface.
</p>
<br />
<h3>Troubleshooting</h3>
<h4>
Can't finalize a DMP
</h4>
<p>
You might be experiencing this problem because there are incomplete mandatory fields
in your DMP. Please check for those fields, fill them in with appropriate information and
try again. Should the problem persist, please contact <a href="mailto:argos@openaire.eu"
target="_blank">argos@openaire.eu</a>.
</p>
<br />
<h4>
Can't co-edit a DMP
</h4>
<p>
DMPs can be shared with many colleagues in support of collaborative writing, but a
DMP should be worked on by one person at a time. Argos will inform you if another
colleague has the DMP you are trying to edit open, so that your team avoids
information loss.
</p>
<br />
<h4>
Deposit is not working
</h4>
<p>
You need to have a Zenodo login to perform a deposit. Please sign up to Zenodo or
use the token option to publish your DMPs and get a DOI.
</p>
<br />
<h3>Legal and privacy</h3>
<h4>
Is Argos open source?
</h4>
<p>
Yes, it is. The OpenDMP software that Argos is deployed upon is open source,
available under the Apache 2.0 license. You may find more information about the software
<a href="https://code-repo.d4science.org/MaDgiK-CITE/argos/src/branch/master"
target="_blank">here</a>.
</p>
<br />
<h4>
Can I contribute to Argos development?
</h4>
<p>
Of course! Please feel free to suggest new features and to actively contribute to
Argos development via pull requests in <a
href="https://code-repo.d4science.org/MaDgiK-CITE/argos/src/branch/master"
target="_blank">Gitea</a>.
</p>
<br />
<h4>
Is Argos GDPR compliant?
</h4>
<p>
Argos takes all necessary steps in handling and protecting personal and sensitive
information. Please check the <a href="https://argos.openaire.eu/terms-and-conditions"
target="_blank">Argos Terms of Service and Privacy Policy</a>.
</p>
<br />
<h4>
Which is the Argos data policy?
</h4>
<p>
Please find all information about Argos' Terms of Service and Privacy Policy <a
href="https://argos.openaire.eu/terms-and-conditions" target="_blank">here</a>.
Additionally, you may find the Argos Cookies Policy <a href="https://argos.openaire.eu/cookies-policy"
target="_blank">here</a>.
</p>
<br />
<h4>
What is the work ownership of information
created in Argos?
</h4>
<p>
Unless there are any contractual or institutional agreements stating ownership of
outputs produced in the context of a project/ collaboration, owners of Argos outputs
are DMP contributors, i.e. DMP owners and DMP members, who have been involved with
writing the DMP.
</p>
<br />
<h4>
Which are the terms and policies of Argos?
</h4>
<p>
Please find all information about Argos' Terms of Service and Privacy Policy <a
href="https://argos.openaire.eu/terms-and-conditions" target="_blank">here</a>.
Additionally, you may find the Argos Cookies Policy <a href="https://argos.openaire.eu/cookies-policy"
target="_blank">here</a>.
</p>
<!-- <h4>What is ARGOS?</h4>
<p>ARGOS is an open and collaborative platform for creating Data Management Plans according to funders or institutions open science policy requirements. ARGOS technology provides solutions and workflows that connect the DMP to the actual data where they are stored and link to other useful information such as publications and funding information, thus enabling the association between research outputs and processes and leading to the creation of coherent/ complete research entities. ARGOS is comprised of two major features: the ARGOS template and the Dataset Description.</p>
<br />
<h4>Who is it for?</h4>
<p>ARGOS is inclusive to all researchers and research coordinators who may use the tool to create machine actionable DMPs. Funding and Research Performing Organizations as well as research communities may use the tool and create Dataset Description templates according to their preferences or requirements. ARGOS may be used for purposes other than research projects, such as on the occasion of trainings that familiarise scientists with the data management planning process.</p>
<br />
<h4>How can I use it?</h4>
<p>ARGOS is comprised of two main functionalities: DMP templates and Dataset Descriptions. Additional entities are Projects that link to funders and grants information.<br />ARGOS can be used for:
<br /><br /><u style="padding:20px;"> A. viewing/ consulting publicly released DMPs and Dataset Descriptions or Projects corresponding to DMPs</u><br /><br />
The tool offers options for publishing DMPs in two modes, private or public. To view public DMPs and Dataset Descriptions, there is no need for login to the platform.
<br /><br /><u style="padding:20px;"> B. writing and publishing a DMP</u><br /><br />
The tool helps researchers comply with mandates that may be attached to their grant proposal/ project funding. They can therefore choose from the most suitable to their needs template from the Dataset Descriptions collection and proceed with answering the corresponding questions. Once finalized, researchers can assign a DOI to their DMP, publish and eventually cite it.
<br /><br /><u style="padding:20px;"> C. practicing in writing DMPs and Dataset Descriptions</u><br /><br />
Given that Data Management Planning reflects the data management lifecycle and in accordance/ response to the increasing demand of the global scientific/ research community for training in Research Data Management (RDM), ARGOS may be used for educational purposes. Examples may refer to embedding DMP and DMP tools in specific curricula or even utilization of the tool for researchers and students familiarization with the concept and process, as part of library instructions sessions.
</p>
<br />
<h4>What is the difference between the “Wizard” and the “Expert” modes/ features?</h4>
<p>There are two ways of creating a DMP: the “Wizard” and the “Expert”. The DMP Wizard combines the most necessary fields of the DMP template and the Data Description template. It is an easy way of starting a DMP and completing a Dataset Description. The downside when using the Wizard is that it only supports one Dataset Description. To add more datasets documentation, someone must open the DMP from DMP Expert.
<br />DMP expert contains extra fields for describing the project, grant, funding, contributors and associations between DMP authors, etc. DMP Expert is advised for use when further modification and personalization is to take place.
</p> -->
</div>
</div>
</div>
</body>
</html>

View File

@ -0,0 +1,94 @@
<html>
<head>
<meta content="text/html; charset=UTF-8" http-equiv="content-type">
<meta name="viewport" content="width=device-width, initial-scale=1">
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha3/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-KK94CHFLLe+nY2dmCWGMq91rCGa5gtU4mk92HdvYe+M/SXH301p5ILy+dN9+nJOZ" crossorigin="anonymous">
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Roboto:wght@300;500&display=swap" rel="stylesheet">
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.3/jquery.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha3/dist/js/bootstrap.bundle.min.js" integrity="sha384-ENjdO4Dr2bkBIFxQpeoTz1HIcje39Wm4jDKdf19U8gI4ddQ3GYNS7NTKfAdVQSZe" crossorigin="anonymous"></script>
<style type="text/css">
h1 {
text-align: center;
margin: 2rem 0 1rem 0;
font-size: 3.3125rem;
line-height: 1.15;
font-weight: 300;
color: rgba(0,0,0,.87);
}
h4 {
font-size: 1rem;
line-height: 1em;
font-weight: 500;
color: rgba(0,0,0,.87);
}
@media (min-width: 576px) {
.container {
max-width:540px
}
}
@media (min-width: 768px) {
.container {
max-width:720px
}
}
@media (min-width: 992px) {
.container {
max-width:960px
}
}
@media (min-width: 1244px) {
.container {
max-width:1204px!important
}
}
body {
font-family: Roboto,Helvetica,Arial,sans-serif;
font-size: 1rem;
font-weight: 300;
line-height: 1.5;
color: #212121;
text-align: left;
background: transparent;
}
</style>
</head>
<body>
<div class="container">
<div class="row">
<div class="col-md-12">
<h1>Glossary</h1>
</div>
</div>
<div class="row">
<div class="col-md-12">
<h4>DMP</h4>
<p>A DMP - short for Data Management Plan - is a document describing the processes that the data have undergone and the tools used for their handling and storage during a research lifecycle. Most importantly, DMPs secure provenance and enable re-use of data by appointing data managers and by including information on how data can be re-used by others in the future. Therefore, a DMP is a living document which is modified according to the data developments of a project before it is completed and handed over at the end of the project.
Public funders increasingly include DMPs in their grant proposal or policy funding requirements. A good example is the European Commission's demand for the production and delivery of DMPs for projects funded under the Horizon 2020 Funding Programme. On that note, and to encourage the uptake of good data management practices, many European institutions include DMPs in post-graduate researchers' policies and offer relevant support to staff and students.
</p>
<h4>DMP template</h4>
<p>The DMP template contains general but vital information about the name and the duration of the project that the DMP corresponds to, the contributing organisations and individuals, as well as the datasets that are under the Dataset Description section. It also offers the possibility of describing datasets other than the primary data generated, under the “External References” section. A DMP template can have many Dataset Descriptions.
</p>
<h4>Dataset Description</h4>
<p>A Dataset Description documents the management processes of datasets following funders' or institutions' requirements. A dataset description is essentially a questionnaire template with underlying added-value services for interoperability and machine readability of information, which is developed based on the given requirements. Management requirements differ from funder to funder and from institution to institution, hence the growing collection of Dataset Descriptions to select from.
Moreover, a Dataset Description links to the documentation of one dataset, hence a DMP template may contain more than one dataset description when multiple datasets were used during the project. When documentation of some of the project's datasets falls under additional requirements (e.g. projects receiving multiple grants from different sources), there is the possibility of describing datasets with more than one Dataset Description template.
</p>
</div>
</div>
</div>
</body>
</html>

View File

@ -0,0 +1,115 @@
<html>
<head>
<meta content="text/html; charset=UTF-8" http-equiv="content-type">
<meta name="viewport" content="width=device-width, initial-scale=1">
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha3/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-KK94CHFLLe+nY2dmCWGMq91rCGa5gtU4mk92HdvYe+M/SXH301p5ILy+dN9+nJOZ" crossorigin="anonymous">
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Roboto:wght@300&display=swap" rel="stylesheet">
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.3/jquery.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha3/dist/js/bootstrap.bundle.min.js" integrity="sha384-ENjdO4Dr2bkBIFxQpeoTz1HIcje39Wm4jDKdf19U8gI4ddQ3GYNS7NTKfAdVQSZe" crossorigin="anonymous"></script>
<style type="text/css">
h1 {
text-align: center;
margin: 2rem 0 1rem 0;
font-size: 3.3125rem;
line-height: 1.15;
font-weight: 300;
color: rgba(0,0,0,.87);
}
ol {
padding-left: 2.5rem;
}
a {
color: #6aa4d9;
text-decoration: none;
background-color: transparent;
}
a:hover {
color: #2e75b6;
}
p a {
color: #23bcba;
}
@media (min-width: 576px) {
.container {
max-width:540px
}
}
@media (min-width: 768px) {
.container {
max-width:720px
}
}
@media (min-width: 992px) {
.container {
max-width:960px
}
}
@media (min-width: 1244px) {
.container {
max-width:1204px!important
}
}
body {
font-family: Roboto,Helvetica,Arial,sans-serif;
font-size: 1rem;
font-weight: 300;
line-height: 1.5;
color: #212121;
text-align: left;
background: transparent;
}
</style>
</head>
<body>
<div class="container terms-component">
<div class="row">
<div class="col-md-12">
<h1>Terms of Service</h1>
</div>
</div>
<div class="row">
<div class="col-md-12">
<p><span>The </span><span>OpenDMP</span><span>&nbsp;service was developed to provide a more flexible, </span><span>collaborative </span><span>environment with machine actionable solutions in writing, sharing and publishing Data Management Plans (DMPs). It is a product of </span><span>cooperation between </span><span>OpenAIRE </span><span>AMKE</span><span class="c0">&nbsp;and EUDAT CDI and is offered both as a software &ldquo;OpenDMP&rdquo; and as an online service under the name &ldquo;ARGOS&rdquo;. </span></p>
<p><span></span></p>
<ol>
<li><span><a href="https://code-repo.d4science.org/MaDgiK-CITE/argos">OpenDMP software</a></span><span>&nbsp;is offered under the Free Open Source Software license &nbsp;</span><span>Apache 2.0</span><span class="c0">, for further development and use by institutions and interested parties.</span></li>
<li><span><a href="https://argos.openaire.eu/">ARGOS</a></span><span>&nbsp;service</span><span>&nbsp;is offered by</span><span>&nbsp;</span><span>OpenAIRE</span><span>&nbsp;as </span><span>part of its mission to support Open Science in the European Research Area, focusing on information linking and contextualisation that enriches its </span><span class="c5"><a href="https://zenodo.org/record/2600275#.XZpJgUYzY2w">Research Graph</a></span><span>.</span><span class="c0">&nbsp;Use of ARGOS denotes agreement with the following terms:</span>
<ol>
<li><span>ARGOS is a software interface and a database with no storage capacity to store or preserve research data. The DMPs created are hosted in the </span><span>OpenAIRE </span><span>production environment for the sole purpose of exposing the DMP records once finalised (&ldquo;published&rdquo;). If assigned a DOI, the DMP records are linked to and preserved in Zenodo, the OpenAIRE&rsquo;s repository service. The ARGOS service is made available for use free-of-charge for research, educational and informational purposes.</span></li>
<li><span>Login to ARGOS is possible through a variety of external providers, among which Google, Facebook, Twitter, B2Access </span><span>and OpenAIRE Login</span><span>, that share information of their user profiles with ARGOS, </span><span>according to the rights that have been granted to the given provider by the user.</span><span>&nbsp;External email addresses that are used in invitations for collaborations are held in ARGOS database that stores information about only the </span><span class="c11">name</span><span>, </span><span class="c11">surname</span><span>&nbsp;</span><span>and </span><span class="c11">email address</span><span>&nbsp;</span><span>of the DMP creator and collaborator. &nbsp;Personal data is collected via the login option and via email invitations sent to external DMP contributors. This personal information as well as the activity of ARGOS users is used only for deriving usage metrics and assessing the service quality. They are stored in ARGOS database for as long as the account is active and they are accessible only from people in the team in charge of </span><span>quality and risk assessment</span><span>. </span><span>They will not be used for other purposes other than the ones stated in this document and they can be deleted at any time should the user claim a relevant request. </span><span>The aforementioned processes are also facilitated by the use of c</span><span>ookies</span><span>&nbsp;(see below the &ldquo;Cookie policy&rdquo;). </span></li>
<li><span>Data concerning DMP information will be used by OpenAIRE for research and development </span><span>purposes, </span><span>such as identifying DMP models, and for ensuring compliance with policy requirements and monitoring of DMPs uptake linked to OpenAIRE&rsquo;s Monitoring Dashboards and the Open Science Observatory.</span></li>
<li><span>The DMP Manager</span><span>, i.e. the person who creates and manages a DMP, and/ or the </span><span>contributor, i.e. the person who is invited to collaborate on a DMP, shall ensure that content is accurate and presented in a way that adheres to these </span><span>Terms of Service</span><span>&nbsp;and applicable laws, including, but not limited to, privacy, data protection and intellectual property rights.</span></li>
<li><span>ARGOS service is provided by OpenAIRE &ldquo;as is&rdquo;. Although OpenAIRE and its partners take measures for the availability, dependability, and accuracy of the service, access to ARGOS, utilisation of its features and preservation of the data deposited or produced by the service are not guaranteed. OpenAIRE cannot be held responsible </span><span>for any data loss regarding DMPs,</span><span>&nbsp;ethical or financial damage or any other direct or indirect impact that any failure of ARGOS service may have on its users. &nbsp;</span></li>
<li><span>ARGOS </span><span>users are exclusively responsible for their use of content, and shall hold OpenAIRE free and harmless in connection with their download and/or use.</span></li>
<li><span>OpenAIRE may not be held responsible for the content provided </span><span>or statements</span><span>&nbsp;made in Data Management Plans created and managed by its users. </span></li>
<li><span>All content is provided &ldquo;as-is&rdquo;. Users of content (&ldquo;Users&rdquo;) shall respect applicable license conditions. Download and use of content from ARGOS does not transfer any intellectual property rights in the content to the User.</span></li>
<li><span>In the case any content is reported as violating third party rights or other legal provisions, ARGOS reserves the right to remove the content from the service until the dispute is legally settled. Any such incidents should be reported at </span><span class="c5"><a href="mailto:noticeandtakedown@openaire.eu">noticeandtakedown@openaire.eu</a></span><span class="c0">&nbsp;</span></li>
<li><span>ARGOS users are held responsible for the data and information they provide in the service. Users may not add information, data or any other type of artifact that may be </span><span>malicious</span><span>, intentionally erroneous and potentially harmful for other ARGOS users, IPR owners and/or the general public.</span></li>
<li><span>In case a user of ARGOS identifies a potential </span><span>infringement</span><span>&nbsp;of copyright, </span><span>harmful</span><span>&nbsp;or </span><span>malicious </span><span>operation, function, code, information or data, they shall inform OpenAIRE, providing sufficient evidence for the identification of the case and the information and/or data challenged.</span></li>
<li><span>OpenAIRE reserves the right, without notice, at its sole discretion and without liability, (i) to alter or delete inappropriate content, and (ii) to restrict or remove User access where it considers that use of ARGOS interferes with its operations or violates these Terms of Service or applicable laws.</span></li>
<li><span>These Terms of Service are subject to change by OpenAIRE at any time and without notice, other than through posting the updated Terms of Service on the OpenAIRE website and indicating the version and date of last update.</span></li>
</ol>
</li>
</ol>
<p><span>For any questions or comments you may have about the current Terms of Service, please contact us: </span><span class="c5"><a href="mailto:argos@openaire.eu">argos@openaire.eu</a></span><span class="c0">&nbsp;</span></p>
</div>
</div>
</div>
</body>
</html>

View File

@ -0,0 +1,304 @@
<!doctype html>
<html>
<head>
<meta name="viewport" content="width=device-width" />
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<title>Simple Transactional Email</title>
<style>
/* -------------------------------------
GLOBAL RESETS
------------------------------------- */
img {
border: none;
-ms-interpolation-mode: bicubic;
max-width: 100%; }
body {
background-color: #f6f6f6;
font-family: sans-serif;
-webkit-font-smoothing: antialiased;
font-size: 14px;
line-height: 1.4;
margin: 0;
padding: 0;
-ms-text-size-adjust: 100%;
-webkit-text-size-adjust: 100%; }
table {
border-collapse: separate;
mso-table-lspace: 0pt;
mso-table-rspace: 0pt;
width: 100%; }
table td {
font-family: sans-serif;
font-size: 14px;
vertical-align: top; }
/* -------------------------------------
BODY & CONTAINER
------------------------------------- */
.body {
background-color: #f6f6f6;
width: 100%; }
/* Set a max-width, and make it display as block so it will automatically stretch to that width, but will also shrink down on a phone or something */
.container {
display: block;
Margin: 0 auto !important;
/* makes it centered */
max-width: 580px;
padding: 10px;
width: 580px; }
/* This should also be a block element, so that it will fill 100% of the .container */
.content {
box-sizing: border-box;
display: block;
Margin: 0 auto;
max-width: 580px;
padding: 10px; }
/* -------------------------------------
HEADER, FOOTER, MAIN
------------------------------------- */
.main {
background: #ffffff;
border-radius: 3px;
width: 100%; }
.wrapper {
box-sizing: border-box;
padding: 20px; }
.content-block {
padding-bottom: 10px;
padding-top: 10px;
}
.footer {
clear: both;
Margin-top: 10px;
text-align: center;
width: 100%; }
.footer td,
.footer p,
.footer span,
.footer a {
color: #999999;
font-size: 12px;
text-align: center; }
/* -------------------------------------
TYPOGRAPHY
------------------------------------- */
h1,
h2,
h3,
h4 {
color: #000000;
font-family: sans-serif;
font-weight: 400;
line-height: 1.4;
margin: 0;
Margin-bottom: 30px; }
h1 {
font-size: 35px;
font-weight: 300;
text-align: center;
text-transform: capitalize; }
p,
ul,
ol {
font-family: sans-serif;
font-size: 14px;
font-weight: normal;
margin: 0;
Margin-bottom: 15px; }
p li,
ul li,
ol li {
list-style-position: inside;
margin-left: 5px; }
a {
color: #3498db;
text-decoration: underline; }
/* -------------------------------------
BUTTONS
------------------------------------- */
.btn {
box-sizing: border-box;
width: 100%; }
.btn > tbody > tr > td {
padding-bottom: 15px; }
.btn table {
width: auto; }
.btn table td {
background-color: #ffffff;
border-radius: 5px;
text-align: center; }
.btn a {
background-color: #ffffff;
border: solid 1px #3498db;
border-radius: 5px;
box-sizing: border-box;
color: #3498db;
cursor: pointer;
display: inline-block;
font-size: 14px;
font-weight: bold;
margin: 0;
padding: 12px 25px;
text-decoration: none;
text-transform: capitalize; }
.btn-primary table td {
background-color: #3498db; }
.btn-primary a {
background-color: #3498db;
border-color: #3498db;
color: #ffffff; }
/* -------------------------------------
OTHER STYLES THAT MIGHT BE USEFUL
------------------------------------- */
.last {
margin-bottom: 0; }
.first {
margin-top: 0; }
.align-center {
text-align: center; }
.align-right {
text-align: right; }
.align-left {
text-align: left; }
.clear {
clear: both; }
.mt0 {
margin-top: 0; }
.mb0 {
margin-bottom: 0; }
.preheader {
color: transparent;
display: none;
height: 0;
max-height: 0;
max-width: 0;
opacity: 0;
overflow: hidden;
mso-hide: all;
visibility: hidden;
width: 0; }
.powered-by a {
text-decoration: none; }
hr {
border: 0;
border-bottom: 1px solid #f6f6f6;
Margin: 20px 0; }
/* -------------------------------------
RESPONSIVE AND MOBILE FRIENDLY STYLES
------------------------------------- */
@media only screen and (max-width: 620px) {
table[class=body] h1 {
font-size: 28px !important;
margin-bottom: 10px !important; }
table[class=body] p,
table[class=body] ul,
table[class=body] ol,
table[class=body] td,
table[class=body] span,
table[class=body] a {
font-size: 16px !important; }
table[class=body] .wrapper,
table[class=body] .article {
padding: 10px !important; }
table[class=body] .content {
padding: 0 !important; }
table[class=body] .container {
padding: 0 !important;
width: 100% !important; }
table[class=body] .main {
border-left-width: 0 !important;
border-radius: 0 !important;
border-right-width: 0 !important; }
table[class=body] .btn table {
width: 100% !important; }
table[class=body] .btn a {
width: 100% !important; }
table[class=body] .img-responsive {
height: auto !important;
max-width: 100% !important;
width: auto !important; }}
/* -------------------------------------
PRESERVE THESE STYLES IN THE HEAD
------------------------------------- */
@media all {
.ExternalClass {
width: 100%; }
.ExternalClass,
.ExternalClass p,
.ExternalClass span,
.ExternalClass font,
.ExternalClass td,
.ExternalClass div {
line-height: 100%; }
.apple-link a {
color: inherit !important;
font-family: inherit !important;
font-size: inherit !important;
font-weight: inherit !important;
line-height: inherit !important;
text-decoration: none !important; }
.btn-primary table td:hover {
background-color: #34495e !important; }
.btn-primary a:hover {
background-color: #34495e !important;
border-color: #34495e !important; } }
</style>
</head>
<body class="">
<table border="0" cellpadding="0" cellspacing="0" class="body">
<tr>
<td>&nbsp;</td>
<td class="container">
<div class="content">
<!-- START CENTERED WHITE CONTAINER -->
<span class="preheader">This is preheader text. Some clients will show this text as a preview.</span>
<table class="main">
<!-- START MAIN CONTENT AREA -->
<tr>
<td class="wrapper">
<table border="0" cellpadding="0" cellspacing="0">
<tr>
<td>
<img src="classpath:images\OpenDMP.png" alt="OpenDMP" width="100" height="81">
<h2>You have made a request to unlink your email account in ARGOS.</h2>
<p>Please confirm that you want to unlink your {email} account.
<br/>The link will expire in {expiration_time}.</p>
<table border="0" cellpadding="0" cellspacing="0" class="btn btn-primary">
<tbody>
<tr>
<td align="left">
<table border="0" cellpadding="0" cellspacing="0">
<tbody>
<tr>
<td> <a href="{host}/unlink/confirmation/{confirmationToken}" target="_blank">Confirm Unlink Request</a> </td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
</td>
</tr>
</table>
</td>
</tr>
<!-- END MAIN CONTENT AREA -->
</table>
<!-- START FOOTER -->
<div class="footer">
</div>
<!-- END FOOTER -->
<!-- END CENTERED WHITE CONTAINER -->
</div>
</td>
<td>&nbsp;</td>
</tr>
</table>
</body>
</html>

View File

@ -30,7 +30,6 @@
"scripts": [
"node_modules/cookieconsent/build/cookieconsent.min.js",
"node_modules/tinymce/tinymce.min.js"
],
"vendorChunk": true,
"extractLicenses": false,

View File

@ -224,6 +224,13 @@ const appRoutes: Routes = [
title: 'GENERAL.TITLES.INDEX-MANAGMENT'
},
},
{
path: 'maintenance-tasks',
loadChildren: () => import('./ui/admin/maintenance-tasks/maintenance-tasks.module').then(m => m.MaintenanceTasksModule),
data: {
breadcrumb: true
},
},
{
path: 'login/admin',
loadChildren: () => import('./ui/auth/admin-login/admin-login.module').then(m => m.AdminLoginModule),

View File

@ -22,7 +22,7 @@ import { SidebarModule } from '@app/ui/sidebar/sidebar.module';
import { MomentUtcDateAdapter } from '@common/date/moment-utc-date-adapter';
import { CommonHttpModule } from '@common/http/common-http.module';
import { CommonUiModule } from '@common/ui/common-ui.module';
import { TranslateLoader, TranslateModule } from '@ngx-translate/core';
import { TranslateCompiler, TranslateLoader, TranslateModule } from '@ngx-translate/core';
import { DragulaModule } from 'ng2-dragula';
import { CookieService } from 'ngx-cookie-service';
import { NgcCookieConsentConfig, NgcCookieConsentModule } from 'ngx-cookieconsent';
@ -32,6 +32,7 @@ import { CultureService } from './core/services/culture/culture-service';
import { TranslateServerLoader } from './core/services/language/server.loader';
import { MatomoService } from './core/services/matomo/matomo-service';
import { GuidedTourModule } from './library/guided-tour/guided-tour.module';
import { OpenDMPCustomTranslationCompiler } from './utilities/translate/opendmp-custom-translation-compiler';
import { Oauth2DialogModule } from './ui/misc/oauth2-dialog/oauth2-dialog.module';
// AoT requires an exported function for factories
@ -84,6 +85,7 @@ const appearance: MatFormFieldDefaultOptions = {
AppRoutingModule,
CommonUiModule,
TranslateModule.forRoot({
compiler: { provide: TranslateCompiler, useClass: OpenDMPCustomTranslationCompiler },
loader: {
provide: TranslateLoader,
useFactory: HttpLoaderFactory,

View File

@ -47,6 +47,11 @@ import { TypeUtils } from './services/utilities/type-utils.service';
import { SpecialAuthGuard } from './special-auth-guard.service';
import {PrefillingService} from "@app/core/services/prefilling.service";
import { DepositRepositoriesService } from './services/deposit-repositories/deposit-repositories.service';
import { AboutService } from './services/about/about.service';
import { FaqService } from './services/faq/faq.service';
import { GlossaryService } from './services/glossary/glossary.service';
import { TermsOfServiceService } from './services/terms-of-service/terms-of-service.service';
import { UnlinkAccountEmailConfirmationService } from './services/unlink-account-email-confirmation/unlink-account-email-confirmation.service';
//
//
// This is shared module that provides all the services. Its imported only once on the AppModule.
@ -112,8 +117,13 @@ export class CoreServiceModule {
LanguageService,
LockService,
UserGuideService,
AboutService,
FaqService,
GlossaryService,
TermsOfServiceService,
CurrencyService,
MergeEmailConfirmationService,
UnlinkAccountEmailConfirmationService,
ConfigurationService,
{
provide: APP_INITIALIZER,

View File

@ -59,7 +59,7 @@ export interface Field {
data: any;
visible: Visibility;
validations: ValidationType[];
rdaCommonStandard: string;
schematics: string[];
export: boolean;
}

View File

@ -0,0 +1,5 @@
export class UnlinkAccountRequestModel {
userId: String;
email: String;
provider: number;
}

View File

@ -0,0 +1,23 @@
import { Injectable } from "@angular/core";
import { ConfigurationService } from "../configuration/configuration.service";
import { HttpClient, HttpResponse } from "@angular/common/http";
import { Observable } from "rxjs";
@Injectable()
export class AboutService {
private aboutUrl : string;
constructor(
private http: HttpClient,
private configurationService: ConfigurationService
) {
this.aboutUrl = `${configurationService.server}material/about`;
}
public getAbout(lang: string): Observable<HttpResponse<Blob>> {
return this.http.get(`${this.aboutUrl}/${lang}`, { responseType: 'blob', observe: 'response', headers: {'Content-type': 'text/html',
'Accept': 'text/html',
'Access-Control-Allow-Origin': this.configurationService.app,
'Access-Control-Allow-Credentials': 'true'} });
}
}

View File

@ -41,6 +41,11 @@ export class ConfigurationService extends BaseComponent {
return this._defaultLanguage;
}
private _availableLanguages: any[] = [];
get availableLanguages(): any[] {
return this._availableLanguages;
}
private _loginProviders: LoginProviders;
get loginProviders(): LoginProviders {
return this._loginProviders;
@ -127,6 +132,7 @@ export class ConfigurationService extends BaseComponent {
this._helpService = HelpService.parseValue(config.HelpService);
this._defaultCulture = config.defaultCulture;
this._defaultLanguage = config.defaultLanguage;
this._availableLanguages = config.availableLanguages;
this._loginProviders = LoginProviders.parseValue(config.loginProviders);
this._logging = Logging.parseValue(config.logging);
this._lockInterval = config.lockInterval;

View File

@ -19,9 +19,6 @@ import { ConfigurationService } from '../configuration/configuration.service';
@Injectable()
export class DatasetProfileService extends BaseService {
private rdaCommonStandards: String[];
private rdaCommonStandardsLoading: boolean;
private actionUrl: string;
private headers = new HttpHeaders();
@ -68,26 +65,20 @@ export class DatasetProfileService extends BaseService {
return this.httpClient.get(this.actionUrl + 'getXml/' + id, { responseType: 'blob', observe: 'response', headers: headerXml });
}
uploadFile(file: FileList, labelSent: string): Observable<DataTableData<DatasetListingModel>> {
uploadFile(file: FileList, labelSent: string, datasetProfileId?: string): Observable<DataTableData<DatasetListingModel>> {
const params = new BaseHttpParams();
params.interceptorContext = {
excludedInterceptors: [InterceptorType.JSONContentType]
};
const formData = new FormData();
formData.append('file', file[0], labelSent);
return this.http.post(this.actionUrl + "upload", formData, { params: params });
return (datasetProfileId === undefined || datasetProfileId == null)
? this.http.post(this.actionUrl + "upload", formData, { params: params })
: this.http.post(this.actionUrl + "upload/" + datasetProfileId, formData, { params: params });
}
getRDACommonStandards(): String[] {
if (!this.rdaCommonStandards && !this.rdaCommonStandardsLoading) { this.getRDACommonStandardsInternal(); }
return this.rdaCommonStandards;
}
private getRDACommonStandardsInternal() {
this.rdaCommonStandardsLoading = true;
return this.http.get<String[]>(this.actionUrl + "getRDACommonStandards").pipe(takeUntil(this._destroyed)).subscribe(x => {
this.rdaCommonStandards = x;
this.rdaCommonStandardsLoading = false;
});
searchSemantics(like: string): Observable<String[]> {
return this.http.get<String[]>(this.actionUrl + "getSemantics?query=" + like);
}
}

View File

@ -114,10 +114,6 @@ export class DmpService {
return this.http.post<DmpModel>(`${this.actionUrl}updateusers/${id}`, users, { headers: this.headers });
}
getDoi(id: string): Observable<string> {
return this.http.post<string>(this.actionUrl + 'createZenodoDoi/' + id, { headers: this.headers });
}
getDynamicField(requestItem: RequestItem<DynamicFieldGrantCriteria>): any {
return this.http.post<any>(this.actionUrl + 'dynamic', requestItem, { headers: this.headers });
}

View File

@ -0,0 +1,23 @@
import { Injectable } from "@angular/core";
import { ConfigurationService } from "../configuration/configuration.service";
import { HttpClient, HttpResponse } from "@angular/common/http";
import { Observable } from "rxjs";
@Injectable()
export class FaqService {
private faqUrl : string;
constructor(
private http: HttpClient,
private configurationService: ConfigurationService
) {
this.faqUrl = `${configurationService.server}material/faq`;
}
public getFaq(lang: string): Observable<HttpResponse<Blob>> {
return this.http.get(`${this.faqUrl}/${lang}`, { responseType: 'blob', observe: 'response', headers: {'Content-type': 'text/html',
'Accept': 'text/html',
'Access-Control-Allow-Origin': this.configurationService.app,
'Access-Control-Allow-Credentials': 'true'} });
}
}

View File

@ -0,0 +1,23 @@
import { Injectable } from "@angular/core";
import { ConfigurationService } from "../configuration/configuration.service";
import { HttpClient, HttpResponse } from "@angular/common/http";
import { Observable } from "rxjs";
@Injectable()
export class GlossaryService {
private glossaryUrl : string;
constructor(
private http: HttpClient,
private configurationService: ConfigurationService
) {
this.glossaryUrl = `${configurationService.server}material/glossary`;
}
public getGlossary(lang: string): Observable<HttpResponse<Blob>> {
return this.http.get(`${this.glossaryUrl}/${lang}`, { responseType: 'blob', observe: 'response', headers: {'Content-type': 'text/html',
'Accept': 'text/html',
'Access-Control-Allow-Origin': this.configurationService.app,
'Access-Control-Allow-Credentials': 'true'} });
}
}

View File

@ -7,8 +7,6 @@ import { BaseHttpService } from '../http/base-http.service';
import { Language } from '@app/models/language/Language';
import { ConfigurationService } from '../configuration/configuration.service';
const availableLanguages: any[] = require('../../../../assets/resources/language.json');
@Injectable()
export class LanguageService {
private currentLanguage: string;
@ -43,7 +41,7 @@ export class LanguageService {
public getCurrentLanguageName() {
let result: string = '';
availableLanguages.forEach(language => {
this.configurationService.availableLanguages.forEach(language => {
if (language.value === this.currentLanguage) {
result = this.translate.instant(language.label);
}

View File

@ -44,17 +44,17 @@ export class LoggingService {
switch (level) {
case LogLevel.Debug:
// tslint:disable-next-line:no-console
console.debug(objects.join(', '));
// console.debug(objects.join(', '));
break;
case LogLevel.Info:
// tslint:disable-next-line:no-console
console.info(objects.join(', '));
// console.info(objects.join(', '));
break;
case LogLevel.Warning:
console.warn(objects.join(', '));
// console.warn(objects.join(', '));
break;
case LogLevel.Error:
console.error(objects.join(', '));
// console.error(objects.join(', '));
break;
}
});

View File

@ -0,0 +1,24 @@
import { Injectable } from "@angular/core";
import { BaseService } from "@common/base/base.service";
import { Observable } from "rxjs";
import { ConfigurationService } from "../configuration/configuration.service";
import { BaseHttpService } from "../http/base-http.service";
@Injectable()
export class MaintenanceTasksService extends BaseService {
private actionUrl: string;
constructor(private http: BaseHttpService, configurationService: ConfigurationService) {
super();
this.actionUrl = configurationService.server + 'management/';
}
migrateSemantics(): Observable<void> {
return this.http.post<null>(this.actionUrl + 'addSemantics/', null);
}
addRdaInSemantics(): Observable<void> {
return this.http.post<null>(this.actionUrl + 'addRdaInSemantics/', null);
}
}

View File

@ -1,16 +1,19 @@
import { HttpHeaders, HttpClient } from '@angular/common/http';
import { Injectable } from '@angular/core';
import { Guid } from '@common/types/guid';
import * as pk from 'pako';
import { ConfigurationService } from './configuration/configuration.service';
import { BaseHttpService } from './http/base-http.service';
import { Observable } from 'rxjs';
import { BaseComponent } from '@common/base/base.component';
@Injectable()
export class SamlLoginService {
export class SamlLoginService extends BaseComponent {
constructor() {}
private actionUrl: string;
private headers = new HttpHeaders();
buildRelayState(spId: string, configurableLoginId: string): string {
let uri = 'spId=' + spId;
uri += '&configurableLoginId=' + configurableLoginId;
return encodeURIComponent(uri);
constructor(private http: BaseHttpService, private httpClient: HttpClient, private configurationService: ConfigurationService) {
super();
this.actionUrl = configurationService.server + 'saml2/';
}
resolveConfigurableLoginId(relayState: string): string {
@ -24,29 +27,8 @@ export class SamlLoginService {
return routeParams.has('spId') ? routeParams.get('spId') : '';
}
getSamlLoginUrl(spEntityID: string, idpUrl: string, binding: string, assertionConsumerServiceUrl: string, configurableLoginId: string) {
const now = new Date();
let protocolBinding = '';
switch (binding) {
case "Redirect": protocolBinding = 'ProtocolBinding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" '; break;
case "Artifact": protocolBinding = 'ProtocolBinding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Artifact" '; break;
case "Post": protocolBinding = 'ProtocolBinding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST" '; break;
}
const authenticationRequest = '<saml2p:AuthnRequest xmlns:saml2p="urn:oasis:names:tc:SAML:2.0:protocol" xmlns:saml2="urn:oasis:names:tc:SAML:2.0:assertion" ID="_' + Guid.create() + '" Version="2.0" ' +
'IssueInstant="' + now.toISOString() + '" ' +
protocolBinding +
'AssertionConsumerServiceUrl="' + assertionConsumerServiceUrl + '" ' +
'Destination="' + idpUrl + '">' +
'<saml2:Issuer>' + spEntityID + '</saml2:Issuer>' +
'</saml2p:AuthnRequest>';
const uint = new Uint8Array(authenticationRequest.length);
for (let i = 0, j = authenticationRequest.length; i < j; ++i) {
uint[i] = authenticationRequest.charCodeAt(i);
}
const base64String = btoa(pk.deflateRaw(uint, { to: 'string' }));
const relayState = this.buildRelayState(spEntityID, configurableLoginId);
const url = idpUrl + '?RelayState=' + relayState + '&SAMLRequest=' + encodeURIComponent(base64String);
return url;
getAuthnRequest(configurableLoginId: string): Observable<string> {
return this.http.get<string>(this.actionUrl + 'authnRequest/' + configurableLoginId, { headers: this.headers });
}
}

View File

@ -0,0 +1,23 @@
import { Injectable } from "@angular/core";
import { ConfigurationService } from "../configuration/configuration.service";
import { HttpClient, HttpResponse } from "@angular/common/http";
import { Observable } from "rxjs";
@Injectable()
export class TermsOfServiceService {
private termsOfServiceUrl : string;
constructor(
private http: HttpClient,
private configurationService: ConfigurationService
) {
this.termsOfServiceUrl = `${configurationService.server}material/termsofservice`;
}
public getTermsOfService(lang: string): Observable<HttpResponse<Blob>> {
return this.http.get(`${this.termsOfServiceUrl}/${lang}`, { responseType: 'blob', observe: 'response', headers: {'Content-type': 'text/html',
'Accept': 'text/html',
'Access-Control-Allow-Origin': this.configurationService.app,
'Access-Control-Allow-Credentials': 'true'} });
}
}

Some files were not shown because too many files have changed in this diff.