Merge commit '8ca2b912d40ece8bf0fbcda5c49f1ef7f5f6fdfd'

Commit: 4ee39074a3
@@ -38,3 +38,5 @@ dmp-frontend/.vscode/
dmp-frontend/package-lock.json
dmp-backend/logging/target/
ELK.Docker/shared/data-elk/
@@ -1,3 +1,3 @@
-TAG=6.3.1
-ELASTIC_VERSION=6.3.1
-ELASTIC_PASSWORD=changeme
+ELK_VERSION=7.6.0
+# Leave blank to use the "basic" image flavours, which include X-Pack.
+# see https://www.elastic.co/subscriptions
@@ -0,0 +1,2 @@
# Declare files that will always have LF line endings on checkout.
*.sh text eol=lf
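The attribute only takes effect at checkout, so `.sh` files already committed with CRLF endings stay as they are until renormalized; a minimal sketch with stock git (nothing project-specific assumed):

```
git add --renormalize .
git commit -m "Normalize shell script line endings to LF"
```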
@@ -1,201 +1,21 @@
-[... the full Apache License 2.0 text (201 lines) removed; the same text is added verbatim as a new file later in this merge ...]
+The MIT License (MIT)
+
+Copyright (c) 2015 Anthony Lapenna
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
@@ -1,25 +1,4 @@
 # stack-docker
-[... the rest of the original stack-docker README (24 lines) removed; the same text is added verbatim as a new file later in this merge ...]
+Init default users and retrieve passwords
+1) connect to the elasticsearch container with docker exec -it elasticsearch /bin/bash
+2) run ./bin/elasticsearch-setup-passwords auto >./data/passwords.txt (type y and press Enter when the console appears to show nothing)
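A sketch of that flow end to end, assuming the `elasticsearch` container name and the data bind mount from the Compose file below; the host path is an inference from that mount:

```
docker exec -it elasticsearch /bin/bash
./bin/elasticsearch-setup-passwords auto > ./data/passwords.txt   # answer y at the (invisible) prompt
exit
# ./data is bind-mounted, so the generated passwords land on the host:
cat shared/data-elk/elasticsearch-01-data/passwords.txt
```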
@@ -0,0 +1,87 @@
version: '2.4'

services:
  elasticsearch:
    user: 1002:1002 #develuser
    restart: unless-stopped
    mem_limit: 2048m
    environment:
      - cluster.name=open-dmp-cluster
      - bootstrap.memory_lock=true
      - "ES_JAVA_OPTS=-Xmx1024m -Xms1024m"
      - xpack.license.self_generated.type=basic
      - xpack.monitoring.collection.enabled=true
      - xpack.security.enabled=true
    ulimits:
      nproc: 65535
      memlock:
        soft: -1
        hard: -1
    volumes:
      - ./shared/config-elk/elasticsearch/config/log4j2.properties:/usr/share/elasticsearch/config/log4j2.properties:ro
      - ./shared/config-elk/elasticsearch/config/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml:ro
      - ./shared/data-elk/elasticsearch-01-data:/usr/share/elasticsearch/data
      - ./shared/data-elk/elasticsearch-01-log:/usr/share/elasticsearch/logs
    #ports:
    #  - 51056:9200
    #  - 51057:9300
    ports:
      - "9200:9200"
    expose:
      - "9300"
    networks:
      open-dmp-elk-network:

  logstash:
    # user: 1002:1002 #develuser
    volumes:
      - ./shared/config-elk/logstash/config/logstash.yml:/usr/share/logstash/config/logstash.yml:ro
      - ./shared/config-elk/logstash/config/pipelines.yml:/usr/share/logstash/config/pipelines.yml:ro
      - ./shared/config-elk/logstash/config/log4j2.properties:/usr/share/logstash/config/log4j2.properties:ro
      - ./shared/config-elk/logstash/pipeline:/usr/share/logstash/pipeline:ro
      - ./shared/config-elk/logstash/logstash/templates:/usr/share/logstash/templates
      - ./shared/data-elk/logstash-log:/usr/share/logstash/logs
      - ./shared/data-elk/logstash-queue:/usr/share/logstash/queue
      - ./shared/data-elk/logstash-dead_letter_queue:/usr/share/logstash/dead_letter_queue
    expose:
      - "31311"
      - "31312"
    restart: on-failure
    mem_limit: 2048m
    environment:
      - LS_JAVA_OPTS=-Xmx1024m -Xms1024m
      - xpack.license.self_generated.type=basic
      - xpack.security.enabled=true
    networks:
      open-dmp-elk-network:

  kibana:
    # user: 1002:1002 #develuser
    mem_limit: 512m
    environment:
      - xpack.license.self_generated.type=basic
      - xpack.security.enabled=true

    volumes:
      - ./shared/config-elk/kibana/config:/usr/share/kibana/config:ro
      #- ./shared/config-elk/kibana/certificates:/usr/share/kibana/certificates
    restart: unless-stopped
    ports:
      - "51058:5601"
    networks:
      - open-dmp-elk-network

  filebeat:
    restart: unless-stopped
    mem_limit: 256m
    #command: [ "-e=false" ] # to overwrite the -e that disables logging to file!
    volumes:
      - ./shared/config-elk/filebeat/config/filebeat.yml:/usr/share/filebeat/filebeat.yml:ro
      - ~/openDMP/logs:/usr/share/filebeat/log_data/dmp/
      - ./shared/data-elk/filebeat-log:/usr/share/filebeat/logs
      - ./shared/data-elk/filebeat-data:/usr/share/filebeat/data #For windows if we mount the data directory we get "Writing of registry returned error: sync /usr/share/filebeat/data/registry/filebeat: invalid argument."
    networks:
      - open-dmp-elk-network

networks:
  open-dmp-elk-network:
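Because elasticsearch runs as `1002:1002` (the `develuser` comment) and writes through bind mounts, the host directories must exist and be owned by that UID before the first start; a minimal sketch, assuming the relative paths above:

```
mkdir -p shared/data-elk/elasticsearch-01-data shared/data-elk/elasticsearch-01-log
sudo chown -R 1002:1002 shared/data-elk
docker-compose up -d elasticsearch
```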
@@ -1,171 +1,43 @@
-[... removed: the previous stack-docker compose file (version '3', docker.elastic.co images pinned by ${TAG}, the 'stack' network, the setup_* helper containers, and the docsbox services web/rqworker/rqscheduler/nginx/redis); a near-identical copy, with the docsbox services commented out, is added as a new file later in this merge ...]
+version: '2.4'
+
+services:
+  elasticsearch:
+    image: ${DOCKER_REGISTRY}elasticsearch
+    container_name: elasticsearch
+    build:
+      context: elasticsearch/
+      args:
+        ELK_VERSION: $ELK_VERSION
+    healthcheck:
+      # test: curl --cacert /usr/share/elasticsearch/config/certificates/ca/ca.crt -s https://localhost:9200 >/dev/null; if [[ $$? == 52 ]]; then echo 0; else echo 1; fi
+      interval: 30s
+      timeout: 10s
+      retries: 5
+
+  logstash:
+    image: ${DOCKER_REGISTRY}logstash
+    container_name: logstash
+    build:
+      context: logstash/
+      args:
+        ELK_VERSION: $ELK_VERSION
+    depends_on:
+      - elasticsearch
+
+  kibana:
+    image: ${DOCKER_REGISTRY}kibana
+    build:
+      context: kibana/
+      args:
+        ELK_VERSION: $ELK_VERSION
+    depends_on:
+      - elasticsearch
+
+  filebeat:
+    image: ${DOCKER_REGISTRY}filebeat
+    build:
+      context: filebeat/
+      args:
+        ELK_VERSION: $ELK_VERSION
+    depends_on:
+      - logstash
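All four services are now built from the local Dockerfiles below, with the image name prefixed by `DOCKER_REGISTRY` and the base image pinned by `ELK_VERSION`. A sketch of a build-and-run pass; the registry prefix is illustrative, and `ELK_VERSION=7.6.0` matches the `.env` change above:

```
export DOCKER_REGISTRY=registry.example.org/opendmp/   # hypothetical prefix; the trailing slash matters
export ELK_VERSION=7.6.0
docker-compose build
docker-compose up -d
```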
@@ -0,0 +1,22 @@
ARG ELK_VERSION

# https://github.com/elastic/elasticsearch-docker
FROM docker.elastic.co/elasticsearch/elasticsearch:${ELK_VERSION}

RUN /usr/share/elasticsearch/bin/elasticsearch-plugin install analysis-icu && \
    /usr/share/elasticsearch/bin/elasticsearch-plugin install analysis-phonetic

RUN groupmod -g 1002 elasticsearch
RUN usermod -u 1002 -g 1002 elasticsearch
RUN chown -R elasticsearch /usr/share/elasticsearch
RUN sed -i -e 's/--userspec=1000/--userspec=1002/g' \
    -e 's/UID 1000/UID 1002/' \
    -e 's/chown -R 1000/chown -R 1002/' /usr/local/bin/docker-entrypoint.sh
RUN chown elasticsearch /usr/local/bin/docker-entrypoint.sh

ENV JAVA_HOME /usr/share/elasticsearch/jdk

# RUN mkdir /usr/share/elasticsearch/custom-plugins
# COPY plugins/elasticsearch-analysis-greeklish-7.5.1.zip /usr/share/elasticsearch/custom-plugins/elasticsearch-analysis-greeklish-7.5.1.zip

# RUN /usr/share/elasticsearch/bin/elasticsearch-plugin install file:///usr/share/elasticsearch/custom-plugins/elasticsearch-analysis-greeklish-7.5.1.zip
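The image can also be built standalone; a sketch assuming the `elasticsearch/` build context named in the Compose file (the tag is illustrative):

```
docker build --build-arg ELK_VERSION=7.6.0 -t opendmp-elasticsearch:7.6.0 elasticsearch/
# verify the two plugins installed above:
docker run --rm --entrypoint /usr/share/elasticsearch/bin/elasticsearch-plugin opendmp-elasticsearch:7.6.0 list
```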
@@ -0,0 +1,15 @@
ARG ELK_VERSION

FROM docker.elastic.co/beats/filebeat:${ELK_VERSION}

# USER root
# RUN groupmod -g 1002 filebeat
# RUN usermod -u 1002 -g 1002 filebeat
# RUN chown -R filebeat /usr/share/filebeat
# RUN sed -i -e 's/--userspec=1000/--userspec=1002/g' \
#     -e 's/UID 1000/UID 1002/' \
#     -e 's/chown -R 1000/chown -R 1002/' /usr/local/bin/docker-entrypoint
# RUN chown filebeat /usr/local/bin/docker-entrypoint

# USER 1002:1002
@@ -0,0 +1,15 @@
ARG ELK_VERSION

# https://github.com/elastic/kibana-docker
FROM docker.elastic.co/kibana/kibana:${ELK_VERSION}

# USER root
# RUN groupmod -g 1002 kibana
# RUN usermod -g 1002 root
# RUN usermod -u 1002 -g 1002 kibana
# RUN chown -R kibana /usr/share/kibana

# USER 1002:1002

# Add your kibana plugins setup here
# Example: RUN kibana-plugin install <name|url>
@@ -0,0 +1,20 @@
ARG ELK_VERSION

# https://github.com/elastic/logstash-docker
FROM docker.elastic.co/logstash/logstash:${ELK_VERSION}

# USER root
# RUN groupmod -g 1002 logstash
# RUN usermod -u 1002 -g 1002 logstash
# RUN chown -R logstash /usr/share/logstash
# RUN sed -i -e 's/--userspec=1000/--userspec=1002/g' \
#     -e 's/UID 1000/UID 1002/' \
#     -e 's/chown -R 1000/chown -R 1002/' /usr/local/bin/docker-entrypoint
# RUN chown logstash /usr/local/bin/docker-entrypoint

# USER 1002:1002

# Add your logstash plugins setup here
# Example: RUN logstash-plugin install logstash-filter-json
RUN logstash-plugin update logstash-input-beats
RUN logstash-plugin update logstash-filter-grok
@@ -0,0 +1,3 @@
TAG=6.3.1
ELASTIC_VERSION=6.3.1
ELASTIC_PASSWORD=changeme
@@ -0,0 +1,201 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "{}"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright {yyyy} {name of copyright owner}

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
@@ -0,0 +1,25 @@
# stack-docker
This example Docker Compose configuration demonstrates many components of the
Elastic Stack, all running on a single machine under Docker.

## Prerequisites
- Docker and Compose. Windows and Mac users get Compose installed automatically
  with Docker. Linux users can:
  ```
  pip install docker-compose
  ```

- At least 4GiB of RAM for the containers. Windows and Mac users _must_
  configure their Docker virtual machine to have more than the default 2 GiB of
  RAM:

  ![Docker VM memory settings](screenshots/docker-vm-memory-settings.png)

## Starting the stack
Try `docker-compose up` to create a demonstration Elastic Stack with
Elasticsearch, Kibana, Logstash, Auditbeat, Metricbeat, Filebeat, Packetbeat,
and Heartbeat.

Point a browser at [`http://localhost:5601`](http://localhost:5601) to see the results.

Log in with `elastic` / `changeme`.
@@ -0,0 +1,171 @@
---
version: '3'
services:
  # The environment variable "TAG" is used throughout this file to
  # specify the version of the images to run. The default is set in the
  # '.env' file in this folder. It can be overridden with any normal
  # technique for setting environment variables, for example:
  #
  #   TAG=6.0.0-beta1 docker-compose up
  #
  # REF: https://docs.docker.com/compose/compose-file/#variable-substitution
  #
  # Also be sure to set the ELASTIC_VERSION variable. For released versions,
  # ${TAG} and ${ELASTIC_VERSION} will be identical, but for pre-release
  # versions, ${TAG} might contain an extra build identifier, like
  # "6.0.0-beta1-3eab5b40", so a full invocation might look like:
  #
  #   ELASTIC_VERSION=6.0.0-beta1 TAG=6.0.0-beta1-3eab5b40 docker-compose up
  #
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:${TAG}
    container_name: elasticsearch
    #volumes:
    #  - esdata:/usr/share/elasticsearch/data
    environment: ['http.host=0.0.0.0', 'transport.host=127.0.0.1', 'ELASTIC_PASSWORD=${ELASTIC_PASSWORD}']
    ports: ['0.0.0.0:9200:9200']
    networks: ['stack']

  kibana:
    image: docker.elastic.co/kibana/kibana:${TAG}
    container_name: kibana
    ports: ['0.0.0.0:5601:5601']
    networks: ['stack']
    depends_on: ['elasticsearch']

  logstash:
    image: docker.elastic.co/logstash/logstash:${TAG}
    container_name: logstash
    # Provide a simple pipeline configuration for Logstash with a bind-mounted file.
    volumes:
      - ./config/logstash.conf:/usr/share/logstash/pipeline/logstash.conf
    ports: ['0.0.0.0:31311:31311']

    networks: ['stack']
    depends_on: ['elasticsearch', 'setup_logstash']

  filebeat:
    image: docker.elastic.co/beats/filebeat:${TAG}
    container_name: filebeat
    command: -e -E 'output.elasticsearch.password=${ELASTIC_PASSWORD}'
    # If the host system has logs at "/var/log", mount them at "/mnt/log"
    # inside the container, where Filebeat can find them.
    # volumes: ['/var/log:/mnt/log:ro']
    networks: ['stack']
    depends_on: ['elasticsearch', 'setup_filebeat']

  heartbeat:
    image: docker.elastic.co/beats/heartbeat:${TAG}
    container_name: heartbeat
    command: -e -E 'output.elasticsearch.password=${ELASTIC_PASSWORD}'
    networks: ['stack']
    depends_on: ['elasticsearch', 'setup_heartbeat']

  # Run a short-lived container to set up Logstash.
  setup_logstash:
    image: centos:7
    container_name: setup_logstash
    volumes: ['./scripts/setup-logstash.sh:/usr/local/bin/setup-logstash.sh:ro']
    # The script may have CR/LF line endings if using Docker for Windows, so
    # make sure that they don't confuse Bash.
    command: ['/bin/bash', '-c', 'cat /usr/local/bin/setup-logstash.sh | tr -d "\r" | bash']
    environment: ['ELASTIC_PASSWORD=${ELASTIC_PASSWORD}']
    networks: ['stack']
    depends_on: ['elasticsearch']

  setup_kibana:
    image: centos:7
    container_name: setup_kibana
    volumes: ['./scripts/setup-kibana.sh:/usr/local/bin/setup-kibana.sh:ro']
    command: ['/bin/bash', '-c', 'cat /usr/local/bin/setup-kibana.sh | tr -d "\r" | bash']
    environment: ['ELASTIC_PASSWORD=${ELASTIC_PASSWORD}']
    networks: ['stack']
    depends_on: ['elasticsearch']

  setup_filebeat:
    image: docker.elastic.co/beats/filebeat:${TAG}
    container_name: setup_filebeat
    volumes: ['./scripts/setup-beat.sh:/usr/local/bin/setup-beat.sh:ro']
    command: ['/bin/bash', '-c', 'cat /usr/local/bin/setup-beat.sh | tr -d "\r" | bash -s filebeat']
    environment: ['ELASTIC_PASSWORD=${ELASTIC_PASSWORD}']
    networks: ['stack']
    depends_on: ['kibana']

  setup_heartbeat:
    image: docker.elastic.co/beats/heartbeat:${TAG}
    container_name: setup_heartbeat
    volumes: ['./scripts/setup-beat.sh:/usr/local/bin/setup-beat.sh:ro']
    command: ['/bin/bash', '-c', 'cat /usr/local/bin/setup-beat.sh | tr -d "\r" | bash -s heartbeat']
    environment: ['ELASTIC_PASSWORD=${ELASTIC_PASSWORD}']
    networks: ['stack']
    depends_on: ['kibana']


  ##########################DOCSBOX######################################################################
  # web:
  #   restart: always
  #   build: ./docsbox-master/docsbox
  #   expose:
  #     - "8000"
  #   links:
  #     - redis:redis
  #   volumes:
  #     - docsbox:/home/docsbox
  #     - media:/home/docsbox/media
  #   command: gunicorn -b :8000 docsbox:app
  #   networks: ['stack']
  #
  # rqworker:
  #   restart: always
  #   build: ./docsbox-master/docsbox
  #   links:
  #     - redis:redis
  #   volumes:
  #     - web
  #   command: rq worker -c docsbox.settings
  #   networks: ['stack']
  #
  # rqscheduler:
  #   restart: always
  #   build: ./docsbox-master/docsbox
  #   links:
  #     - redis:redis
  #   volumes:
  #     - web
  #   command: rqscheduler -H redis -p 6379 -d 0
  #   networks: ['stack']
  #
  # nginx:
  #   restart: always
  #   build: ./docsbox-master/nginx/
  #   ports:
  #     - "81:80"
  #   volumes:
  #     - web
  #   links:
  #     - web:web
  #   networks: ['stack']
  #
  # redis:
  #   restart: always
  #   image: redis:latest
  #   expose:
  #     - "6379"
  #   volumes:
  #     - redisdata:/data
  #   networks: ['stack']


##########################SETTINGS######################################################################

volumes:
  #esdata:
  #  driver: local
  redisdata:
    driver: local
  docsbox:
    driver: local
  media:
    driver: local
networks: {stack: {}}
@@ -0,0 +1,20 @@
---
## Default Elasticsearch configuration from elasticsearch-docker.
## from https://github.com/elastic/elasticsearch-docker/blob/master/build/elasticsearch/elasticsearch.yml
#
network.host: 0.0.0.0

# minimum_master_nodes need to be explicitly set when bound on a public IP
# set to 1 to allow single node clusters
# Details: https://github.com/elastic/elasticsearch/pull/17288
# discovery.zen.minimum_master_nodes: 1

## Use single node discovery in order to disable production mode and avoid bootstrap checks
## see https://www.elastic.co/guide/en/elasticsearch/reference/current/bootstrap-checks.html
#
discovery.type: single-node
## Search Guard
#
cluster.routing.allocation.disk.watermark.flood_stage: 99%
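With `discovery.type: single-node` the node skips the production bootstrap checks and forms a one-node cluster by itself. A quick smoke test from the host, assuming the `9200:9200` port mapping from the Compose file and a password produced by `elasticsearch-setup-passwords`:

```
curl -u elastic:<generated-password> 'http://localhost:9200/_cluster/health?pretty'
```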
@@ -0,0 +1,179 @@
#https://github.com/elastic/elasticsearch/blob/7.4/distribution/src/config/log4j2.properties

status = error

# log action execution errors for easier debugging
logger.action.name = org.elasticsearch.action
logger.action.level = debug

appender.console.type = Console
appender.console.name = console
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n

######## Server JSON ############################
appender.rolling.type = RollingFile
appender.rolling.name = rolling
appender.rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_server.json
appender.rolling.layout.type = ESJsonLayout
appender.rolling.layout.type_name = server

appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}-%i.json.gz
appender.rolling.policies.type = Policies
appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
appender.rolling.policies.time.interval = 1
appender.rolling.policies.time.modulate = true
appender.rolling.policies.size.type = SizeBasedTriggeringPolicy
appender.rolling.policies.size.size = 128MB
appender.rolling.strategy.type = DefaultRolloverStrategy
appender.rolling.strategy.fileIndex = nomax
appender.rolling.strategy.action.type = Delete
appender.rolling.strategy.action.basepath = ${sys:es.logs.base_path}
appender.rolling.strategy.action.condition.type = IfFileName
appender.rolling.strategy.action.condition.glob = ${sys:es.logs.cluster_name}-*
appender.rolling.strategy.action.condition.nested_condition.type = IfAccumulatedFileSize
appender.rolling.strategy.action.condition.nested_condition.exceeds = 2GB
################################################
######## Server - old style pattern ###########
appender.rolling_old.type = RollingFile
appender.rolling_old.name = rolling_old
appender.rolling_old.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log
appender.rolling_old.layout.type = PatternLayout
appender.rolling_old.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n

appender.rolling_old.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}-%i.log.gz
appender.rolling_old.policies.type = Policies
appender.rolling_old.policies.time.type = TimeBasedTriggeringPolicy
appender.rolling_old.policies.time.interval = 1
appender.rolling_old.policies.time.modulate = true
appender.rolling_old.policies.size.type = SizeBasedTriggeringPolicy
appender.rolling_old.policies.size.size = 128MB
appender.rolling_old.strategy.type = DefaultRolloverStrategy
appender.rolling_old.strategy.fileIndex = nomax
appender.rolling_old.strategy.action.type = Delete
appender.rolling_old.strategy.action.basepath = ${sys:es.logs.base_path}
appender.rolling_old.strategy.action.condition.type = IfFileName
appender.rolling_old.strategy.action.condition.glob = ${sys:es.logs.cluster_name}-*
appender.rolling_old.strategy.action.condition.nested_condition.type = IfAccumulatedFileSize
appender.rolling_old.strategy.action.condition.nested_condition.exceeds = 2GB
################################################

rootLogger.level = info
rootLogger.appenderRef.console.ref = console
rootLogger.appenderRef.rolling.ref = rolling
rootLogger.appenderRef.rolling_old.ref = rolling_old

######## Deprecation JSON #######################
appender.deprecation_rolling.type = RollingFile
appender.deprecation_rolling.name = deprecation_rolling
appender.deprecation_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.json
appender.deprecation_rolling.layout.type = ESJsonLayout
appender.deprecation_rolling.layout.type_name = deprecation
appender.deprecation_rolling.layout.esmessagefields=x-opaque-id

appender.deprecation_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation-%i.json.gz
appender.deprecation_rolling.policies.type = Policies
appender.deprecation_rolling.policies.size.type = SizeBasedTriggeringPolicy
appender.deprecation_rolling.policies.size.size = 1GB
appender.deprecation_rolling.strategy.type = DefaultRolloverStrategy
appender.deprecation_rolling.strategy.max = 4
#################################################
######## Deprecation - old style pattern #######
appender.deprecation_rolling_old.type = RollingFile
appender.deprecation_rolling_old.name = deprecation_rolling_old
appender.deprecation_rolling_old.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log
appender.deprecation_rolling_old.layout.type = PatternLayout
appender.deprecation_rolling_old.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n

appender.deprecation_rolling_old.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
_deprecation-%i.log.gz
appender.deprecation_rolling_old.policies.type = Policies
appender.deprecation_rolling_old.policies.size.type = SizeBasedTriggeringPolicy
appender.deprecation_rolling_old.policies.size.size = 1GB
appender.deprecation_rolling_old.strategy.type = DefaultRolloverStrategy
appender.deprecation_rolling_old.strategy.max = 4
#################################################
logger.deprecation.name = org.elasticsearch.deprecation
logger.deprecation.level = warn
logger.deprecation.appenderRef.deprecation_rolling.ref = deprecation_rolling
logger.deprecation.appenderRef.deprecation_rolling_old.ref = deprecation_rolling_old
logger.deprecation.additivity = false

######## Search slowlog JSON ####################
appender.index_search_slowlog_rolling.type = RollingFile
appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling
appender.index_search_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs\
.cluster_name}_index_search_slowlog.json
appender.index_search_slowlog_rolling.layout.type = ESJsonLayout
appender.index_search_slowlog_rolling.layout.type_name = index_search_slowlog
appender.index_search_slowlog_rolling.layout.esmessagefields=message,took,took_millis,total_hits,types,stats,search_type,total_shards,source,id

appender.index_search_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs\
.cluster_name}_index_search_slowlog-%i.json.gz
appender.index_search_slowlog_rolling.policies.type = Policies
appender.index_search_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy
appender.index_search_slowlog_rolling.policies.size.size = 1GB
appender.index_search_slowlog_rolling.strategy.type = DefaultRolloverStrategy
appender.index_search_slowlog_rolling.strategy.max = 4
#################################################
######## Search slowlog - old style pattern ####
appender.index_search_slowlog_rolling_old.type = RollingFile
appender.index_search_slowlog_rolling_old.name = index_search_slowlog_rolling_old
appender.index_search_slowlog_rolling_old.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
_index_search_slowlog.log
appender.index_search_slowlog_rolling_old.layout.type = PatternLayout
appender.index_search_slowlog_rolling_old.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n

appender.index_search_slowlog_rolling_old.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
_index_search_slowlog-%i.log.gz
appender.index_search_slowlog_rolling_old.policies.type = Policies
appender.index_search_slowlog_rolling_old.policies.size.type = SizeBasedTriggeringPolicy
appender.index_search_slowlog_rolling_old.policies.size.size = 1GB
appender.index_search_slowlog_rolling_old.strategy.type = DefaultRolloverStrategy
appender.index_search_slowlog_rolling_old.strategy.max = 4
#################################################
logger.index_search_slowlog_rolling.name = index.search.slowlog
logger.index_search_slowlog_rolling.level = trace
logger.index_search_slowlog_rolling.appenderRef.index_search_slowlog_rolling.ref = index_search_slowlog_rolling
logger.index_search_slowlog_rolling.appenderRef.index_search_slowlog_rolling_old.ref = index_search_slowlog_rolling_old
logger.index_search_slowlog_rolling.additivity = false

######## Indexing slowlog JSON ##################
appender.index_indexing_slowlog_rolling.type = RollingFile
appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling
appender.index_indexing_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
_index_indexing_slowlog.json
appender.index_indexing_slowlog_rolling.layout.type = ESJsonLayout
appender.index_indexing_slowlog_rolling.layout.type_name = index_indexing_slowlog
appender.index_indexing_slowlog_rolling.layout.esmessagefields=message,took,took_millis,doc_type,id,routing,source

appender.index_indexing_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
_index_indexing_slowlog-%i.json.gz
appender.index_indexing_slowlog_rolling.policies.type = Policies
appender.index_indexing_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy
appender.index_indexing_slowlog_rolling.policies.size.size = 1GB
appender.index_indexing_slowlog_rolling.strategy.type = DefaultRolloverStrategy
appender.index_indexing_slowlog_rolling.strategy.max = 4
#################################################
######## Indexing slowlog - old style pattern ##
appender.index_indexing_slowlog_rolling_old.type = RollingFile
appender.index_indexing_slowlog_rolling_old.name = index_indexing_slowlog_rolling_old
appender.index_indexing_slowlog_rolling_old.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
_index_indexing_slowlog.log
appender.index_indexing_slowlog_rolling_old.layout.type = PatternLayout
appender.index_indexing_slowlog_rolling_old.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n

appender.index_indexing_slowlog_rolling_old.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
_index_indexing_slowlog-%i.log.gz
appender.index_indexing_slowlog_rolling_old.policies.type = Policies
appender.index_indexing_slowlog_rolling_old.policies.size.type = SizeBasedTriggeringPolicy
appender.index_indexing_slowlog_rolling_old.policies.size.size = 1GB
appender.index_indexing_slowlog_rolling_old.strategy.type = DefaultRolloverStrategy
appender.index_indexing_slowlog_rolling_old.strategy.max = 4
#################################################

logger.index_indexing_slowlog.name = index.indexing.slowlog.index
logger.index_indexing_slowlog.level = trace
logger.index_indexing_slowlog.appenderRef.index_indexing_slowlog_rolling.ref = index_indexing_slowlog_rolling
logger.index_indexing_slowlog.appenderRef.index_indexing_slowlog_rolling_old.ref = index_indexing_slowlog_rolling_old
logger.index_indexing_slowlog.additivity = false
@@ -0,0 +1,16 @@
#filebeat.registry_file: /usr/share/filebeat/registry
filebeat.inputs:
- type: log
  paths:
    - /usr/share/filebeat/log_data/dmp/openDMP*.log
  tags: ["audit"]
  enabled: true
  reload.enabled: true
  reload.period: 10s
  multiline.pattern: '^[0-9]{4}-[0-9]{2}-[0-9]{2}'
  multiline.negate: true
  multiline.match: after

output.logstash:
  hosts: ["logstash:31312"]
  bulk_max_size: 128
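Note: the multiline settings above stitch Java stack traces into the event they belong to. Any line that does not start with a yyyy-mm-dd date (multiline.negate: true) is appended to the preceding dated line (multiline.match: after). A hypothetical openDMP log entry illustrating the grouping, where the two trailing lines become part of the first event (class and message invented for illustration):

    2020-02-21 10:15:30.123 ERROR 1 --- [http-nio-8080-exec-1] eu.eudat.logic.SomeService : unexpected failure
    java.lang.NullPointerException: null
        at eu.eudat.logic.SomeService.doWork(SomeService.java:42)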
@@ -0,0 +1,17 @@
---
## Default Kibana configuration from kibana-docker.
## from https://github.com/elastic/kibana-docker/blob/master/build/kibana/config/kibana.yml
#
server.name: kibana
server.host: "0"
## Custom configuration
#
#server.basePath: "/eformslogs"
elasticsearch.hosts: [ "http://elasticsearch:9200" ]
#elasticsearch.ssl.certificateAuthorities: [ "/usr/share/kibana/certificate_authorities/ca.crt" ]

elasticsearch.username: "kibana"
elasticsearch.password: ""
server.ssl.enabled: false
#server.ssl.key: "/usr/share/kibana/certificates/kibana.key"
#server.ssl.certificate: "/usr/share/kibana/certificates/kibana.crt"
@@ -0,0 +1,103 @@
#https://github.com/elastic/logstash/blob/7.4/config/log4j2.properties

status = error
name = LogstashPropertiesConfig

appender.console.type = Console
appender.console.name = plain_console
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c]%notEmpty{[%X{pipeline.id}]} %m%n

appender.json_console.type = Console
appender.json_console.name = json_console
appender.json_console.layout.type = JSONLayout
appender.json_console.layout.compact = true
appender.json_console.layout.eventEol = true

appender.rolling.type = RollingFile
appender.rolling.name = plain_rolling
appender.rolling.fileName = ${sys:ls.logs}/logstash-${sys:ls.log.format}.log
appender.rolling.filePattern = ${sys:ls.logs}/logstash-${sys:ls.log.format}-%d{yyyy-MM-dd}-%i.log.gz
appender.rolling.policies.type = Policies
appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
appender.rolling.policies.time.interval = 1
appender.rolling.policies.time.modulate = true
appender.rolling.layout.type = PatternLayout
appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c]%notEmpty{[%X{pipeline.id}]} %m%n
appender.rolling.policies.size.type = SizeBasedTriggeringPolicy
appender.rolling.policies.size.size = 100MB
appender.rolling.strategy.type = DefaultRolloverStrategy
appender.rolling.strategy.max = 30

appender.json_rolling.type = RollingFile
appender.json_rolling.name = json_rolling
appender.json_rolling.fileName = ${sys:ls.logs}/logstash-${sys:ls.log.format}.log
appender.json_rolling.filePattern = ${sys:ls.logs}/logstash-${sys:ls.log.format}-%d{yyyy-MM-dd}-%i.log.gz
appender.json_rolling.policies.type = Policies
appender.json_rolling.policies.time.type = TimeBasedTriggeringPolicy
appender.json_rolling.policies.time.interval = 1
appender.json_rolling.policies.time.modulate = true
appender.json_rolling.layout.type = JSONLayout
appender.json_rolling.layout.compact = true
appender.json_rolling.layout.eventEol = true
appender.json_rolling.policies.size.type = SizeBasedTriggeringPolicy
appender.json_rolling.policies.size.size = 100MB
appender.json_rolling.strategy.type = DefaultRolloverStrategy
appender.json_rolling.strategy.max = 30

rootLogger.level = ${sys:ls.log.level}
rootLogger.appenderRef.console.ref = ${sys:ls.log.format}_console
rootLogger.appenderRef.rolling.ref = ${sys:ls.log.format}_rolling

# Slowlog

appender.console_slowlog.type = Console
appender.console_slowlog.name = plain_console_slowlog
appender.console_slowlog.layout.type = PatternLayout
appender.console_slowlog.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n

appender.json_console_slowlog.type = Console
appender.json_console_slowlog.name = json_console_slowlog
appender.json_console_slowlog.layout.type = JSONLayout
appender.json_console_slowlog.layout.compact = true
appender.json_console_slowlog.layout.eventEol = true

appender.rolling_slowlog.type = RollingFile
appender.rolling_slowlog.name = plain_rolling_slowlog
appender.rolling_slowlog.fileName = ${sys:ls.logs}/logstash-slowlog-${sys:ls.log.format}.log
appender.rolling_slowlog.filePattern = ${sys:ls.logs}/logstash-slowlog-${sys:ls.log.format}-%d{yyyy-MM-dd}-%i.log.gz
appender.rolling_slowlog.policies.type = Policies
appender.rolling_slowlog.policies.time.type = TimeBasedTriggeringPolicy
appender.rolling_slowlog.policies.time.interval = 1
appender.rolling_slowlog.policies.time.modulate = true
appender.rolling_slowlog.layout.type = PatternLayout
appender.rolling_slowlog.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n
appender.rolling_slowlog.policies.size.type = SizeBasedTriggeringPolicy
appender.rolling_slowlog.policies.size.size = 100MB
appender.rolling_slowlog.strategy.type = DefaultRolloverStrategy
appender.rolling_slowlog.strategy.max = 30

appender.json_rolling_slowlog.type = RollingFile
appender.json_rolling_slowlog.name = json_rolling_slowlog
appender.json_rolling_slowlog.fileName = ${sys:ls.logs}/logstash-slowlog-${sys:ls.log.format}.log
appender.json_rolling_slowlog.filePattern = ${sys:ls.logs}/logstash-slowlog-${sys:ls.log.format}-%d{yyyy-MM-dd}-%i.log.gz
appender.json_rolling_slowlog.policies.type = Policies
appender.json_rolling_slowlog.policies.time.type = TimeBasedTriggeringPolicy
appender.json_rolling_slowlog.policies.time.interval = 1
appender.json_rolling_slowlog.policies.time.modulate = true
appender.json_rolling_slowlog.layout.type = JSONLayout
appender.json_rolling_slowlog.layout.compact = true
appender.json_rolling_slowlog.layout.eventEol = true
appender.json_rolling_slowlog.policies.size.type = SizeBasedTriggeringPolicy
appender.json_rolling_slowlog.policies.size.size = 100MB
appender.json_rolling_slowlog.strategy.type = DefaultRolloverStrategy
appender.json_rolling_slowlog.strategy.max = 30

logger.slowlog.name = slowlog
logger.slowlog.level = trace
logger.slowlog.appenderRef.console_slowlog.ref = ${sys:ls.log.format}_console_slowlog
logger.slowlog.appenderRef.rolling_slowlog.ref = ${sys:ls.log.format}_rolling_slowlog
logger.slowlog.additivity = false

logger.licensereader.name = logstash.licensechecker.licensereader
logger.licensereader.level = error
@@ -0,0 +1,10 @@
---
## Default Logstash configuration from logstash-docker.
## from https://github.com/elastic/logstash-docker/blob/master/build/logstash/config/logstash-oss.yml
#
http.host: "0.0.0.0"
config.reload.automatic: true
config.reload.interval: 300s
path.queue: /usr/share/logstash/queue
path.dead_letter_queue: /usr/share/logstash/dead_letter_queue
xpack.monitoring.elasticsearch.password:
@@ -0,0 +1,18 @@
- pipeline.id: open_dmp_beats
  queue.type: persisted
  queue.max_bytes: 50mb
  dead_letter_queue.enable: true
  path.config: "/usr/share/logstash/pipeline/open_dmp_beats.conf"
  queue.checkpoint.writes: 32
- pipeline.id: open_dmp_main
  queue.type: persisted
  queue.max_bytes: 50mb
  dead_letter_queue.enable: true
  path.config: "/usr/share/logstash/pipeline/open_dmp_main.conf"
  queue.checkpoint.writes: 32
- pipeline.id: open_dmp_send_to_elastic
  queue.type: persisted
  queue.max_bytes: 50mb
  dead_letter_queue.enable: true
  path.config: "/usr/share/logstash/pipeline/open_dmp_send_to_elastic.conf"
  queue.checkpoint.writes: 32
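Note: the three pipelines declared above are chained with Logstash's pipeline-to-pipeline input/output plugins in the .conf files that follow, each stage with its own 50mb persisted queue and dead-letter queue. The flow, roughly:

    filebeat -> open_dmp_beats (beats input, port 31312) -> open_dmp_main (grok parsing) -> open_dmp_send_to_elastic (elasticsearch output)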
@@ -0,0 +1,14 @@
input {
  beats {
    port => 31312
    ssl => false
    client_inactivity_timeout => 3000
  }
}

filter {
}

output {
  pipeline { send_to => open_dmp_main }
}
@@ -0,0 +1,19 @@
input {
  pipeline { address => open_dmp_main }
}

filter {
  grok {
    match => { "message" => "(?<timestamp>%{DATE} %{TIME})%{SPACE}%{LOGLEVEL:level} %{NUMBER:pid} --- \[%{DATA:thread}\] %{DATA:class}%{SPACE}: %{GREEDYDATA:logmessage}" }
  }
  if "_grokparsefailure" not in [tags] {
    mutate
    {
      remove_field => [ "message" ]
    }
  }
}

output {
  pipeline { send_to => open_dmp_send_to_elastic }
}
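Note: the grok pattern above targets the default Spring Boot console format. For a hypothetical input line such as

    2020-02-21 10:15:30.123  INFO 1 --- [main] e.e.EuDatApplication : Started EuDatApplication

it yields roughly level => "INFO", pid => "1", thread => "main", class => "e.e.EuDatApplication", logmessage => "Started EuDatApplication", plus a date/time prefix captured as timestamp (grok's %{DATE} is day/month-oriented, so it may not consume the full four-digit year). On a parse failure the event is tagged _grokparsefailure and keeps its original message field; otherwise message is dropped by the mutate block.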
@@ -0,0 +1,19 @@
input {
  pipeline { address => open_dmp_send_to_elastic }
}

filter {
}

output {
  elasticsearch {
    hosts => "elasticsearch:9200"
    user => elastic
    password =>
    index => "opendmp.logs"
    #manage_template => true
    #template => "/usr/share/logstash/templates/audit/cite_elas_openDMP.json"
    #template_name => "cite.elas.openDMP-audit*"
    #template_overwrite => true
  }
}
@@ -4,7 +4,6 @@
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>eu.eudat</groupId>
    <artifactId>data</artifactId>
    <version>1.0-SNAPSHOT</version>
    <packaging>jar</packaging>
@@ -2,6 +2,7 @@ package eu.eudat.data.dao.criteria;

import eu.eudat.data.entities.Dataset;
import eu.eudat.elastic.entities.Tag;
import eu.eudat.types.grant.GrantStateType;

import java.util.Date;
import java.util.List;

@@ -22,6 +23,8 @@ public class DatasetCriteria extends Criteria<Dataset> {
    private List<UUID> collaborators;
    private List<UUID> datasetTemplates;
    private List<UUID> groupIds;
    private Boolean isPublic;
    private Short grantStatus;

    public boolean getAllVersions() {
        return allVersions;
@@ -113,4 +116,20 @@ public class DatasetCriteria extends Criteria<Dataset> {
    public void setGroupIds(List<UUID> groupIds) {
        this.groupIds = groupIds;
    }

    public Boolean getIsPublic() {
        return isPublic;
    }

    public void setIsPublic(Boolean isPublic) {
        this.isPublic = isPublic;
    }

    public Short getGrantStatus() {
        return grantStatus;
    }

    public void setGrantStatus(Short grantStatus) {
        this.grantStatus = grantStatus;
    }
}
@@ -0,0 +1,38 @@
package eu.eudat.data.dao.criteria;

import eu.eudat.data.entities.Lock;
import eu.eudat.data.entities.UserInfo;

import java.util.Date;
import java.util.UUID;

public class LockCriteria extends Criteria<Lock> {

    private UUID target;
    private UserInfo lockedBy;
    private Date touchedAt;

    public UUID getTarget() {
        return target;
    }

    public void setTarget(UUID target) {
        this.target = target;
    }

    public UserInfo getLockedBy() {
        return lockedBy;
    }

    public void setLockedBy(UserInfo lockedBy) {
        this.lockedBy = lockedBy;
    }

    public Date getTouchedAt() {
        return touchedAt;
    }

    public void setTouchedAt(Date touchedAt) {
        this.touchedAt = touchedAt;
    }
}
@@ -0,0 +1,26 @@
package eu.eudat.data.dao.criteria;

import eu.eudat.data.enumeration.notification.ActiveStatus;
import eu.eudat.data.enumeration.notification.NotifyState;

public class NotificationCriteria {

    private ActiveStatus isActive;
    private NotifyState notifyState;

    public ActiveStatus getIsActive() {
        return isActive;
    }

    public void setIsActive(ActiveStatus isActive) {
        this.isActive = isActive;
    }

    public NotifyState getNotifyState() {
        return notifyState;
    }

    public void setNotifyState(NotifyState notifyState) {
        this.notifyState = notifyState;
    }
}
@@ -43,8 +43,9 @@ public class DMPDaoImpl extends DatabaseAccess<DMP> implements DMPDao {
            query.where(((builder, root) -> root.get("grant").in(criteria.getGrants())));
        if (!criteria.getAllVersions())
            query.initSubQuery(String.class).where((builder, root) -> builder.equal(root.get("version"),
                    query.<String>subQueryMax((builder1, externalRoot, nestedRoot) -> builder1.equal(externalRoot.get("groupId"),
                            nestedRoot.get("groupId")), Arrays.asList(new SelectionField(FieldSelectionType.FIELD, "version")), String.class)));
                    query.<String>subQueryMax((builder1, externalRoot, nestedRoot) -> builder1.and(
                            builder1.equal(externalRoot.get("groupId"), nestedRoot.get("groupId")),
                            builder1.notEqual(nestedRoot.get("status"), DMP.DMPStatus.DELETED.getValue())), Arrays.asList(new SelectionField(FieldSelectionType.FIELD, "version")), String.class)));
        if (criteria.getGroupIds() != null && !criteria.getGroupIds().isEmpty())
            query.where((builder, root) -> root.get("groupId").in(criteria.getGroupIds()));
        if (criteria.getStatus() != null) {
@@ -3,12 +3,14 @@ package eu.eudat.data.dao.entities;
import eu.eudat.data.dao.DatabaseAccess;
import eu.eudat.data.dao.criteria.DatasetCriteria;
import eu.eudat.data.dao.databaselayer.service.DatabaseService;
import eu.eudat.data.entities.DMP;
import eu.eudat.data.entities.Dataset;
import eu.eudat.data.entities.UserDMP;
import eu.eudat.data.entities.UserInfo;
import eu.eudat.queryable.QueryableList;
import eu.eudat.queryable.types.FieldSelectionType;
import eu.eudat.queryable.types.SelectionField;
import eu.eudat.types.grant.GrantStateType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;

@@ -16,6 +18,7 @@ import org.springframework.stereotype.Component;
import javax.persistence.criteria.Join;
import javax.persistence.criteria.JoinType;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
@@ -29,6 +32,13 @@ public class DatasetDaoImpl extends DatabaseAccess<Dataset> implements DatasetDao {
    @Override
    public QueryableList<Dataset> getWithCriteria(DatasetCriteria criteria) {
        QueryableList<Dataset> query = getDatabaseService().getQueryable(Dataset.getHints(), Dataset.class);
        if (criteria.getIsPublic() != null && criteria.getIsPublic()) {
            query.where((builder, root) -> builder.equal(root.get("dmp").get("isPublic"), true));
            query.where((builder, root) -> builder.equal(root.get("status"), Dataset.Status.FINALISED.getValue()));
            query.initSubQuery(String.class).where((builder, root) -> builder.equal(root.get("dmp").get("version"),
                    query.<String>subQueryMax((builder1, externalRoot, nestedRoot) -> builder1.equal(externalRoot.get("dmp").get("groupId"), nestedRoot.get("dmp").get("groupId")),
                            Arrays.asList(new SelectionField(FieldSelectionType.COMPOSITE_FIELD, "dmp:version")), String.class)));
        }
        if (criteria.getLike() != null && !criteria.getLike().isEmpty())
            query.where((builder, root) -> builder.or(
                    builder.like(builder.upper(root.get("label")), "%" + criteria.getLike().toUpperCase() + "%"),
@@ -42,7 +52,7 @@ public class DatasetDaoImpl extends DatabaseAccess<Dataset> implements DatasetDao {
        if (criteria.getPeriodStart() != null)
            query.where((builder, root) -> builder.greaterThan(root.get("created"), criteria.getPeriodStart()));
        if (!criteria.getAllVersions())
            query.initSubQuery(String.class).where((builder, root) -> builder.equal(root.get("dmp").get("version"), query.<String>subQueryMax((builder1, externalRoot, nestedRoot) -> builder1.equal(externalRoot.get("dmp").get("groupId"), nestedRoot.get("dmp").get("groupId")), Arrays.asList(new SelectionField(FieldSelectionType.COMPOSITE_FIELD, "dmp:version")), String.class)));
            query.initSubQuery(String.class).where((builder, root) -> builder.equal(root.get("dmp").get("version"), query.<String>subQueryMax((builder1, externalRoot, nestedRoot) -> builder1.and(builder1.equal(externalRoot.get("dmp").get("groupId"), nestedRoot.get("dmp").get("groupId")), builder1.notEqual(nestedRoot.get("dmp").get("status"), DMP.DMPStatus.DELETED.getValue())), Arrays.asList(new SelectionField(FieldSelectionType.COMPOSITE_FIELD, "dmp:version")), String.class)));
        if (criteria.getGroupIds() != null && !criteria.getGroupIds().isEmpty())
            query.where((builder, root) -> root.get("dmp").get("groupId").in(criteria.getGroupIds()));
        if (criteria.getDmpIds() != null && !criteria.getDmpIds().isEmpty())
@@ -56,6 +66,14 @@ public class DatasetDaoImpl extends DatabaseAccess<Dataset> implements DatasetDao {
            query.where((builder, root) -> root.join("dmp").join("organisations").get("reference").in(criteria.getOrganisations()));
        if (criteria.getGrants() != null && !criteria.getGrants().isEmpty())
            query.where((builder, root) -> root.join("dmp").join("grant").get("id").in(criteria.getGrants()));
        if (criteria.getGrantStatus() != null) {
            if (criteria.getGrantStatus().equals(GrantStateType.FINISHED.getValue().shortValue()))
                query.where((builder, root) -> builder.lessThan(root.get("dmp").get("grant").get("enddate"), new Date()));
            if (criteria.getGrantStatus().equals(GrantStateType.ONGOING.getValue().shortValue()))
                query.where((builder, root) ->
                        builder.or(builder.greaterThan(root.get("dmp").get("grant").get("enddate"), new Date()),
                                builder.isNull(root.get("dmp").get("grant").get("enddate"))));
        }
        if (criteria.getCollaborators() != null && !criteria.getCollaborators().isEmpty())
            query.where((builder, root) -> root.join("dmp", JoinType.LEFT).join("users", JoinType.LEFT).join("user", JoinType.LEFT).get("id").in(criteria.getCollaborators()));
        if (criteria.getDatasetTemplates() != null && !criteria.getDatasetTemplates().isEmpty())
@@ -0,0 +1,13 @@
package eu.eudat.data.dao.entities;

import eu.eudat.data.dao.DatabaseAccessLayer;
import eu.eudat.data.dao.criteria.LockCriteria;
import eu.eudat.data.entities.Lock;
import eu.eudat.queryable.QueryableList;

import java.util.UUID;

public interface LockDao extends DatabaseAccessLayer<Lock, UUID> {

    QueryableList<Lock> getWithCriteria(LockCriteria criteria);
}
@@ -0,0 +1,65 @@
package eu.eudat.data.dao.entities;

import eu.eudat.data.dao.DatabaseAccess;
import eu.eudat.data.dao.criteria.LockCriteria;
import eu.eudat.data.dao.databaselayer.service.DatabaseService;
import eu.eudat.data.entities.Lock;
import eu.eudat.queryable.QueryableList;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import java.util.UUID;
import java.util.concurrent.CompletableFuture;

@Service("LockDao")
public class LockDaoImpl extends DatabaseAccess<Lock> implements LockDao {

    @Autowired
    public LockDaoImpl(DatabaseService<Lock> databaseService) {
        super(databaseService);
    }

    @Override
    public QueryableList<Lock> getWithCriteria(LockCriteria criteria) {
        QueryableList<Lock> query = this.getDatabaseService().getQueryable(Lock.class);
        if (criteria.getTouchedAt() != null)
            query.where((builder, root) -> builder.equal(root.get("touchedAt"), criteria.getTouchedAt()));
        if (criteria.getLockedBy() != null)
            query.where(((builder, root) -> builder.equal(root.get("lockedBy"), criteria.getLockedBy())));
        if (criteria.getTarget() != null)
            query.where(((builder, root) -> builder.equal(root.get("target"), criteria.getTarget())));
        return query;
    }

    @Override
    public Lock createOrUpdate(Lock item) {
        return this.getDatabaseService().createOrUpdate(item, Lock.class);
    }

    @Async
    @Override
    public CompletableFuture<Lock> createOrUpdateAsync(Lock item) {
        return CompletableFuture.supplyAsync(() -> this.createOrUpdate(item));
    }

    @Override
    public Lock find(UUID id) {
        return this.getDatabaseService().getQueryable(Lock.class).where(((builder, root) -> builder.equal(root.get("id"), id))).getSingle();
    }

    @Override
    public Lock find(UUID id, String hint) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void delete(Lock item) {
        this.getDatabaseService().delete(item);
    }

    @Override
    public QueryableList<Lock> asQueryable() {
        return this.getDatabaseService().getQueryable(Lock.class);
    }
}
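A minimal usage sketch of the new lock plumbing (wiring and variable names assumed, not part of this commit): look up whether a target entity already has a lock row and, if the query comes back empty, record one for the current user.

    LockCriteria criteria = new LockCriteria();
    criteria.setTarget(dmpId);                    // UUID of the entity being edited (hypothetical variable)
    QueryableList<Lock> existing = lockDao.getWithCriteria(criteria);
    // if the query yields no row, record a lock so other users see the target as busy:
    Lock lock = new Lock();
    lock.setTarget(dmpId);
    lock.setLockedBy(currentUser);                // a UserInfo instance (hypothetical variable)
    lock.setTouchedAt(new Date());
    lockDao.createOrUpdate(lock);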
@@ -0,0 +1,13 @@
package eu.eudat.data.dao.entities;

import eu.eudat.data.dao.DatabaseAccessLayer;
import eu.eudat.data.dao.criteria.NotificationCriteria;
import eu.eudat.data.entities.Notification;
import eu.eudat.queryable.QueryableList;

import java.util.UUID;

public interface NotificationDao extends DatabaseAccessLayer<Notification, UUID> {

    QueryableList<Notification> getWithCriteria(NotificationCriteria criteria);
}
@@ -0,0 +1,61 @@
package eu.eudat.data.dao.entities;

import eu.eudat.data.dao.DatabaseAccess;
import eu.eudat.data.dao.criteria.NotificationCriteria;
import eu.eudat.data.dao.databaselayer.service.DatabaseService;
import eu.eudat.data.entities.Lock;
import eu.eudat.data.entities.Notification;
import eu.eudat.queryable.QueryableList;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.UUID;
import java.util.concurrent.CompletableFuture;

@Service("NotificationDao")
public class NotificationDaoImpl extends DatabaseAccess<Notification> implements NotificationDao {
    @Autowired
    public NotificationDaoImpl(DatabaseService<Notification> databaseService) {
        super(databaseService);
    }

    @Override
    public QueryableList<Notification> getWithCriteria(NotificationCriteria criteria) {
        QueryableList<Notification> query = this.getDatabaseService().getQueryable(Notification.class);
        if (criteria.getIsActive() != null)
            query.where((builder, root) -> builder.equal(root.get("isActive"), criteria.getIsActive()));
        if (criteria.getNotifyState() != null)
            query.where(((builder, root) -> builder.equal(root.get("notifyState"), criteria.getNotifyState())));
        return query;
    }

    @Override
    public Notification createOrUpdate(Notification item) {
        return this.getDatabaseService().createOrUpdate(item, Notification.class);
    }

    @Override
    public CompletableFuture<Notification> createOrUpdateAsync(Notification item) {
        return CompletableFuture.supplyAsync(() -> this.getDatabaseService().createOrUpdate(item, Notification.class));
    }

    @Override
    public Notification find(UUID id) {
        return this.getDatabaseService().getQueryable(Notification.class).where(((builder, root) -> builder.equal(root.get("id"), id))).getSingle();
    }

    @Override
    public Notification find(UUID id, String hint) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void delete(Notification item) {
        this.getDatabaseService().delete(item);
    }

    @Override
    public QueryableList<Notification> asQueryable() {
        return this.getDatabaseService().getQueryable(Notification.class);
    }
}
@@ -0,0 +1,95 @@
package eu.eudat.data.entities;

import eu.eudat.data.converters.DateToUTCConverter;
import eu.eudat.data.entities.helpers.EntityBinder;
import eu.eudat.queryable.queryableentity.DataEntity;
import org.hibernate.annotations.GenericGenerator;

import javax.persistence.*;
import java.util.Date;
import java.util.List;
import java.util.UUID;

@Entity
@Table(name = "\"Lock\"")
public class Lock implements DataEntity<Lock, UUID> {

    @Id
    @GeneratedValue
    @GenericGenerator(name = "uuid2", strategy = "uuid2")
    @Column(name = "id", updatable = false, nullable = false, columnDefinition = "BINARY(16)")
    private UUID id;

    @Column(name = "\"Target\"", nullable = false)
    private UUID target;

    @ManyToOne(fetch = FetchType.EAGER)
    @JoinColumn(name = "\"LockedBy\"", nullable = false)
    private UserInfo lockedBy;

    @Column(name = "\"LockedAt\"")
    @Convert(converter = DateToUTCConverter.class)
    private Date lockedAt = new Date();

    @Column(name = "\"TouchedAt\"")
    @Convert(converter = DateToUTCConverter.class)
    private Date touchedAt = null;


    public UUID getId() {
        return id;
    }

    public void setId(UUID id) {
        this.id = id;
    }

    public UUID getTarget() {
        return target;
    }

    public void setTarget(UUID target) {
        this.target = target;
    }

    public UserInfo getLockedBy() {
        return lockedBy;
    }

    public void setLockedBy(UserInfo lockedBy) {
        this.lockedBy = lockedBy;
    }

    public Date getLockedAt() {
        return lockedAt;
    }

    public void setLockedAt(Date lockedAt) {
        this.lockedAt = lockedAt;
    }

    public Date getTouchedAt() {
        return touchedAt;
    }

    public void setTouchedAt(Date touchedAt) {
        this.touchedAt = touchedAt;
    }

    @Override
    public void update(Lock entity) {
        this.touchedAt = entity.touchedAt;
    }

    @Override
    public UUID getKeys() {
        return this.id;
    }

    @Override
    public Lock buildFromTuple(List<Tuple> tuple, List<String> fields, String base) {
        String currentBase = base.isEmpty() ? "" : base + ".";
        if (fields.contains(currentBase + "id")) this.id = EntityBinder.fromTuple(tuple, currentBase + "id");
        return this;
    }
}
@@ -0,0 +1,175 @@
package eu.eudat.data.entities;

import eu.eudat.data.enumeration.notification.ActiveStatus;
import eu.eudat.data.enumeration.notification.ContactType;
import eu.eudat.data.enumeration.notification.NotificationType;
import eu.eudat.data.enumeration.notification.NotifyState;
import eu.eudat.queryable.queryableentity.DataEntity;
import org.hibernate.annotations.GenericGenerator;

import javax.persistence.*;
import java.util.Date;
import java.util.List;
import java.util.UUID;

@Entity
@Table(name = "\"Notification\"")
public class Notification implements DataEntity<Notification, UUID> {

    @Id
    @GeneratedValue
    @GenericGenerator(name = "uuid2", strategy = "uuid2")
    @Column(name = "id", updatable = false, nullable = false, columnDefinition = "BINARY(16)")
    private UUID id;

    @ManyToOne(fetch = FetchType.EAGER)
    @JoinColumn(name = "\"UserId\"")
    private UserInfo userId;

    @Enumerated
    @Column(name = "\"IsActive\"", nullable = false)
    private ActiveStatus isActive;

    @Enumerated
    @Column(name = "\"Type\"", nullable = false)
    private NotificationType type;

    @Enumerated
    @Column(name = "\"ContactTypeHint\"")
    private ContactType contactTypeHint;

    @Column(name = "\"ContactHint\"")
    private String contactHint;

    @Column(name = "\"Data\"")
    private String data;

    @Enumerated
    @Column(name = "\"NotifyState\"")
    private NotifyState notifyState;

    @Column(name = "\"NotifiedAt\"")
    private Date notifiedAt;

    @Column(name = "\"RetryCount\"")
    private Integer retryCount;

    @Column(name = "\"CreatedAt\"")
    private Date createdAt;

    @Column(name = "\"UpdatedAt\"")
    private Date updatedAt;


    public UUID getId() {
        return id;
    }

    public void setId(UUID id) {
        this.id = id;
    }

    public UserInfo getUserId() {
        return userId;
    }

    public void setUserId(UserInfo userId) {
        this.userId = userId;
    }

    public ActiveStatus getIsActive() {
        return isActive;
    }

    public void setIsActive(ActiveStatus isActive) {
        this.isActive = isActive;
    }

    public NotificationType getType() {
        return type;
    }

    public void setType(NotificationType type) {
        this.type = type;
    }

    public ContactType getContactTypeHint() {
        return contactTypeHint;
    }

    public void setContactTypeHint(ContactType contactTypeHint) {
        this.contactTypeHint = contactTypeHint;
    }

    public String getContactHint() {
        return contactHint;
    }

    public void setContactHint(String contactHint) {
        this.contactHint = contactHint;
    }

    public String getData() {
        return data;
    }

    public void setData(String data) {
        this.data = data;
    }

    public NotifyState getNotifyState() {
        return notifyState;
    }

    public void setNotifyState(NotifyState notifyState) {
        this.notifyState = notifyState;
    }

    public Date getNotifiedAt() {
        return notifiedAt;
    }

    public void setNotifiedAt(Date notifiedAt) {
        this.notifiedAt = notifiedAt;
    }

    public Integer getRetryCount() {
        return retryCount;
    }

    public void setRetryCount(Integer retryCount) {
        this.retryCount = retryCount;
    }

    public Date getCreatedAt() {
        return createdAt;
    }

    public void setCreatedAt(Date createdAt) {
        this.createdAt = createdAt;
    }

    public Date getUpdatedAt() {
        return updatedAt;
    }

    public void setUpdatedAt(Date updatedAt) {
        this.updatedAt = updatedAt;
    }

    @Override
    public void update(Notification entity) {
    }

    @Override
    public UUID getKeys() {
        return null;
    }

    @Override
    public Notification buildFromTuple(List<Tuple> tuple, List<String> fields, String base) {
        return null;
    }

}
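A sketch of how a pending notification might be queued with the entity and DAO introduced above (variable wiring assumed, not part of this commit; a separate worker would later pick up PENDING rows via NotificationCriteria):

    Notification n = new Notification();
    n.setUserId(recipient);                        // a UserInfo instance (hypothetical variable)
    n.setType(NotificationType.DMP_FINALISED);
    n.setContactTypeHint(ContactType.EMAIL);
    n.setContactHint("user@example.org");          // hypothetical address
    n.setIsActive(ActiveStatus.ACTIVE);
    n.setNotifyState(NotifyState.PENDING);
    n.setCreatedAt(new Date());
    notificationDao.createOrUpdate(n);

Note that the bare @Enumerated annotations default to EnumType.ORDINAL, so the integers stored in the table are the declaration indexes of these enums, which happen to coincide with their explicit int values.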
@@ -69,6 +69,12 @@ public class UserInfo implements DataEntity<UserInfo, UUID> {
    @OneToMany(mappedBy = "userInfo", fetch = FetchType.LAZY)
    private Set<UserRole> userRoles = new HashSet<>();

    @OneToMany(mappedBy = "lockedBy", fetch = FetchType.LAZY)
    private Set<Lock> locks = new HashSet<>();

    @OneToMany(mappedBy = "userId", fetch = FetchType.LAZY)
    private Set<Notification> notifications = new HashSet<>();

    public Set<DMP> getDmps() {
        return dmps;
    }

@@ -165,6 +171,22 @@ public class UserInfo implements DataEntity<UserInfo, UUID> {
        this.userRoles = userRoles;
    }

    public Set<Lock> getLocks() {
        return locks;
    }

    public void setLocks(Set<Lock> locks) {
        this.locks = locks;
    }

    public Set<Notification> getNotifications() {
        return notifications;
    }

    public void setNotifications(Set<Notification> notifications) {
        this.notifications = notifications;
    }

    @Override
    public void update(UserInfo entity) {
        this.name = entity.getName();
@@ -0,0 +1,27 @@
package eu.eudat.data.enumeration.notification;

public enum ActiveStatus {
    ACTIVE(0),
    INACTIVE(1);

    private int status;

    ActiveStatus(int status) {
        this.status = status;
    }

    public int getStatus() {
        return status;
    }

    public ActiveStatus fromInteger(int status) {
        switch (status) {
            case 0:
                return ACTIVE;
            case 1:
                return INACTIVE;
            default:
                throw new RuntimeException("Unsupported Active Status");
        }
    }
}
@@ -0,0 +1,24 @@
package eu.eudat.data.enumeration.notification;

public enum ContactType {
    EMAIL(0);

    private int type;

    ContactType(int type) {
        this.type = type;
    }

    public int getType() {
        return type;
    }

    public ContactType fromInteger(int type) {
        switch (type) {
            case 0:
                return EMAIL;
            default:
                throw new RuntimeException("Unsupported Contact Type");
        }
    }
}
@@ -0,0 +1,39 @@
package eu.eudat.data.enumeration.notification;

public enum NotificationType {
    DMP_MODIFIED(0),
    DATASET_MODIFIED(1),
    DMP_PUBLISH(2),
    DMP_FINALISED(3),
    DMP_MODIFIED_FINALISED(4),
    DATASET_MODIFIED_FINALISED(5);

    private int type;

    NotificationType(int type) {
        this.type = type;
    }

    public int getType() {
        return type;
    }

    public NotificationType fromInteger(int type) {
        switch (type) {
            case 0:
                return DMP_MODIFIED;
            case 1:
                return DATASET_MODIFIED;
            case 2:
                return DMP_PUBLISH;
            case 3:
                return DMP_FINALISED;
            case 4:
                return DMP_MODIFIED_FINALISED;
            case 5:
                return DATASET_MODIFIED_FINALISED;
            default:
                throw new RuntimeException("Unsupported Notification Type");
        }
    }
}
@@ -0,0 +1,36 @@
package eu.eudat.data.enumeration.notification;

public enum NotifyState {
    PENDING(0),
    PROCESSING(1),
    SENDING(2),
    SUCCEEDED(3),
    ERROR(4);

    private int state;

    NotifyState(int state) {
        this.state = state;
    }

    public int getState() {
        return state;
    }

    public NotifyState fromInteger(int state) {
        switch (state) {
            case 0:
                return PENDING;
            case 1:
                return PROCESSING;
            case 2:
                return SENDING;
            case 3:
                return SUCCEEDED;
            case 4:
                return ERROR;
            default:
                throw new RuntimeException("Unsupported Notify State");
        }
    }
}
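One quirk worth noting about these enums: each fromInteger is declared as an instance method rather than static, so mapping a stored integer back to a constant requires going through an existing constant. A sketch ("stored" is a hypothetical variable):

    int stored = 3;
    NotifyState state = NotifyState.PENDING.fromInteger(stored); // yields SUCCEEDED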
@@ -0,0 +1,20 @@
package eu.eudat.data.query.items.item.lock;

import eu.eudat.data.dao.criteria.LockCriteria;
import eu.eudat.data.entities.Lock;
import eu.eudat.data.query.definition.Query;
import eu.eudat.queryable.QueryableList;

public class LockCriteriaRequest extends Query<LockCriteria, Lock> {
    @Override
    public QueryableList<Lock> applyCriteria() {
        QueryableList<Lock> query = this.getQuery();
        if (this.getCriteria().getTouchedAt() != null)
            query.where((builder, root) -> builder.equal(root.get("touchedAt"), this.getCriteria().getTouchedAt()));
        if (this.getCriteria().getLockedBy() != null)
            query.where(((builder, root) -> builder.equal(root.get("lockedBy"), this.getCriteria().getLockedBy())));
        if (this.getCriteria().getTarget() != null)
            query.where(((builder, root) -> builder.equal(root.get("target"), this.getCriteria().getTarget())));
        return query;
    }
}
@@ -0,0 +1,29 @@
package eu.eudat.data.query.items.table.lock;

import eu.eudat.data.dao.criteria.LockCriteria;
import eu.eudat.data.entities.Lock;
import eu.eudat.data.query.PaginationService;
import eu.eudat.data.query.definition.Query;
import eu.eudat.data.query.definition.TableQuery;
import eu.eudat.queryable.QueryableList;

import java.util.UUID;

public class LockTableRequest extends TableQuery<LockCriteria, Lock, UUID> {
    @Override
    public QueryableList<Lock> applyCriteria() {
        QueryableList<Lock> query = this.getQuery();
        if (this.getCriteria().getTouchedAt() != null)
            query.where((builder, root) -> builder.equal(root.get("touchedAt"), this.getCriteria().getTouchedAt()));
        if (this.getCriteria().getLockedBy() != null)
            query.where(((builder, root) -> builder.equal(root.get("lockedBy"), this.getCriteria().getLockedBy())));
        if (this.getCriteria().getTarget() != null)
            query.where(((builder, root) -> builder.equal(root.get("target"), this.getCriteria().getTarget())));
        return query;
    }

    @Override
    public QueryableList<Lock> applyPaging(QueryableList<Lock> items) {
        return PaginationService.applyPaging(items, this);
    }
}
@@ -0,0 +1,78 @@
package eu.eudat.query;

import eu.eudat.data.dao.DatabaseAccessLayer;
import eu.eudat.data.entities.Lock;
import eu.eudat.data.entities.UserInfo;
import eu.eudat.queryable.QueryableList;
import eu.eudat.queryable.types.FieldSelectionType;
import eu.eudat.queryable.types.SelectionField;

import javax.persistence.criteria.Subquery;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.UUID;

public class LockQuery extends Query<Lock, UUID> {

    private UUID id;
    private UUID target;
    private UserQuery userQuery;
    private Date touchedAt;

    public UUID getId() {
        return id;
    }

    public void setId(UUID id) {
        this.id = id;
    }

    public UUID getTarget() {
        return target;
    }

    public void setTarget(UUID target) {
        this.target = target;
    }

    public UserQuery getUserQuery() {
        return userQuery;
    }

    public void setUserQuery(UserQuery userQuery) {
        this.userQuery = userQuery;
    }

    public Date getTouchedAt() {
        return touchedAt;
    }

    public void setTouchedAt(Date touchedAt) {
        this.touchedAt = touchedAt;
    }

    public LockQuery(DatabaseAccessLayer<Lock, UUID> databaseAccessLayer, List<String> selectionFields) {
        super(databaseAccessLayer, selectionFields);
    }

    public LockQuery(DatabaseAccessLayer<Lock, UUID> databaseAccessLayer) {
        super(databaseAccessLayer);
    }

    @Override
    public QueryableList<Lock> getQuery() {
        QueryableList<Lock> query = this.databaseAccessLayer.asQueryable();
        if (this.id != null) {
            query.where((builder, root) -> builder.equal(root.get("id"), this.id));
        }
        if (this.target != null) {
            query.where(((builder, root) -> builder.equal(root.get("target"), this.target)));
        }
        if (this.userQuery != null) {
            Subquery<UserInfo> userSubQuery = this.userQuery.getQuery().query(Arrays.asList(new SelectionField(FieldSelectionType.FIELD, "id")));
            query.where((builder, root) -> root.get("lockedBy").get("id").in(userSubQuery));
        }
        return query;
    }
}
@@ -3,12 +3,96 @@ package eu.eudat.elastic.criteria;
import eu.eudat.elastic.entities.Tag;

import java.util.List;
import java.util.UUID;

/**
 * Created by ikalyvas on 7/5/2018.
 */
public class DatasetCriteria extends Criteria {
    public List<Tag> tags;
    private String like;
    private List<UUID> datasetTemplates;
    private Short status;
    private List<UUID> dmps;
    private List<UUID> groupIds;
    private List<UUID> grants;
    private List<UUID> collaborators;
    private Boolean allowAllVersions;
    private List<String> organiztions;
    private List<Tag> tags;
    private boolean isPublic;
    private Short grantStatus;

    public String getLike() {
        return like;
    }

    public void setLike(String like) {
        this.like = like;
    }

    public List<UUID> getDatasetTemplates() {
        return datasetTemplates;
    }

    public void setDatasetTemplates(List<UUID> datasetTemplates) {
        this.datasetTemplates = datasetTemplates;
    }

    public Short getStatus() {
        return status;
    }

    public void setStatus(Short status) {
        this.status = status;
    }

    public List<UUID> getDmps() {
        return dmps;
    }

    public void setDmps(List<UUID> dmps) {
        this.dmps = dmps;
    }

    public List<UUID> getGroupIds() {
        return groupIds;
    }

    public void setGroupIds(List<UUID> groupIds) {
        this.groupIds = groupIds;
    }

    public List<UUID> getGrants() {
        return grants;
    }

    public void setGrants(List<UUID> grants) {
        this.grants = grants;
    }

    public List<UUID> getCollaborators() {
        return collaborators;
    }

    public void setCollaborators(List<UUID> collaborators) {
        this.collaborators = collaborators;
    }

    public Boolean getAllowAllVersions() {
        return allowAllVersions;
    }

    public void setAllowAllVersions(Boolean allowAllVersions) {
        this.allowAllVersions = allowAllVersions;
    }

    public List<String> getOrganiztions() {
        return organiztions;
    }

    public void setOrganiztions(List<String> organiztions) {
        this.organiztions = organiztions;
    }

    public List<Tag> getTags() {
        return tags;

@@ -17,4 +101,20 @@ public class DatasetCriteria extends Criteria {
    public void setTags(List<Tag> tags) {
        this.tags = tags;
    }

    public boolean isPublic() {
        return isPublic;
    }

    public void setPublic(boolean aPublic) {
        isPublic = aPublic;
    }

    public Short getGrantStatus() {
        return grantStatus;
    }

    public void setGrantStatus(Short grantStatus) {
        this.grantStatus = grantStatus;
    }
}
@@ -0,0 +1,44 @@
package eu.eudat.elastic.entities;

import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Map;
import java.util.UUID;

public class Collaborator implements ElasticEntity<Collaborator> {
    private String id;
    private String name;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    @Override
    public XContentBuilder toElasticEntity(XContentBuilder builder) throws IOException {
        builder.startObject();
        builder.field("id", this.id);
        builder.field("name", this.name);
        builder.endObject();
        return builder;
    }

    @Override
    public Collaborator fromElasticEntity(Map fields) {
        this.id = (String) fields.get("id");
        this.name = (String) fields.get("name");
        return this;
    }
}
@@ -5,9 +5,8 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Created by ikalyvas on 7/5/2018.
@@ -15,8 +14,51 @@ import java.util.Map;
public class Dataset implements ElasticEntity<Dataset> {
    private static final Logger logger = LoggerFactory.getLogger(Dataset.class);

    public enum Status {
        SAVED((short) 0), FINALISED((short) 1), CANCELED((short) 2), DELETED((short) 99),;

        private short value;

        private Status(short value) {
            this.value = value;
        }

        public short getValue() {
            return value;
        }

        public static Status fromInteger(int value) {
            switch (value) {
                case 0:
                    return SAVED;
                case 1:
                    return FINALISED;
                case 2:
                    return CANCELED;
                case 99:
                    return DELETED;
                default:
                    throw new RuntimeException("Unsupported Dataset Status");
            }
        }
    }

    private String id;
    private List<Tag> tags = new LinkedList<>();
    private String label;
    private String description;
    private UUID template;
    private Short status;
    private UUID dmp;
    private UUID group;
    private UUID grant;
    private List<Collaborator> collaborators;
    private Boolean lastVersion;
    private Boolean lastPublicVersion;
    private List<Organization> organizations;
    private Boolean isPublic;
    private Short grantStatus;
    private String formData;

    public String getId() {
        return id;
@@ -34,10 +76,158 @@ public class Dataset implements ElasticEntity<Dataset> {
        this.tags = tags;
    }

    public String getLabel() {
        return label;
    }

    public void setLabel(String label) {
        this.label = label;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public UUID getTemplate() {
        return template;
    }

    public void setTemplate(UUID template) {
        this.template = template;
    }

    public Short getStatus() {
        return status;
    }

    public void setStatus(Short status) {
        this.status = status;
    }

    public UUID getDmp() {
        return dmp;
    }

    public void setDmp(UUID dmp) {
        this.dmp = dmp;
    }

    public UUID getGroup() {
        return group;
    }

    public void setGroup(UUID group) {
        this.group = group;
    }

    public UUID getGrant() {
        return grant;
    }

    public void setGrant(UUID grant) {
        this.grant = grant;
    }

    public List<Collaborator> getCollaborators() {
        return collaborators;
    }

    public void setCollaborators(List<Collaborator> collaborators) {
        this.collaborators = collaborators;
    }

    public Boolean getLastVersion() {
        return lastVersion;
    }

    public void setLastVersion(Boolean lastVersion) {
        this.lastVersion = lastVersion;
    }

    public Boolean getLastPublicVersion() {
        return lastPublicVersion;
    }

    public void setLastPublicVersion(Boolean lastPublicVersion) {
        this.lastPublicVersion = lastPublicVersion;
    }

    public List<Organization> getOrganizations() {
        return organizations;
    }

    public void setOrganizations(List<Organization> organizations) {
        this.organizations = organizations;
    }

    public Boolean getPublic() {
        return isPublic;
    }

    public void setPublic(Boolean aPublic) {
        isPublic = aPublic;
    }

    public Short getGrantStatus() {
        return grantStatus;
    }

    public void setGrantStatus(Short grantStatus) {
        this.grantStatus = grantStatus;
    }

    public String getFormData() {
        return formData;
    }

    public void setFormData(String formData) {
        this.formData = formData;
    }

    @Override
    public XContentBuilder toElasticEntity(XContentBuilder builder) throws IOException {
        builder.startObject();
        builder.field("id", this.id);
        builder.field("label", this.label);
        builder.field("description", this.description);
        builder.field("template", this.template.toString());
        builder.field("status", this.status.toString());
        builder.field("dmp", this.dmp.toString());
        if (this.group != null) {
            builder.field("group", this.group.toString());
        }
        builder.field("grant", this.grant.toString());
        if (collaborators != null) {
            builder.startArray("collaborators");
            this.collaborators.forEach(x -> {
                try {
                    x.toElasticEntity(builder);
                } catch (IOException e) {
                    logger.error(e.getMessage(), e);
                }
            });
            builder.endArray();
        }
        builder.field("lastVersion", this.lastVersion.toString());
        builder.field("lastPublicVersion", this.lastPublicVersion.toString());
        if (organizations != null) {
            builder.startArray("organizations");
            this.organizations.forEach(x -> {
                try {
                    x.toElasticEntity(builder);
                } catch (IOException e) {
                    logger.error(e.getMessage(), e);
                }
            });
            builder.endArray();
        }
        if (this.tags != null) {
            builder.startArray("tags");
            this.tags.forEach(x -> {
                try {
|
|||
}
|
||||
});
|
||||
builder.endArray();
|
||||
}
|
||||
if (this.isPublic != null) {
|
||||
builder.field("public", this.isPublic.toString());
|
||||
}
|
||||
if (this.grantStatus != null) {
|
||||
builder.field("grantStatus", this.grantStatus.toString());
|
||||
}
|
||||
builder.field("formData", this.formData);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
@@ -55,7 +253,31 @@ public class Dataset implements ElasticEntity<Dataset> {
    public Dataset fromElasticEntity(Map<String, Object> fields) {
        if (fields != null) {
            this.id = (String) fields.get("id");
            this.tags = ((List<Tag>) fields.get("tags"));
            if (fields.get("tags") != null) {
                this.tags = ((List<HashMap>) fields.get("tags")).stream().map(hashMap -> new Tag().fromElasticEntity(hashMap)).collect(Collectors.toList());
            }
            this.label = (String) fields.get("label");
            this.description = (String) fields.get("description");
            this.template = UUID.fromString((String) fields.get("template"));
            this.status = Short.valueOf((String) fields.get("status"));
            this.dmp = UUID.fromString((String) fields.get("dmp"));
            this.group = UUID.fromString((String) fields.get("group"));
            this.grant = UUID.fromString((String) fields.get("grant"));
            if (fields.get("collaborators") != null) {
                this.collaborators = ((List<HashMap>) fields.get("collaborators")).stream().map(hashMap -> new Collaborator().fromElasticEntity(hashMap)).collect(Collectors.toList());
            }
            this.lastVersion = Boolean.parseBoolean((String) fields.get("lastVersion"));
            this.lastPublicVersion = Boolean.parseBoolean((String) fields.get("lastPublicVersion"));
            if (fields.get("organizations") != null) {
                this.organizations = ((List<HashMap>) fields.get("organizations")).stream().map(hashMap -> new Organization().fromElasticEntity(hashMap)).collect(Collectors.toList());
            }
            if (fields.get("public") != null) {
                this.isPublic = Boolean.valueOf((String) fields.get("public"));
            }
            if (fields.get("grantStatus") != null) {
                this.grantStatus = Short.valueOf((String) fields.get("grantStatus"));
            }
            this.formData = (String) fields.get("formData");
        }
        return this;
    }

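For orientation, the two methods above hand-serialize the entity into the index document and rebuild it from a hit's source map. A stripped-down round-trip sketch with two fields, assuming only the XContent helpers already used in this file (field values are illustrative):

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

public class DatasetRoundTripSketch {
    public static void main(String[] args) throws IOException {
        // Serialize: mirrors Dataset.toElasticEntity, reduced to two fields.
        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject();
        builder.field("label", "My dataset");                 // illustrative value
        builder.field("status", String.valueOf((short) 0));   // stored as a string, as above
        builder.endObject();

        // Deserialize: mirrors Dataset.fromElasticEntity, which receives
        // the hit's source as a Map<String, Object>.
        Map<String, Object> source = new HashMap<>();
        source.put("label", "My dataset");
        source.put("status", "0");
        String label = (String) source.get("label");
        Short status = Short.valueOf((String) source.get("status"));
        System.out.println(label + " / " + status);
    }
}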
@@ -0,0 +1,43 @@
package eu.eudat.elastic.entities;

import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Map;

public class Organization implements ElasticEntity<Organization> {
    private String id;
    private String name;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    @Override
    public XContentBuilder toElasticEntity(XContentBuilder builder) throws IOException {
        builder.startObject();
        builder.field("id", this.id);
        builder.field("name", this.name);
        builder.endObject();
        return builder;
    }

    @Override
    public Organization fromElasticEntity(Map fields) {
        this.id = (String) fields.get("id");
        this.name = (String) fields.get("name");
        return this;
    }
}

@@ -39,7 +39,9 @@ public class Tag implements ElasticEntity {
    }

    @Override
    public Object fromElasticEntity(Map fields) {
        return null;
    public Tag fromElasticEntity(Map fields) {
        this.id = (String) fields.get("id");
        this.name = (String) fields.get("name");
        return this;
    }
}

@@ -2,18 +2,22 @@ package eu.eudat.elastic.repository;

import eu.eudat.elastic.criteria.DatasetCriteria;
import eu.eudat.elastic.entities.Dataset;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
import eu.eudat.elastic.entities.Tag;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.core.CountRequest;
import org.elasticsearch.client.core.CountResponse;
import org.elasticsearch.client.indices.GetIndexRequest;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.reindex.DeleteByQueryRequest;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@@ -21,12 +25,10 @@ import org.springframework.stereotype.Service;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.UUID;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Created by ikalyvas on 7/5/2018.
 */
@Service("datasetRepository")
public class DatasetRepository extends ElasticRepository<Dataset, DatasetCriteria> {

@@ -38,35 +40,110 @@ public class DatasetRepository extends ElasticRepository<Dataset, DatasetCriteri
    @Override
    public Dataset createOrUpdate(Dataset entity) throws IOException {
        XContentBuilder builder = XContentFactory.jsonBuilder();
        IndexRequest request = new IndexRequest("datasets", "doc", entity.getId()).source(entity.toElasticEntity(builder));
        this.getClient().index(request);
        IndexRequest request = new IndexRequest("datasets").id(entity.getId()).source(entity.toElasticEntity(builder));//new IndexRequest("datasets", "doc", entity.getId()).source(entity.toElasticEntity(builder));
        this.getClient().index(request, RequestOptions.DEFAULT);
        return entity;
    }

    @Override
    public Dataset findDocument(String id) throws IOException {
        GetRequest request = new GetRequest("datasets","doc",id);
        GetResponse response = this.getClient().get(request);
        GetRequest request = new GetRequest("datasets",id);
        GetResponse response = this.getClient().get(request, RequestOptions.DEFAULT);
        return new Dataset().fromElasticEntity(response.getSourceAsMap());
    }

    @Override
    public List<Dataset> query(DatasetCriteria criteria) throws ExecutionException, InterruptedException, IOException {
        SearchRequest searchRequest = new SearchRequest();
    public List<Dataset> query(DatasetCriteria criteria) throws IOException {
        SearchRequest searchRequest = new SearchRequest("datasets");
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();

        BoolQueryBuilder boolQuery = QueryBuilders.boolQuery()
                .should(QueryBuilders.termsQuery("tags.name.keyword", criteria.getTags().stream().map(x -> x.getName()).collect(Collectors.toList())));
        CountRequest countRequest = new CountRequest("datasets");
        countRequest.query(QueryBuilders.boolQuery().mustNot(QueryBuilders.termsQuery("status.keyword", Stream.of(Dataset.Status.DELETED.getValue(), Dataset.Status.CANCELED.getValue()).collect(Collectors.toList()))));
        CountResponse countResponse = getClient().count(countRequest, RequestOptions.DEFAULT);
        Long count = countResponse.getCount();

        searchSourceBuilder.size(count.intValue());

        BoolQueryBuilder boolQuery = QueryBuilders.boolQuery().mustNot(QueryBuilders.termsQuery("status.keyword", Stream.of(Dataset.Status.DELETED.getValue(), Dataset.Status.CANCELED.getValue()).collect(Collectors.toList())));
        if (criteria.isPublic()) {
            boolQuery = boolQuery.should(QueryBuilders.termQuery("public.keyword", "true"));
            boolQuery = boolQuery.should(QueryBuilders.termQuery("status.keyword", Dataset.Status.FINALISED.getValue()));
            boolQuery = boolQuery.should(QueryBuilders.termQuery("lastPublicVersion.keyword", "true"));
        }
        if (criteria.getLike() != null && !criteria.getLike().isEmpty()) {
            boolQuery = boolQuery.should(QueryBuilders.queryStringQuery(criteria.getLike()).fields(Stream.of(new Object[][] {
                    { "label", 1.0f },
                    { "description", 1.0f },
                    { "formData", 1.0f }
            }).collect(Collectors.toMap(data -> (String) data[0], data -> (Float) data[1]))));
        }

        if (criteria.getDatasetTemplates() != null && criteria.getDatasetTemplates().size() > 0) {
            boolQuery = boolQuery.should(QueryBuilders.termsQuery("template.keyword", criteria.getDatasetTemplates().stream().map(UUID::toString).collect(Collectors.toList())));
        }

        if (criteria.getStatus() != null) {
            boolQuery = boolQuery.should(QueryBuilders.termQuery("status.keyword", criteria.getStatus().toString()));
        }

        if (criteria.getDmps() != null && criteria.getDmps().size() > 0) {
            boolQuery = boolQuery.should(QueryBuilders.termsQuery("dmp.keyword", criteria.getDmps().stream().map(UUID::toString).collect(Collectors.toList())));
        }

        if (criteria.getGroupIds() != null && criteria.getGroupIds().size() > 0) {
            boolQuery = boolQuery.should(QueryBuilders.termsQuery("group.keyword", criteria.getGroupIds().stream().map(UUID::toString).collect(Collectors.toList())));
        }

        if (criteria.getGrants() != null && criteria.getGrants().size() > 0) {
            boolQuery = boolQuery.should(QueryBuilders.termsQuery("grant.keyword", criteria.getGrants().stream().map(UUID::toString).collect(Collectors.toList())));
        }

        if (criteria.getGrantStatus() != null) {
            boolQuery = boolQuery.should(QueryBuilders.termQuery("grantStatus.keyword", criteria.getGrantStatus().toString()));
        }

        if (criteria.getCollaborators() != null && criteria.getCollaborators().size() > 0) {
            boolQuery = boolQuery.should(QueryBuilders.termsQuery("collaborators.id.keyword", criteria.getCollaborators().stream().map(UUID::toString).collect(Collectors.toList())));
        }

        if (!criteria.isPublic()) {
            if (criteria.getAllowAllVersions() != null && !criteria.getAllowAllVersions()) {
                boolQuery = boolQuery.should(QueryBuilders.termQuery("lastVersion.keyword", "true"));
            }
        }

        if (criteria.getOrganiztions() != null && criteria.getOrganiztions().size() > 0) {
            boolQuery = boolQuery.should(QueryBuilders.termsQuery("organizations.id.keyword", criteria.getOrganiztions()));
        }

        if (criteria.getTags() != null && criteria.getTags().size() > 0) {
            boolQuery = boolQuery.should(QueryBuilders.termsQuery("tags.name.keyword", criteria.getTags().stream().map(Tag::getName).collect(Collectors.toList())));
        }

        if (boolQuery.should().isEmpty() && boolQuery.mustNot().isEmpty()) {
            boolQuery.should(QueryBuilders.matchAllQuery());
        } else {
            boolQuery.minimumShouldMatch(boolQuery.should().size());
        }
        searchSourceBuilder.query(boolQuery);
        searchRequest.source(searchSourceBuilder);
        SearchResponse response = this.getClient().search(searchRequest);
        SearchResponse response = this.getClient().search(searchRequest, RequestOptions.DEFAULT);
        return Arrays.stream(response.getHits().getHits()).map(x -> this.transformFromString(x.getSourceAsString(), Dataset.class)).collect(Collectors.toList());
    }

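A note on the query above: every criterion is added as a should clause, and minimumShouldMatch is then raised to the number of should clauses, so each optional clause becomes effectively mandatory while the builder code stays uniform. A minimal sketch of that pattern in isolation (field names and values are illustrative, not taken from the index mapping):

import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

public class MinimumShouldMatchSketch {
    public static void main(String[] args) {
        // Two optional clauses...
        BoolQueryBuilder q = QueryBuilders.boolQuery()
                .should(QueryBuilders.termQuery("status.keyword", "0"))
                .should(QueryBuilders.termQuery("lastVersion.keyword", "true"));
        // ...made mandatory: a hit must now satisfy every should clause,
        // so the two "should"s behave like "must"s.
        q.minimumShouldMatch(q.should().size());
        System.out.println(q);   // prints the generated query as JSON
    }
}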
    @Override
    public boolean exists() throws IOException {
        GetIndexRequest request = new GetIndexRequest();
        request.indices("datasets");
        return this.getClient().indices().exists(request);
        GetIndexRequest request = new GetIndexRequest("datasets");
//        request.indices("datasets");
        return this.getClient().indices().exists(request, RequestOptions.DEFAULT);
    }

    @Override
    public void clear() throws IOException {
        if (exists()) {
            DeleteByQueryRequest delete = new DeleteByQueryRequest("datasets");
            delete.setQuery(QueryBuilders.matchAllQuery());
            this.getClient().deleteByQuery(delete, RequestOptions.DEFAULT);
        }
    }
}

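Taken together, the repository changes track the 6.x-to-7.x migration of the REST high-level client: mapping types disappear from requests, the index-scoped GetIndexRequest replaces the admin variant, and every call takes explicit RequestOptions. A condensed before/after sketch, assuming a RestHighLevelClient is available (the helper below is illustrative, not part of the commit):

import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;

public class TypelessIndexSketch {
    // Client construction is omitted; see the configuration class later in this commit.
    static void index(RestHighLevelClient client, String id, String json) throws IOException {
        // 6.x style (removed): new IndexRequest("datasets", "doc", id) plus client.index(request)
        // 7.x style (added): typeless request, explicit RequestOptions on the call.
        IndexRequest request = new IndexRequest("datasets").id(id).source(json, XContentType.JSON);
        client.index(request, RequestOptions.DEFAULT);
    }
}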
@@ -19,4 +19,6 @@ public interface Repository<ET extends ElasticEntity, C extends Criteria> {
    List<ET> query(C criteria) throws ExecutionException, InterruptedException, IOException;

    boolean exists() throws IOException;

    void clear() throws IOException;
}

@@ -1,5 +0,0 @@
#Generated by Apache Maven
#Mon Jul 15 19:37:19 EEST 2019
version=1.0-SNAPSHOT
groupId=dmp-backend
artifactId=logging
@@ -43,7 +43,7 @@
        <slf4j.version>1.7.12</slf4j.version>
        <jetty.version>9.0.7.v20131107
        </jetty.version> <!-- Adapt this to a version found on http://repo.maven.apache.org/maven2/org/eclipse/jetty/jetty-maven-plugin/ -->
        <logback.version>1.1.1</logback.version>
        <logback.version>1.2.3</logback.version>
        <javax.inject.version>1</javax.inject.version>
        <javax.servlet.servlet-api.version>3.0.1</javax.servlet.servlet-api.version>
        <docker.image.prefix>ikalyvas</docker.image.prefix>
@@ -70,13 +70,13 @@
        <dependency>
            <groupId>org.elasticsearch</groupId>
            <artifactId>elasticsearch</artifactId>
            <version>6.3.1</version>
            <version>7.6.0</version>
        </dependency>

        <dependency>
            <groupId>org.elasticsearch.client</groupId>
            <artifactId>elasticsearch-rest-high-level-client</artifactId>
            <version>6.3.1</version>
            <version>7.6.0</version>
        </dependency>
        <dependency>
            <groupId>org.hibernate</groupId>
@@ -188,7 +188,13 @@
        <dependency>
            <groupId>org.elasticsearch.client</groupId>
            <artifactId>transport</artifactId>
            <version>6.3.0</version>
            <version>7.6.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-to-slf4j</artifactId>
            <version>2.8.2</version>
        </dependency>
    </dependencies>

@@ -1,20 +1,17 @@
package eu.eudat.configurations;

import org.apache.http.HttpHost;
import org.elasticsearch.client.Client;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;

import java.net.InetAddress;

/**
 * Created by ikalyvas on 7/5/2018.
 */
@@ -28,12 +25,18 @@ public class ElasticSearchConfiguration {
        this.environment = environment;
    }

    @Bean
    @Bean(destroyMethod = "close")
    public RestHighLevelClient client() throws Exception {
        final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
        credentialsProvider.setCredentials(AuthScope.ANY,
                new UsernamePasswordCredentials(this.environment.getProperty("elasticsearch.username"), this.environment.getProperty("elasticsearch.password")));

        RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(
                        new HttpHost(this.environment.getProperty("elasticsearch.host"),
                                Integer.parseInt(this.environment.getProperty("elasticsearch.port")), "http")));
                                Integer.parseInt(this.environment.getProperty("elasticsearch.port")), "http"))
                        .setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder
                                .setDefaultCredentialsProvider(credentialsProvider)));
        return client;
    }
}

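With the bean above registered (and closed on context shutdown via destroyMethod), any component can inject the shared client. A minimal health-check sketch, assuming the elasticsearch.host/port/username/password properties referenced above are set; the component name is illustrative:

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.io.IOException;

@Component
public class ElasticHealthCheck {
    private final RestHighLevelClient client;

    @Autowired
    public ElasticHealthCheck(RestHighLevelClient client) {
        this.client = client;
    }

    // ping() issues a lightweight request against the cluster root and
    // returns true when the cluster is reachable with the configured credentials.
    public boolean isUp() throws IOException {
        return client.ping(RequestOptions.DEFAULT);
    }
}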
@@ -7,6 +7,7 @@ import eu.eudat.logic.services.operations.authentication.AuthenticationService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.web.method.support.HandlerMethodArgumentResolver;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
@@ -15,6 +16,7 @@ import java.util.List;

@EnableAsync
@Configuration
@EnableScheduling
public class WebMVCConfiguration extends WebMvcConfigurerAdapter {

    private ApiContext apiContext;

@@ -11,6 +11,7 @@ import eu.eudat.data.query.items.table.dmp.DataManagementPlanTableRequest;
import eu.eudat.data.query.items.table.dmp.DataManagmentPlanPublicTableRequest;
import eu.eudat.exceptions.datamanagementplan.DMPNewVersionException;
import eu.eudat.exceptions.datamanagementplan.DMPWithDatasetsDeleteException;
import eu.eudat.exceptions.security.UnauthorisedException;
import eu.eudat.logic.managers.DataManagementPlanManager;
import eu.eudat.logic.managers.DatasetManager;
import eu.eudat.logic.proxy.config.configloaders.ConfigLoader;
@@ -52,6 +53,9 @@ import java.util.List;
import java.util.Map;
import java.util.UUID;

import static org.springframework.http.MediaType.APPLICATION_ATOM_XML;
import static org.springframework.http.MediaType.APPLICATION_JSON;


@RestController
@CrossOrigin
@@ -123,9 +127,13 @@ public class DMPs extends BaseController {
            DataManagementPlanOverviewModel dataManagementPlan = this.dataManagementPlanManager.getOverviewSingle(id, principal);
            return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<DataManagementPlanOverviewModel>().status(ApiMessageCode.NO_MESSAGE).payload(dataManagementPlan));
        } catch (Exception e) {
            if (e instanceof UnauthorisedException) {
                return ResponseEntity.status(HttpStatus.FORBIDDEN).body(new ResponseItem<DataManagementPlanOverviewModel>().status(ApiMessageCode.ERROR_MESSAGE));
            } else {
                return ResponseEntity.status(HttpStatus.NOT_FOUND).body(new ResponseItem<DataManagementPlanOverviewModel>().status(ApiMessageCode.ERROR_MESSAGE));
            }
        }
    }

    @RequestMapping(method = RequestMethod.GET, value = {"/public/{id}"})
    public @ResponseBody
@@ -219,8 +227,12 @@ public class DMPs extends BaseController {
    }

    @RequestMapping(method = RequestMethod.POST, value = {"/upload"})
    public ResponseEntity<ResponseItem> dmpXmlUpload(@RequestParam("file") MultipartFile[] files, Principal principal) throws Exception {
    public ResponseEntity<ResponseItem> dmpUpload(@RequestParam("file") MultipartFile[] files, Principal principal) throws Exception {
        if (files[0].getContentType().equals(APPLICATION_JSON.toString())) {
            this.dataManagementPlanManager.createFromRDA(files, principal);
        } else if (files[0].getContentType().equals(APPLICATION_ATOM_XML.toString())) {
            this.dataManagementPlanManager.createDmpFromXml(this.getApiContext(), files, principal);
        }
        return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<List>()
                .status(ApiMessageCode.SUCCESS_MESSAGE));
    }

@@ -15,6 +15,7 @@ import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;

import java.io.IOException;
import java.util.List;

@RestController
@@ -36,7 +37,7 @@ public class DashBoardController extends BaseController {
    }

    @RequestMapping(method = RequestMethod.GET, value = {"/dashboard/me/getStatistics"}, produces = "application/json")
    public ResponseEntity<ResponseItem<DashBoardStatistics>> getStatistics(Principal principal) {
    public ResponseEntity<ResponseItem<DashBoardStatistics>> getStatistics(Principal principal) throws IOException {
        DashBoardStatistics statistics = dashBoardManager.getMeStatistics(principal);
        return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<DashBoardStatistics>().status(ApiMessageCode.NO_MESSAGE).payload(statistics));
    }

@@ -4,6 +4,7 @@ import eu.eudat.data.entities.Dataset;
import eu.eudat.data.query.items.item.dataset.DatasetWizardAutocompleteRequest;
import eu.eudat.data.query.items.item.datasetprofile.DatasetProfileWizardAutocompleteRequest;
import eu.eudat.exceptions.datasetwizard.DatasetWizardCannotUnlockException;
import eu.eudat.exceptions.security.UnauthorisedException;
import eu.eudat.logic.managers.DatasetManager;
import eu.eudat.logic.managers.DatasetWizardManager;
import eu.eudat.logic.managers.UserManager;
@@ -15,6 +16,7 @@ import eu.eudat.models.data.datasetwizard.DataManagentPlanListingModel;
import eu.eudat.models.data.datasetwizard.DatasetWizardModel;
import eu.eudat.models.data.dmp.AssociatedProfile;
import eu.eudat.models.data.helpers.responses.ResponseItem;
import eu.eudat.models.data.listingmodels.DataManagementPlanOverviewModel;
import eu.eudat.models.data.security.Principal;
import eu.eudat.models.data.user.composite.PagedDatasetProfile;
import eu.eudat.types.ApiMessageCode;
@@ -30,6 +32,7 @@ import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;

import javax.persistence.NoResultException;
import javax.transaction.Transactional;
import java.io.File;
import java.io.FileInputStream;
@@ -79,13 +82,13 @@ public class DatasetWizardController extends BaseController {
    @Transactional
    @RequestMapping(method = RequestMethod.GET, value = {"{id}"}, produces = "application/json")
    public @ResponseBody
    ResponseEntity getSingle(@PathVariable String id, @RequestHeader("Content-Type") String contentType, @ClaimedAuthorities(claims = {ANONYMOUS}) Principal principal) throws IllegalAccessException, IOException, InstantiationException {
        if(contentType.equals("application/xml")) {
    ResponseEntity getSingle(@PathVariable String id, @RequestHeader("Content-Type") String contentType, Principal principal) throws IllegalAccessException, IOException, InstantiationException {
        try {
            if (contentType.equals("application/xml")) {
                VisibilityRuleService visibilityRuleService = this.getApiContext().getUtilitiesService().getVisibilityRuleService();
                return this.datasetManager.getDocument(id, visibilityRuleService, contentType);
            }
            else if (contentType.equals("application/msword")) {
                File file = datasetManager.getWordDocument(this.configLoader, id, this.getApiContext().getUtilitiesService().getVisibilityRuleService());
                return this.datasetManager.getDocument(id, visibilityRuleService, contentType, principal);
            } else if (contentType.equals("application/msword")) {
                File file = datasetManager.getWordDocumentFile(this.configLoader, id, this.getApiContext().getUtilitiesService().getVisibilityRuleService(), principal);
                InputStream resource = new FileInputStream(file);
                HttpHeaders responseHeaders = new HttpHeaders();
                responseHeaders.setContentLength(file.length());
@@ -100,11 +103,22 @@ public class DatasetWizardController extends BaseController {
                return new ResponseEntity<>(content,
                        responseHeaders,
                        HttpStatus.OK);
            }
            else {
                DatasetWizardModel dataset = this.datasetManager.getSingle(id);
            } else {
                DatasetWizardModel dataset = this.datasetManager.getSingle(id, principal);
                return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<DatasetWizardModel>().status(ApiMessageCode.NO_MESSAGE).payload(dataset));
            }
        } catch (Exception e) {
            if (e instanceof UnauthorisedException) {
                return ResponseEntity.status(HttpStatus.FORBIDDEN).body(new ResponseItem<DataManagementPlanOverviewModel>().status(ApiMessageCode.ERROR_MESSAGE));
            } else if (e instanceof NoResultException) {
                return ResponseEntity.status(HttpStatus.NOT_FOUND).body(new ResponseItem<DataManagementPlanOverviewModel>().status(ApiMessageCode.ERROR_MESSAGE));
            } else {
                throw e;
            }
        }
        return null; // ????
    }

    @RequestMapping(method = RequestMethod.GET, value = {"/public/{id}"}, produces = "application/json")
@@ -137,8 +151,8 @@ public class DatasetWizardController extends BaseController {

    @RequestMapping(method = RequestMethod.GET, value = {"/getPDF/{id}"})
    public @ResponseBody
    ResponseEntity<byte[]> getPDFDocument(@PathVariable String id) throws IllegalAccessException, IOException, InstantiationException, InterruptedException {
        File file = datasetManager.getWordDocument(this.configLoader, id, this.getApiContext().getUtilitiesService().getVisibilityRuleService());
    ResponseEntity<byte[]> getPDFDocument(@PathVariable String id, Principal principal) throws IllegalAccessException, IOException, InstantiationException, InterruptedException {
        File file = datasetManager.getWordDocumentFile(this.configLoader, id, this.getApiContext().getUtilitiesService().getVisibilityRuleService(), principal);
        String fileName = file.getName();
        if (fileName.endsWith(".docx")) {
            fileName = fileName.substring(0, fileName.length() - 5);

|
|||
|
||||
@RequestMapping(method = RequestMethod.POST, value = {"paged"}, consumes = "application/json", produces = "application/json")
|
||||
public @ResponseBody
|
||||
ResponseEntity<ResponseItem<DataTableData<DatasetListingModel>>> getPaged(@RequestBody DatasetTableRequest datasetTableRequest, Principal principal) throws Exception {
|
||||
ResponseEntity<ResponseItem<DataTableData<DatasetListingModel>>> getPaged(@RequestBody DatasetTableRequest datasetTableRequest, @ClaimedAuthorities(claims = {Authorities.ADMIN, Authorities.MANAGER, Authorities.USER, Authorities.ANONYMOUS}) Principal principal) throws Exception {
|
||||
DataTableData<DatasetListingModel> dataTable = this.datasetManager.getPaged(datasetTableRequest, principal);
|
||||
return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<DataTableData<DatasetListingModel>>().status(ApiMessageCode.NO_MESSAGE).payload(dataTable));
|
||||
}
|
||||
|
@ -66,5 +66,21 @@ public class Datasets extends BaseController {
|
|||
DataTableData<DatasetProfileListingModel> datasetProfileTableData = this.datasetManager.getDatasetProfilesUsedByDatasets(datasetProfileTableRequestItem, principal);
|
||||
return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<DataTableData<DatasetProfileListingModel>>().status(ApiMessageCode.NO_MESSAGE).payload(datasetProfileTableData));
|
||||
}
|
||||
|
||||
@javax.transaction.Transactional
|
||||
@RequestMapping(method = RequestMethod.POST, value = {"/index"})
|
||||
public @ResponseBody
|
||||
ResponseEntity<ResponseItem<Dataset>> generateIndex(Principal principal) throws Exception {
|
||||
this.datasetManager.generateIndex(principal);
|
||||
return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<eu.eudat.data.entities.Dataset>().status(ApiMessageCode.SUCCESS_MESSAGE).message("Generated").payload(null));
|
||||
}
|
||||
|
||||
@javax.transaction.Transactional
|
||||
@RequestMapping(method = RequestMethod.DELETE, value = {"/index"})
|
||||
public @ResponseBody
|
||||
ResponseEntity<ResponseItem<Dataset>> clearIndex(Principal principal) throws Exception {
|
||||
this.datasetManager.clearIndex(principal);
|
||||
return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<eu.eudat.data.entities.Dataset>().status(ApiMessageCode.SUCCESS_MESSAGE).message("Cleared").payload(null));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -19,7 +19,7 @@ import java.util.UUID;
@RequestMapping(value = {"/api/language/"})
public class LanguageController {

    Environment environment;
    private Environment environment;

    @Autowired
    public LanguageController(Environment environment) {

@@ -0,0 +1,56 @@
package eu.eudat.controllers;

import com.sun.org.apache.xpath.internal.operations.Bool;
import eu.eudat.logic.managers.LockManager;
import eu.eudat.models.data.dmp.DataManagementPlan;
import eu.eudat.models.data.helpers.responses.ResponseItem;
import eu.eudat.models.data.lock.Lock;
import eu.eudat.models.data.security.Principal;
import eu.eudat.types.ApiMessageCode;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.*;

import java.util.UUID;

@RestController
@CrossOrigin
@RequestMapping(value = {"/api/lock/"})
public class LockController {

    private LockManager lockManager;

    @Autowired
    public LockController(LockManager lockManager) {
        this.lockManager = lockManager;
    }

    @Transactional
    @RequestMapping(method = RequestMethod.GET, path = "target/status/{id}")
    public @ResponseBody ResponseEntity<ResponseItem<Boolean>> getLocked(@PathVariable String id, Principal principal) throws Exception {
        boolean locked = this.lockManager.isLocked(id, principal);
        return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<Boolean>().status(ApiMessageCode.SUCCESS_MESSAGE).message("locked").payload(locked));
    }

    @Transactional
    @RequestMapping(method = RequestMethod.DELETE, path = "target/unlock/{id}")
    public @ResponseBody ResponseEntity<ResponseItem<String>> unlock(@PathVariable String id, Principal principal) throws Exception {
        this.lockManager.unlock(id, principal);
        return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<String>().status(ApiMessageCode.SUCCESS_MESSAGE).message("Created").payload("Lock Removed"));
    }

    @Transactional
    @RequestMapping(method = RequestMethod.POST, consumes = "application/json", produces = "application/json")
    public @ResponseBody ResponseEntity<ResponseItem<UUID>> createOrUpdate(@RequestBody Lock lock, Principal principal) throws Exception {
        eu.eudat.data.entities.Lock result = this.lockManager.createOrUpdate(lock, principal);
        return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<UUID>().status(ApiMessageCode.SUCCESS_MESSAGE).message("Created").payload(result.getId()));
    }

    @RequestMapping(method = RequestMethod.GET, path = "target/{id}")
    public @ResponseBody ResponseEntity<ResponseItem<Lock>> getSingle(@PathVariable String id, Principal principal) throws Exception {
        Lock lock = this.lockManager.getFromTarget(id, principal);
        return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<Lock>().status(ApiMessageCode.NO_MESSAGE).payload(lock));
    }
}

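For reference, a hypothetical client-side sketch of the two simplest lock endpoints above, assuming the backend runs locally and authentication is handled elsewhere; the class, base URL, and id are illustrative:

import org.springframework.web.client.RestTemplate;

import java.util.Map;

public class LockClientSketch {
    public static void main(String[] args) {
        String baseUrl = "http://localhost:8080";                      // illustrative
        String targetId = "00000000-0000-0000-0000-000000000000";     // illustrative

        RestTemplate rest = new RestTemplate();

        // GET /api/lock/target/status/{id} -> ResponseItem<Boolean>, read here as a raw map.
        Map status = rest.getForObject(baseUrl + "/api/lock/target/status/" + targetId, Map.class);
        System.out.println(status);

        // DELETE /api/lock/target/unlock/{id} releases the lock once editing ends.
        rest.delete(baseUrl + "/api/lock/target/unlock/" + targetId);
    }
}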
@@ -1,30 +1,27 @@
package eu.eudat.controllers;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.elastic.criteria.DatasetCriteria;
import eu.eudat.elastic.criteria.TagCriteria;
import eu.eudat.elastic.entities.Dataset;
import eu.eudat.elastic.entities.Tag;
import eu.eudat.elastic.repository.Repository;
import eu.eudat.logic.proxy.config.ExternalUrlCriteria;
import eu.eudat.logic.proxy.config.exceptions.HugeResultSet;
import eu.eudat.logic.proxy.config.exceptions.NoURLFound;
import eu.eudat.logic.services.ApiContext;
import eu.eudat.models.data.external.TagExternalSourcesModel;
import eu.eudat.logic.utilities.helpers.StreamDistinctBy;
import eu.eudat.models.data.helpers.responses.ResponseItem;
import eu.eudat.types.ApiMessageCode;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.*;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;

/**
 * Created by ikalyvas on 7/5/2018.
@@ -46,12 +43,13 @@ public class TagController extends BaseController {

    @RequestMapping(method = RequestMethod.GET, value = {"/external/tags"}, produces = "application/json")
    public @ResponseBody
    ResponseEntity<ResponseItem<TagExternalSourcesModel>> listExternalTagModel(
            @RequestParam(value = "query", required = false) String query, @RequestParam(value = "type", required = false) String type) throws HugeResultSet, NoURLFound, IOException {
        ExternalUrlCriteria externalUrlCriteria = new ExternalUrlCriteria(query);
        List<Map<String, String>> remoteRepos = this.getApiContext().getOperationsContext().getRemoteFetcher().getTags(externalUrlCriteria, type);
        TagExternalSourcesModel researchersExternalSourcesModel = new TagExternalSourcesModel().fromExternalItem(remoteRepos);
    ResponseEntity<ResponseItem<List<Tag>>> listExternalTagModel(
            @RequestParam(value = "query", required = false) String query, @RequestParam(value = "type", required = false) String type) throws HugeResultSet, NoURLFound, IOException, ExecutionException, InterruptedException {
        //ExternalUrlCriteria externalUrlCriteria = new ExternalUrlCriteria(query);
        /*List<Map<String, String>> remoteRepos = this.getApiContext().getOperationsContext().getRemoteFetcher().getTags(externalUrlCriteria, type);
        TagExternalSourcesModel researchersExternalSourcesModel = new TagExternalSourcesModel().fromExternalItem(remoteRepos);*/
        List<Tag> tags = this.getApiContext().getOperationsContext().getDatasetRepository().query(new DatasetCriteria()).stream().map(Dataset::getTags).flatMap(Collection::stream).filter(StreamDistinctBy.distinctByKey(Tag::getId)).collect(Collectors.toList());

        return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<TagExternalSourcesModel>().payload(researchersExternalSourcesModel).status(ApiMessageCode.NO_MESSAGE));
        return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<List<Tag>>().payload(tags).status(ApiMessageCode.NO_MESSAGE));
    }
}

@@ -0,0 +1,70 @@
package eu.eudat.controllers;

import eu.eudat.models.data.helpers.responses.ResponseItem;
import eu.eudat.models.data.userguide.UserGuide;
import eu.eudat.types.ApiMessageCode;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;

import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

@RestController
@CrossOrigin
@RequestMapping(value = {"/api/userguide/"})
public class UserGuideController {

    private Environment environment;

    @Autowired
    public UserGuideController(Environment environment) {
        this.environment = environment;
    }

    @RequestMapping(path = "current", method = RequestMethod.GET)
    public ResponseEntity getUserGuide() throws IOException {
        Stream<Path> walk = Files.walk(Paths.get(this.environment.getProperty("userguide.path")));
        List<String> result = walk.filter(Files::isRegularFile)
                .map(Path::toString).collect(Collectors.toList());

        String fileName = result.get(0);
        InputStream is = new FileInputStream(fileName);

        String[] filepath = fileName.split("\\.")[0].split("\\\\");
        String simplename = filepath[filepath.length - 1];

        HttpHeaders responseHeaders = new HttpHeaders();
        responseHeaders.setContentLength(is.available());
        responseHeaders.setContentType(MediaType.TEXT_HTML);
        responseHeaders.set("Content-Disposition", "attachment;filename=" + simplename);
        responseHeaders.set("Access-Control-Expose-Headers", "Content-Disposition");
        responseHeaders.get("Access-Control-Expose-Headers").add("Content-Type");

        byte[] content = new byte[is.available()];
        is.read(content);
        is.close();

        return new ResponseEntity<>(content, responseHeaders, HttpStatus.OK);

    }

    @RequestMapping(value = "current", method = RequestMethod.POST)
    public @ResponseBody
    ResponseEntity<ResponseItem<String>> updateGuide(@RequestBody UserGuide guide) throws Exception {
        String fileName = this.environment.getProperty("userguide.path") + guide.getName() + ".html";
        OutputStream os = new FileOutputStream(fileName);
        os.write(guide.getHtml().getBytes());
        os.close();
        return ResponseEntity.status(HttpStatus.OK).body(new ResponseItem<String>().status(ApiMessageCode.SUCCESS_MESSAGE).message("Updated").payload("Updated"));
    }
}

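One caveat in getUserGuide() above: splitting the file name on literal backslashes ties the extraction to Windows path separators. A portable alternative sketch using java.nio (illustrative, not part of the commit):

import java.nio.file.Path;
import java.nio.file.Paths;

public class GuideNameSketch {
    public static void main(String[] args) {
        // Path.getFileName() honours the platform separator, so no "\\\\" split is needed.
        Path guide = Paths.get("guides", "userguide.html");
        String base = guide.getFileName().toString();
        int dot = base.lastIndexOf('.');
        String simplename = (dot > 0) ? base.substring(0, dot) : base;
        System.out.println(simplename); // prints: userguide
    }
}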
@@ -1,14 +1,12 @@
package eu.eudat.logic.managers;

import eu.eudat.data.dao.criteria.DataManagementPlanCriteria;
import eu.eudat.data.dao.criteria.DatasetCriteria;
import eu.eudat.data.dao.criteria.OrganisationCriteria;
import eu.eudat.data.dao.criteria.GrantCriteria;
import eu.eudat.data.dao.criteria.*;
import eu.eudat.data.dao.entities.DMPDao;
import eu.eudat.data.dao.entities.DatasetDao;
import eu.eudat.data.dao.entities.OrganisationDao;
import eu.eudat.data.dao.entities.GrantDao;
import eu.eudat.data.entities.*;
import eu.eudat.data.query.items.table.dmp.DataManagmentPlanPublicTableRequest;
import eu.eudat.logic.builders.model.models.RecentActivityDataBuilder;
import eu.eudat.logic.services.ApiContext;
import eu.eudat.logic.services.operations.DatabaseRepository;
@@ -18,15 +16,21 @@ import eu.eudat.models.data.dashboard.searchbar.SearchBarItem;
import eu.eudat.models.data.dashboard.statistics.DashBoardStatistics;
import eu.eudat.models.data.security.Principal;
import eu.eudat.types.searchbar.SearchBarItemType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;

@Component
public class DashBoardManager {
    private static final Logger logger = LoggerFactory.getLogger(DashBoardManager.class);

    private ApiContext apiContext;
    private DatabaseRepository databaseRepository;
@@ -40,13 +44,25 @@ public class DashBoardManager {
    public DashBoardStatistics getStatistics() {
        DashBoardStatistics statistics = new DashBoardStatistics();

        DataManagementPlanCriteria dataManagementPlanCriteria = new DataManagementPlanCriteria();
        DataManagmentPlanPublicTableRequest publicTableRequest = new DataManagmentPlanPublicTableRequest();

        DataManagementPlanPublicCriteria publicCriteria = new DataManagementPlanPublicCriteria();

        publicCriteria.setAllVersions(false);

        publicTableRequest.setCriteria(publicCriteria);

        publicTableRequest.setQuery(databaseRepository.getDmpDao().asQueryable());

        List<DMP> dmps = publicTableRequest.applyCriteria().toList();

//        DataManagementPlanCriteria dataManagementPlanCriteria = new DataManagementPlanCriteria();
        OrganisationCriteria organisationCriteria = new OrganisationCriteria();
        dataManagementPlanCriteria.setAllVersions(false);
        dataManagementPlanCriteria.setIsPublic(true);
        /*dataManagementPlanCriteria.setAllVersions(false);
        dataManagementPlanCriteria.setIsPublic(true);*/
        organisationCriteria.setPublic(true);

        List<DMP> dmps = databaseRepository.getDmpDao().getWithCriteria(dataManagementPlanCriteria).toList();
//        List<DMP> dmps = databaseRepository.getDmpDao().getWithCriteria(dataManagementPlanCriteria).toList();
        long numberOfDatasets = 0;
        LinkedList<Grant> grants = new LinkedList<>();
        for (DMP dmp : dmps) {
@@ -63,7 +79,8 @@ public class DashBoardManager {
        return statistics;
    }

    public DashBoardStatistics getMeStatistics(Principal principal) {
    public DashBoardStatistics getMeStatistics(Principal principal) throws IOException {
        List<eu.eudat.elastic.entities.Dataset> datasets = null;
        DashBoardStatistics statistics = new DashBoardStatistics();
        DMPDao dataManagementPlanRepository = databaseRepository.getDmpDao();
        DatasetDao datasetRepository = databaseRepository.getDatasetDao();
@@ -72,7 +89,19 @@ public class DashBoardManager {
        UserInfo user = new UserInfo();
        user.setId(principal.getId());
        DatasetCriteria datasetCriteria = new DatasetCriteria();
        if (apiContext.getOperationsContext().getDatasetRepository() != null) {
            try {
                eu.eudat.elastic.criteria.DatasetCriteria datasetElasticCriteria = new eu.eudat.elastic.criteria.DatasetCriteria();
                datasetElasticCriteria.setAllowAllVersions(false);
                datasetElasticCriteria.setPublic(false);
                datasets = apiContext.getOperationsContext().getDatasetRepository().query(datasetElasticCriteria);
            } catch (Exception e) {
                logger.warn(e.getMessage(), e);
                datasets = null;
            }
        }
        datasetCriteria.setAllVersions(false);
        datasetCriteria.setIsPublic(false);
        DataManagementPlanCriteria dataManagementPlanCriteria = new DataManagementPlanCriteria();
        dataManagementPlanCriteria.setAllVersions(false);
        GrantCriteria grantCriteria = new GrantCriteria();
@@ -80,7 +109,8 @@ public class DashBoardManager {
        List<Integer> roles = new LinkedList<>();
        CompletableFuture dmpFuture = dataManagementPlanRepository.getAuthenticated(dataManagementPlanRepository.getWithCriteria(dataManagementPlanCriteria), principal.getId(), roles).countAsync()
                .whenComplete((dmpsStats, throwable) -> statistics.setTotalDataManagementPlanCount(dmpsStats));
        CompletableFuture datasetFuture = datasetRepository.getAuthenticated(datasetRepository.getWithCriteria(datasetCriteria), user, roles).countAsync()
        List<eu.eudat.elastic.entities.Dataset> finalDatasets = datasets;
        CompletableFuture datasetFuture = datasetRepository.getAuthenticated(datasets != null ? datasetRepository.asQueryable().where((builder, root) -> root.get("id").in(finalDatasets.stream().map(x -> UUID.fromString(x.getId())).collect(Collectors.toList()))) : datasetRepository.getWithCriteria(datasetCriteria), user, roles).countAsync()
                .whenComplete((datasetsStats, throwable) -> statistics.setTotalDataSetCount(datasetsStats));
        CompletableFuture grantFuture = grantRepository.getAuthenticated(grantRepository.getWithCriteria(grantCriteria), user).countAsync()
                .whenComplete((grantsStats, throwable) -> statistics.setTotalGrantCount(grantsStats));

@@ -1,6 +1,7 @@
package eu.eudat.logic.managers;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.configurations.dynamicgrant.DynamicGrantConfiguration;
import eu.eudat.configurations.dynamicgrant.entities.Property;
@@ -9,10 +10,16 @@ import eu.eudat.data.dao.entities.*;
import eu.eudat.data.entities.Organisation;
import eu.eudat.data.entities.Researcher;
import eu.eudat.data.entities.*;
import eu.eudat.data.enumeration.notification.ActiveStatus;
import eu.eudat.data.enumeration.notification.ContactType;
import eu.eudat.data.enumeration.notification.NotificationType;
import eu.eudat.data.enumeration.notification.NotifyState;
import eu.eudat.data.query.items.item.dmp.DataManagementPlanCriteriaRequest;
import eu.eudat.data.query.items.table.datasetprofile.DatasetProfileTableRequestItem;
import eu.eudat.data.query.items.table.dmp.DataManagementPlanTableRequest;
import eu.eudat.data.query.items.table.dmp.DataManagmentPlanPublicTableRequest;
import eu.eudat.elastic.entities.Collaborator;
import eu.eudat.elastic.entities.Organization;
import eu.eudat.exceptions.datamanagementplan.DMPNewVersionException;
import eu.eudat.exceptions.datamanagementplan.DMPWithDatasetsDeleteException;
import eu.eudat.exceptions.security.UnauthorisedException;
@@ -35,12 +42,15 @@ import eu.eudat.models.data.dmp.*;
import eu.eudat.models.data.dynamicfields.DynamicFieldWithValue;
import eu.eudat.models.data.entities.xmlmodels.dmpprofiledefinition.DataManagementPlanProfile;
import eu.eudat.models.data.entities.xmlmodels.dmpprofiledefinition.Field;
import eu.eudat.models.data.funder.FunderDMPEditorModel;
import eu.eudat.models.data.grant.GrantDMPEditorModel;
import eu.eudat.models.data.helpermodels.Tuple;
import eu.eudat.models.data.helpers.common.DataTableData;
import eu.eudat.models.data.listingmodels.DataManagementPlanListingModel;
import eu.eudat.models.data.listingmodels.DataManagementPlanOverviewModel;
import eu.eudat.models.data.listingmodels.DatasetListingModel;
import eu.eudat.models.data.listingmodels.UserInfoListingModel;
import eu.eudat.models.data.project.ProjectDMPEditorModel;
import eu.eudat.models.data.rda.RDAExportModel;
import eu.eudat.models.data.security.Principal;
import eu.eudat.models.data.user.composite.PagedDatasetProfile;
@@ -59,6 +69,8 @@ import org.springframework.http.*;
import org.springframework.stereotype.Component;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.client.HttpServerErrorException;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.multipart.MultipartFile;
import org.w3c.dom.Document;
@@ -75,24 +87,34 @@ import java.nio.file.Files;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import java.util.stream.Stream;

@Component
public class DataManagementPlanManager {
    private static final Logger logger = LoggerFactory.getLogger(DataManagementPlanManager.class);

    private final Map<NotificationType, String> notificationPaths = Stream.of(new Object[][] {
            {NotificationType.DMP_MODIFIED, "/plans/edit"},
            {NotificationType.DMP_PUBLISH, "/plans/publicEdit"},
            {NotificationType.DMP_FINALISED, "/plans/edit"},
            {NotificationType.DMP_MODIFIED_FINALISED, "/plans/edit"}
    }).collect(Collectors.toMap(data -> (NotificationType) data[0], data -> (String) data[1]));

    private ApiContext apiContext;
    private DatasetManager datasetManager;
    private UtilitiesService utilitiesService;
    private DatabaseRepository databaseRepository;
    private Environment environment;
    private RDAManager rdaManager;

    @Autowired
    public DataManagementPlanManager(ApiContext apiContext, DatasetManager datasetManager, Environment environment) {
    public DataManagementPlanManager(ApiContext apiContext, DatasetManager datasetManager, Environment environment, RDAManager rdaManager) {
        this.apiContext = apiContext;
        this.datasetManager = datasetManager;
        this.utilitiesService = apiContext.getUtilitiesService();
        this.databaseRepository = apiContext.getOperationsContext().getDatabaseRepository();
        this.environment = environment;
        this.rdaManager = rdaManager;
    }

    public DataTableData<DataManagementPlanListingModel> getPaged(DataManagementPlanTableRequest dataManagementPlanTableRequest, Principal principal, String fieldsGroup) throws Exception {
@@ -180,6 +202,10 @@ public class DataManagementPlanManager {
    }

    public File getWordDocument(String id, Principal principal, ConfigLoader configLoader) throws IOException {
        return this.getWordDocument(id, principal, configLoader, false);
    }

    public File getWordDocument(String id, Principal principal, ConfigLoader configLoader, Boolean versioned) throws IOException {
        WordBuilder wordBuilder = new WordBuilder();
        VisibilityRuleService visibilityRuleService = this.utilitiesService.getVisibilityRuleService();
        DatasetWizardModel dataset = new DatasetWizardModel();
@@ -325,9 +351,14 @@ public class DataManagementPlanManager {
            document.removeBodyElement(0);
        }

        String fileName = dmpEntity.getLabel();
        String fileName = "";
        if (versioned) {
            fileName = dmpEntity.getLabel() + " v" + dmpEntity.getVersion();
        } else {
            fileName = dmpEntity.getLabel();
        }
        fileName = fileName.replaceAll("[^a-zA-Z0-9+ ]", "");
        File exportFile = new File(fileName + ".docx");
        File exportFile = new File(this.environment.getProperty("temp.temp") + fileName + ".docx");
        FileOutputStream out = new FileOutputStream(exportFile);
        document.write(out);
        out.close();
@@ -474,13 +505,16 @@ public class DataManagementPlanManager {
    }

    public DMP createOrUpdate(ApiContext apiContext, DataManagementPlanEditorModel dataManagementPlan, Principal principal) throws Exception {

        boolean setNotification = false;
        if (dataManagementPlan.getId() != null) {
            DMP dmp1 = apiContext.getOperationsContext().getDatabaseRepository().getDmpDao().find(dataManagementPlan.getId());

            if (!isUserOwnerOfDmp(dmp1, principal)) {
                throw new Exception("User not being the creator is not authorized to edit this DMP.");
            }
            if (dmp1.getModified().getTime() != dataManagementPlan.getModified().getTime()) {
                throw new Exception("Another user has already edited this DMP.");
            }
            List<Dataset> datasetList = new ArrayList<>(dmp1.getDataset());
            for (Dataset dataset : datasetList) {
                if (dataManagementPlan.getProfiles().stream().filter(associatedProfile -> dataset.getProfile().getId().equals(associatedProfile.getId())).findAny().orElse(null) == null)
@@ -488,6 +522,8 @@ public class DataManagementPlanManager {
            }
            if (dataManagementPlan.getStatus() == (int) DMP.DMPStatus.FINALISED.getValue() && dmp1.getStatus().equals(DMP.DMPStatus.FINALISED.getValue()))
                throw new Exception("DMP is finalized, therefore cannot be edited.");

            setNotification = true;
        }

        DMP newDmp = dataManagementPlan.toDataModel();
@@ -565,9 +601,41 @@ public class DataManagementPlanManager {
        if (dataManagementPlan.getAssociatedUsers().size() == 0)
            assignUser(newDmp, user);

        if (setNotification) {
            if (newDmp.getStatus() != DMP.DMPStatus.FINALISED.getValue()) {
                this.sendNotification(newDmp, user, NotificationType.DMP_MODIFIED);
            } else {
                this.sendNotification(newDmp, user, NotificationType.DMP_MODIFIED_FINALISED);
            }
        }

        return newDmp;
    }

    private void sendNotification(DMP dmp, UserInfo user, NotificationType notificationType) {
        List<UserDMP> userDMPS = databaseRepository.getUserDmpDao().asQueryable().where(((builder, root) -> builder.equal(root.get("dmp").get("id"), dmp.getId()))).toList();
        for (UserDMP userDMP : userDMPS) {
            if (!userDMP.getUser().getId().equals(user.getId())) {
                Notification notification = new Notification();
                notification.setUserId(user);
                notification.setType(notificationType);
                notification.setNotifyState(NotifyState.PENDING);
                notification.setIsActive(ActiveStatus.ACTIVE);
                notification.setData("{" +
                        "\"userId\": \"" + userDMP.getUser().getId() + "\"" +
                        ", \"id\": \"" + userDMP.getDmp().getId() + "\"" +
                        ", \"name\": \"" + userDMP.getDmp().getLabel() + "\"" +
                        ", \"path\": \"" + notificationPaths.get(notificationType) + "\"" +
                        "}");
                notification.setCreatedAt(new Date());
                notification.setUpdatedAt(notification.getCreatedAt());
                notification.setContactTypeHint(ContactType.EMAIL);
                notification.setContactHint(userDMP.getUser().getEmail());
                databaseRepository.getNotificationDao().createOrUpdate(notification);
            }
        }

    }

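The notification payload above is built by string concatenation, so a DMP label containing a double quote would produce invalid JSON. A sketch of the same payload shape built with Jackson (already imported in this class), where escaping is handled automatically; the values below are illustrative:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class NotificationDataSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        ObjectNode data = mapper.createObjectNode();
        // In the manager these values come from userDMP and the notificationPaths map.
        data.put("userId", "8b2f0000-0000-0000-0000-000000000000");
        data.put("id", "d41d0000-0000-0000-0000-000000000000");
        data.put("name", "My \"quoted\" DMP label");   // safely escaped by Jackson
        data.put("path", "/plans/edit");
        System.out.println(mapper.writeValueAsString(data));
    }
}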
private void assignUser(DMP dmp, UserInfo userInfo) {
|
||||
UserDMP userDMP = new UserDMP();
|
||||
|
@ -661,6 +729,21 @@ public class DataManagementPlanManager {
|
|||
DMP oldDmp = apiContext.getOperationsContext().getDatabaseRepository().getDmpDao().find(uuid);
|
||||
oldDmp.setStatus(DMP.DMPStatus.DELETED.getValue());
|
||||
apiContext.getOperationsContext().getDatabaseRepository().getDmpDao().createOrUpdate(oldDmp);
|
||||
DataManagementPlanCriteria criteria1 = new DataManagementPlanCriteria();
|
||||
criteria1.setAllVersions(true);
|
||||
criteria1.setGroupIds(Collections.singletonList(oldDmp.getGroupId()));
|
||||
apiContext.getOperationsContext().getDatabaseRepository().getDmpDao().getWithCriteria(criteria1).toList().forEach(dmp -> {
|
||||
for (Dataset dataset: dmp.getDataset()) {
|
||||
try {
|
||||
DatasetWizardModel datasetWizardModel = new DatasetWizardModel().fromDataModel(dataset);
|
||||
datasetWizardModel.setTags(apiContext.getOperationsContext().getDatasetRepository().findDocument(dataset.getId().toString()).getTags());
|
||||
datasetWizardModel.setDatasetProfileDefinition(this.datasetManager.getPagedProfile(datasetWizardModel, dataset));
|
||||
this.datasetManager.updateTags(apiContext.getOperationsContext().getDatasetRepository(), datasetWizardModel);
|
||||
} catch (Exception e) {
|
||||
logger.error(e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void createResearchersIfTheyDontExist(DMP newDmp, ResearcherDao researcherRepository, UserInfo user) {
|
||||
|
@ -694,8 +777,9 @@ public class DataManagementPlanManager {
|
|||
if (newDmp.getGrant() != null) {
|
||||
Grant grant = newDmp.getGrant();
|
||||
GrantCriteria criteria = new GrantCriteria();
|
||||
if (grant.getReference() != null) {
|
||||
criteria.setReference(grant.getReference());
|
||||
eu.eudat.data.entities.Grant grantEntity = grantDao.getWithCriteria(criteria).getSingleOrDefault();
|
||||
eu.eudat.data.entities.Grant grantEntity = grantDao.getWithCriteria(criteria).toList().stream().max(Comparator.comparing(grant1 -> grant1.getModified().getTime())).orElse(null);
|
||||
if (grantEntity != null) grant.setId(grantEntity.getId());
|
||||
else {
|
||||
grant.setType(Grant.GrantType.EXTERNAL.getValue());
|
||||
|
@@ -703,27 +787,39 @@ public class DataManagementPlanManager {
                    grantDao.createOrUpdate(grant);
                }
            }
            else {
                grant.setType(Grant.GrantType.EXTERNAL.getValue());
                grant.setCreationUser(null);
                grantDao.createOrUpdate(grant);
            }
        }
    }

    private void createFunderIfItDoesntExist(DMP newDmp, FunderDao funderDao) {
        if (newDmp.getGrant().getFunder() != null) {
            Funder funder = newDmp.getGrant().getFunder();
            FunderCriteria criteria = new FunderCriteria();
            if (funder.getReference() != null) {
                criteria.setReference(funder.getReference());
                eu.eudat.data.entities.Funder funderEntity = funderDao.getWithCritetia(criteria).getSingleOrDefault();
                eu.eudat.data.entities.Funder funderEntity = funderDao.getWithCritetia(criteria).toList().stream().max(Comparator.comparing(funder1 -> funder1.getModified().getTime())).orElse(null);
                if (funderEntity != null) funder.setId(funderEntity.getId());
                else {
                    funderDao.createOrUpdate(funder);
                }
            }
            else {
                funderDao.createOrUpdate(funder);
            }
        }
    }

    private void createProjectIfItDoesntExist(DMP newDmp, ProjectDao projectDao) {
        if (newDmp.getProject() != null) {
            Project project = newDmp.getProject();
            ProjectCriteria criteria = new ProjectCriteria();
            if (project.getReference() != null) {
                criteria.setReference(project.getReference());
                eu.eudat.data.entities.Project projectEntity = projectDao.getWithCritetia(criteria).getSingleOrDefault();
                eu.eudat.data.entities.Project projectEntity = projectDao.getWithCritetia(criteria).toList().stream().max(Comparator.comparing(project1 -> project1.getModified().getTime())).orElse(null);
                if (projectEntity != null) project.setId(projectEntity.getId());
                else {
                    project.setType(Project.ProjectType.EXTERNAL.getValue());

@@ -731,6 +827,12 @@ public class DataManagementPlanManager {
                    projectDao.createOrUpdate(project);
                }
            }
            else {
                project.setType(Project.ProjectType.EXTERNAL.getValue());
                if (project.getId() == null) project.setId(UUID.randomUUID());
                projectDao.createOrUpdate(project);
            }
        }
    }

    private void checkIfUserCanEditGrant(DMP dmp, UserInfo user) throws Exception{

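In all three helpers above, the lookup by reference changes from getSingleOrDefault() to a stream max over toList(): when duplicate rows share the same reference, the most recently modified one wins instead of the query failing or picking arbitrarily. A minimal sketch of that selection pattern (the Versioned type is illustrative, not from this codebase):

import java.util.Comparator;
import java.util.Date;
import java.util.List;

class Versioned {
    private final Date modified;
    Versioned(Date modified) { this.modified = modified; }
    Date getModified() { return modified; }
}

class LatestPicker {
    // Mirrors toList().stream().max(...).orElse(null) from the diff above:
    // returns the most recently modified match, or null when there is none.
    static Versioned pickLatest(List<Versioned> matches) {
        return matches.stream()
                .max(Comparator.comparing(Versioned::getModified))
                .orElse(null);
    }
}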
@@ -761,19 +863,19 @@ public class DataManagementPlanManager {
    }

    private void assignGrandUserIfInternal(DMP dmp, UserInfo user) {
        if (dmp.getGrant().getCreationUser() == null && dmp.getGrant().getReference().startsWith("dmp:")) {
        if (dmp.getGrant().getCreationUser() == null && (dmp.getGrant().getReference() != null && dmp.getGrant().getReference().startsWith("dmp:"))) {
            dmp.getGrant().setCreationUser(user);
        }
    }

    private void assignFunderUserIfInternal(DMP dmp, UserInfo user) {
        if (dmp.getGrant().getFunder().getCreationUser() == null && dmp.getGrant().getFunder().getReference().startsWith("dmp:")) {
        if (dmp.getGrant().getFunder().getCreationUser() == null && (dmp.getGrant().getFunder().getReference() != null && dmp.getGrant().getFunder().getReference().startsWith("dmp:"))) {
            dmp.getGrant().getFunder().setCreationUser(user);
        }
    }

    private void assignProjectUserIfInternal(DMP dmp, UserInfo user) {
        if (dmp.getProject().getCreationUser() == null && dmp.getProject().getReference().startsWith("dmp:")) {
        if (dmp.getProject().getCreationUser() == null && (dmp.getProject().getReference() != null && dmp.getProject().getReference().startsWith("dmp:"))) {
            dmp.getProject().setCreationUser(user);
        }
    }

@@ -784,6 +886,14 @@ public class DataManagementPlanManager {
        datasetDao.asQueryable().withHint(HintedModelFactory.getHint(DatasetListingModel.class)).where((builder, root) -> builder.equal(root.get("id"), dataset.getId())).getSingleAsync()
                .thenApplyAsync(entityDataset -> {
                    Dataset newDataset = new Dataset();
                    try {
                        DatasetWizardModel datasetWizardModel = new DatasetWizardModel().fromDataModel(entityDataset);
                        datasetWizardModel.setTags(apiContext.getOperationsContext().getDatasetRepository().findDocument(dataset.getId().toString()).getTags());
                        datasetWizardModel.setDatasetProfileDefinition(this.datasetManager.getPagedProfile(datasetWizardModel, entityDataset));
                        this.datasetManager.updateTags(apiContext.getOperationsContext().getDatasetRepository(), datasetWizardModel);
                    } catch (Exception e) {
                        logger.error(e.getMessage(), e);
                    }
                    newDataset.update(entityDataset);
                    newDataset.setDmp(newDmp);
                    newDataset.setStatus(Dataset.Status.SAVED.getValue());

@@ -834,7 +944,48 @@ public class DataManagementPlanManager {
                    newDataset.setCreated(new Date());
                    return newDataset;
                }).thenApplyAsync(item -> {
                    futures.add(datasetDao.createOrUpdateAsync(item));
                    futures.add(datasetDao.createOrUpdateAsync(item).whenComplete(((dataset1, throwable) -> {
                        eu.eudat.elastic.entities.Dataset datasetElastic = new eu.eudat.elastic.entities.Dataset();
                        datasetElastic.setId(dataset1.getId().toString());
                        datasetElastic.setLabel(dataset1.getLabel());
                        datasetElastic.setDescription(dataset1.getDescription());
                        datasetElastic.setTemplate(dataset1.getProfile().getId());
                        datasetElastic.setStatus(dataset1.getStatus());
                        datasetElastic.setDmp(dataset1.getDmp().getId());
                        datasetElastic.setGroup(dataset1.getDmp().getGroupId());
                        datasetElastic.setGrant(dataset1.getDmp().getGrant().getId());
                        if (dataset1.getDmp().getUsers() != null) {
                            datasetElastic.setCollaborators(dataset1.getDmp().getUsers().stream().map(user -> {
                                Collaborator collaborator = new Collaborator();
                                collaborator.setId(user.getId().toString());
                                // collaborator.setName(user.getUser().getName());
                                return collaborator;
                            }).collect(Collectors.toList()));
                        }
                        datasetElastic.setLastVersion(true);
                        datasetElastic.setLastPublicVersion(false);
                        if (dataset1.getDmp().getOrganisations() != null) {
                            datasetElastic.setOrganizations(dataset1.getDmp().getOrganisations().stream().map(org -> {
                                Organization organization = new Organization();
                                organization.setId(org.getId().toString());
                                organization.setName(org.getLabel());
                                return organization;
                            }).collect(Collectors.toList()));
                        }
                        datasetElastic.setPublic(dataset1.getDmp().isPublic());
                        datasetElastic.setGrantStatus(dataset1.getDmp().getGrant().getStatus());

                        try {
                            datasetElastic.setTags(apiContext.getOperationsContext().getDatasetRepository().findDocument(dataset.getId().toString()).getTags());
                            DatasetWizardModel datasetWizardModel = new DatasetWizardModel();
                            datasetWizardModel.setStatus(dataset1.getStatus());
                            datasetWizardModel.setDatasetProfileDefinition(this.datasetManager.getPagedProfile(datasetWizardModel, dataset1));
                            datasetElastic.setFormData(this.datasetManager.getWordDocumentText(datasetWizardModel));
                            apiContext.getOperationsContext().getDatasetRepository().createOrUpdate(datasetElastic);
                        } catch (Exception e) {
                            logger.error(e.getMessage(), e);
                        }
                    })));
                    return futures;
                }).join();

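The cloned dataset is now mirrored into Elasticsearch as a completion callback on the async save, so the returned future still carries the saved entity. A small runnable sketch of that whenComplete pattern (the string value stands in for the persisted dataset):

import java.util.concurrent.CompletableFuture;

class WhenCompleteDemo {
    public static void main(String[] args) {
        CompletableFuture<String> save = CompletableFuture.supplyAsync(() -> "saved-dataset");
        // whenComplete attaches a side effect (here, indexing) without
        // changing the value or exception the future completes with.
        save.whenComplete((entity, throwable) -> {
            if (throwable == null) {
                System.out.println("index " + entity + " into Elasticsearch");
            }
        }).join();
    }
}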
@@ -850,7 +1001,7 @@ public class DataManagementPlanManager {
        List<Dataset> datasets = dmp.getDataset().stream().collect(Collectors.toList());
        String fileName = dmp.getLabel();
        fileName = fileName.replaceAll("[^a-zA-Z0-9+ ]", "");
        File xmlFile = new File(fileName + ".xml");
        File xmlFile = new File(this.environment.getProperty("temp.temp") + fileName + ".xml");
        BufferedWriter writer = new BufferedWriter(new FileWriter(xmlFile, true));
        Document xmlDoc = XmlBuilder.getDocument();
        Element dmpElement = xmlDoc.createElement("dmp");

@@ -933,6 +1084,10 @@ public class DataManagementPlanManager {

        for (Dataset dataset : datasets) {
            Element datasetElement = xmlDoc.createElement("dataset");
            Element datsetProfileElement = xmlDoc.createElement("profile");
            datasetElement.setAttribute("name", dataset.getLabel());
            datasetElement.appendChild(datsetProfileElement);
            datsetProfileElement.setTextContent(dataset.getProfile().getId().toString());

            DatasetWizardModel datasetWizardModel = new DatasetWizardModel();
            Map<String, Object> properties = new HashMap<>();

@@ -976,15 +1131,20 @@ public class DataManagementPlanManager {
        eu.eudat.data.entities.DMP dmp = databaseRepository.getDmpDao().find(UUID.fromString(id));
        if (!dmp.isPublic() && dmp.getUsers().stream().noneMatch(userInfo -> userInfo.getUser().getId() == principal.getId()))
            throw new UnauthorisedException();
        RDAExportModel rdaExportModel = new RDAExportModel().fromDataModel(dmp, datasetManager);
        // RDAExportModel rdaExportModel = new RDAExportModel().fromDataModel(dmp, datasetManager, principal);
        String result = rdaManager.convertToRDA(dmp);

        ObjectMapper mapper = new ObjectMapper();
        mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
        String fileName = dmp.getLabel();
        fileName = fileName.replaceAll("[^a-zA-Z0-9+ ]", "");
        File file = new File(fileName);
        File file = new File(this.environment.getProperty("temp.temp") + fileName + ".json");
        OutputStream output = new FileOutputStream(file);
        try {
            mapper.writeValue(file, rdaExportModel);
            // mapper.writeValue(file, rdaExportModel);
            output.write(result.getBytes());
            output.flush();
            output.close();
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
        }

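The RDA export now serializes through rdaManager.convertToRDA and writes the JSON bytes into a file under the temp.temp directory, closing the stream by hand inside the try block. A hypothetical try-with-resources variant of that write (an alternative sketch, not what the diff does), which closes the stream even when write() throws:

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

class ExportWrite {
    static void write(File target, String json) throws IOException {
        // The stream is closed automatically, including on exceptions.
        try (OutputStream output = new FileOutputStream(target)) {
            output.write(json.getBytes(StandardCharsets.UTF_8));
        }
    }
}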
@@ -1049,7 +1209,7 @@ public class DataManagementPlanManager {
        // TODO Iterate through the list of dataManagementPlans.
        // Creates a new dataManagementPlan to fill it with the data model that was parsed from the xml.
        // Creates properties.
        DataManagementPlan dm = new DataManagementPlan();
        DataManagementPlanEditorModel dm = new DataManagementPlanEditorModel();
        DataManagementPlanProfile dmpProfile = new DataManagementPlanProfile();

        List<Field> fieldList = new LinkedList<>();

@@ -1063,12 +1223,29 @@ public class DataManagementPlanManager {
        /*Tuple tuple = new Tuple();
        tuple.setId(dataManagementPlans.get(0).getDmpProfile().getDmpProfileId());
        tuple.setLabel(dataManagementPlans.get(0).getDmpProfile().getDmpProfileName());*/
        eu.eudat.models.data.funder.Funder funder = new eu.eudat.models.data.funder.Funder();
        FunderImportModels funderImport = dataManagementPlans.get(0).getFunderImportModels();
        funder.setId(funderImport.getId());
        funder.setLabel(funderImport.getLabel());
        FunderDMPEditorModel funderEditor = new FunderDMPEditorModel();
        funderEditor.setExistFunder(funder);

        eu.eudat.models.data.grant.Grant grant = new eu.eudat.models.data.grant.Grant();
        GrantImportModels grantImport = dataManagementPlans.get(0).getGrantImport();
        grant.setId(grantImport.getId());
        grant.setLabel(grantImport.getLabel());
        grant.setAbbreviation(grantImport.getAbbreviation());
        grant.setDescription(grantImport.getDescription());
        GrantDMPEditorModel grantEditor = new GrantDMPEditorModel();
        grantEditor.setExistGrant(grant);

        eu.eudat.models.data.project.Project project = new eu.eudat.models.data.project.Project();
        ProjectImportModels projectImport = dataManagementPlans.get(0).getProjectImportModels();
        project.setId(projectImport.getId());
        project.setLabel(projectImport.getLabel());
        ProjectDMPEditorModel projectEditor = new ProjectDMPEditorModel();
        projectEditor.setExistProject(project);

        List<eu.eudat.models.data.dmp.AssociatedProfile> associatedProfiles = new LinkedList<>();
        for (AssociatedProfileImportModels a : dataManagementPlans.get(0).getProfilesImportModels()) {
            AssociatedProfile associatedProfile = new AssociatedProfile();

@@ -1080,14 +1257,16 @@ public class DataManagementPlanManager {
        for (OrganisationImportModel org : dataManagementPlans.get(0).getOrganisationImportModels()) {
            eu.eudat.models.data.dmp.Organisation organisation = new eu.eudat.models.data.dmp.Organisation();
            organisation.setLabel(org.getOrganaisationNameImport());
            organisation.setId(org.getOrganaisationReferenceImport());
            organisation.setReference(org.getOrganaisationReferenceImport());
            organisation.setKey(organisation.getReference().split(":")[0]);
            organisations.add(organisation);
        }
        List<eu.eudat.models.data.dmp.Researcher> researchers = new LinkedList<>();
        for (ResearcherImportModels res : dataManagementPlans.get(0).getResearchersImportModels()) {
            eu.eudat.models.data.dmp.Researcher researcher = new eu.eudat.models.data.dmp.Researcher();
            researcher.setLabel(res.getResearcherImportName());
            researcher.setId(res.getResearcherImportReference());
            researcher.setReference(res.getResearcherImportReference());
            researcher.setKey(researcher.getReference().split(":")[0]);
            researchers.add(researcher);
        }

@@ -1096,7 +1275,9 @@ public class DataManagementPlanManager {

        // Sets properties.
        dm.setLabel(files[0].getOriginalFilename()); // Sets label.
        dm.setGrant(grant); // Sets grant property.
        dm.setGrant(grantEditor); // Sets grant property.
        dm.setFunder(funderEditor);
        dm.setProject(projectEditor);
        dm.setDescription(dataManagementPlans.get(0).getDescriptionImport()); // Sets description property.
        dm.setProfiles(associatedProfiles);
        dm.setOrganisations(organisations); // Sets organisations property.

@@ -1106,6 +1287,41 @@ public class DataManagementPlanManager {
        dm.setDefinition(dmpProfile);

        //createOrUpdate(apiContext, dm, principal);
        DMP dmp = this.createOrUpdate(apiContext, dm, principal);
        if (dmp.getOrganisations() == null) {
            dmp.setOrganisations(new HashSet<>());
        }
        if (dmp.getResearchers() == null) {
            dmp.setResearchers(new HashSet<>());
        }
        if (dmp.getDataset() == null) {
            dmp.setDataset(new HashSet<>());
        }
        if (dmp.getUsers() == null) {
            dmp.setUsers(new HashSet<>());
        }
        if (dmp.getAssociatedDmps() == null) {
            dmp.setAssociatedDmps(new HashSet<>());
        }

        List<DatasetListingModel> datasets = new LinkedList<>();
        for (DatasetImportModels das: dataManagementPlans.get(0).getDatasetImportModels()) {
            eu.eudat.data.entities.Dataset dataset = new eu.eudat.data.entities.Dataset();
            dataset.setLabel(das.getName());
            dataset.setProfile(databaseRepository.getDatasetProfileDao().find(das.getProfile()));
            dataset.setProperties(new ObjectMapper().writeValueAsString(das.getFieldImportModels()));
            dataset.setStatus((short) 0);
            dataset.setRegistries(new HashSet<>());
            dataset.setDatasetDataRepositories(new HashSet<>());
            dataset.setServices(new HashSet<>());
            dataset.setDatasetExternalDatasets(new HashSet<>());
            dataset.setDmp(dmp);
            DatasetWizardModel datasetWizard = new DatasetWizardModel();
            datasetWizard.setDatasetProfileDefinition(this.datasetManager.getPagedProfile(datasetWizard, dataset));
            datasetWizard.fromDataModel(dataset);
            this.datasetManager.createOrUpdate(datasetWizard, principal);
            // datasets.add(new DatasetListingModel().fromDataModel(dataset));
        }

        logger.info(dm.toString());
    }

@@ -1113,6 +1329,62 @@ public class DataManagementPlanManager {
        return dataManagementPlans;
    }

    public List<DMP> createFromRDA(MultipartFile[] files, Principal principal) throws IOException {
        if (principal.getId() == null) {
            throw new UnauthorisedException("No user is logged in");
        }
        List<DMP> result = new ArrayList<>();
        for (MultipartFile file: files) {
            DMP dmp = rdaManager.convertToEntity(new String(file.getBytes(), "UTF-8"));
            dmp.setLabel(file.getOriginalFilename());
            UserInfo me = apiContext.getOperationsContext().getDatabaseRepository().getUserInfoDao().find(principal.getId());
            dmp.setModified(new Date());
            dmp.setCreator(me);
            dmp.setVersion(0);
            dmp.setStatus((short)0);
            dmp.setGroupId(UUID.randomUUID());
            if (dmp.getResearchers() != null && !dmp.getResearchers().isEmpty()) {
                dmp.getResearchers().forEach(researcher -> {
                    researcher.setId(UUID.randomUUID());
                    researcher.setCreated(new Date());
                    researcher.setModified(new Date());
                    researcher.setStatus((short) 0);
                    apiContext.getOperationsContext().getDatabaseRepository().getResearcherDao().createOrUpdate(researcher);
                });
            }
            databaseRepository.getDmpDao().createOrUpdate(dmp);
            assignUser(dmp, me);
            dmp.getDataset().forEach(dataset -> {
                dataset.setStatus(Dataset.Status.SAVED.getValue());
                dataset.setCreated(new Date());
                dataset.setModified(new Date());
                dataset.setDmp(dmp);
                dataset = databaseRepository.getDatasetDao().createOrUpdate(dataset);
                dataset.setRegistries(new HashSet<>());
                dataset.setDatasetDataRepositories(new HashSet<>());
                dataset.setDatasetExternalDatasets(new HashSet<>());
                dataset.setServices(new HashSet<>());
                if (dmp.getOrganisations() == null) {
                    dmp.setOrganisations(new HashSet<>());
                }
                if (dmp.getResearchers() == null) {
                    dmp.setResearchers(new HashSet<>());
                }
                dmp.setUsers(new HashSet<>(apiContext.getOperationsContext().getDatabaseRepository().getUserDmpDao().asQueryable().where((builder, root) -> builder.equal(root.get("dmp").get("id"), dmp.getId())).toList()));
                try {
                    DatasetWizardModel model = new DatasetWizardModel().fromDataModel(dataset);
                    model.setDatasetProfileDefinition(datasetManager.getPagedProfile(model, dataset));
                    datasetManager.updateTags(apiContext.getOperationsContext().getDatasetRepository(), model);
                } catch (Exception e) {
                    logger.error(e.getMessage(), e);
                }
            });
            result.add(dmp);
        }

        return result;
    }

    public DataTableData<DatasetProfileListingModel> getDatasetProfilesUsedByDMP(DatasetProfileTableRequestItem datasetProfileTableRequestItem, Principal principal) {
        datasetProfileTableRequestItem.getCriteria().setFilter(DatasetProfileCriteria.DatasetProfileFilter.DMPs.getValue());
        datasetProfileTableRequestItem.getCriteria().setUserId(principal.getId());

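Each imported RDA file is read as a UTF-8 string and converted into a draft DMP (version 0, status 0, fresh groupId), after which every contained dataset is saved and pushed to the search index. A small equivalent of the payload read using StandardCharsets, which avoids the checked UnsupportedEncodingException path of the "UTF-8" string literal (a minor variant, not what the diff does):

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.springframework.web.multipart.MultipartFile;

class RdaPayload {
    // Reads the uploaded RDA JSON exactly as the import above does,
    // but with a charset constant instead of the "UTF-8" literal.
    static String read(MultipartFile file) throws IOException {
        return new String(file.getBytes(), StandardCharsets.UTF_8);
    }
}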
@@ -1136,6 +1408,23 @@ public class DataManagementPlanManager {
            throw new Exception("DMP is not finalized");
        dmp.setPublic(true);
        apiContext.getOperationsContext().getDatabaseRepository().getDmpDao().createOrUpdate(dmp);
        DataManagementPlanCriteria criteria = new DataManagementPlanCriteria();
        criteria.setGroupIds(Collections.singletonList(dmp.getGroupId()));
        criteria.setAllVersions(true);
        apiContext.getOperationsContext().getDatabaseRepository().getDmpDao().getWithCriteria(criteria).toList().stream().forEach(dmp1 -> {
            dmp1.getDataset().forEach(dataset -> {
                try {
                    DatasetWizardModel datasetWizardModel = new DatasetWizardModel().fromDataModel(dataset);
                    datasetWizardModel.setTags(apiContext.getOperationsContext().getDatasetRepository().findDocument(dataset.getId().toString()).getTags());
                    datasetWizardModel.setDatasetProfileDefinition(this.datasetManager.getPagedProfile(datasetWizardModel, dataset));
                    this.datasetManager.updateTags(apiContext.getOperationsContext().getDatasetRepository(), datasetWizardModel);
                } catch (Exception e) {
                    logger.error(e.getMessage(), e);
                }
            });
        });
        UserInfo user = apiContext.getOperationsContext().getDatabaseRepository().getUserInfoDao().find(principal.getId());
        sendNotification(dmp, user, NotificationType.DMP_PUBLISH);
    }

    public void makeFinalize(UUID id, Principal principal, DatasetsToBeFinalized datasetsToBeFinalized) throws Exception {

@@ -1171,6 +1460,9 @@ public class DataManagementPlanManager {
            apiContext.getOperationsContext().getDatabaseRepository().getDatasetDao()
                    .asQueryable().where((builder, root) -> root.get("id").in(datasetsToBeCanceled))
                    .update(root -> root.<Integer>get("status"), Dataset.Status.CANCELED.getValue());

            UserInfo user = apiContext.getOperationsContext().getDatabaseRepository().getUserInfoDao().find(principal.getId());
            sendNotification(dmp, user, NotificationType.DMP_FINALISED);
        }
    }

@@ -1178,6 +1470,25 @@ public class DataManagementPlanManager {
        return (dmp.getUsers().stream().filter(userDMP -> userDMP.getRole().equals(UserDMP.UserDMPRoles.OWNER.getValue())).findFirst().get().getUser().getId()).equals(principal.getId());
    }

    private String getPreviousDOI(UUID groupId, UUID selfId) {
        DataManagementPlanCriteria criteria = new DataManagementPlanCriteria();
        List<UUID> groupIds = new ArrayList<>();
        groupIds.add(groupId);
        criteria.setGroupIds(groupIds);
        criteria.setAllVersions(true);
        List<DMP> dmps = this.databaseRepository.getDmpDao().getWithCriteria(criteria).toList();
        String doi = null;
        for (DMP dmp: dmps) {
            if (!dmp.getId().equals(selfId)) {
                if (dmp.getDoi() != null && !dmp.getDoi().isEmpty()) {
                    doi = dmp.getDoi();
                }
            }
        }

        return doi;
    }

    public String createZenodoDoi(UUID id, Principal principal, ConfigLoader configLoader) throws Exception {
        DMP dmp = this.apiContext.getOperationsContext().getDatabaseRepository().getDmpDao().find(id);
        if (!isUserOwnerOfDmp(dmp, principal))

@@ -1190,22 +1501,63 @@ public class DataManagementPlanManager {
        // First step, post call to Zenodo, to create the entry.
        RestTemplate restTemplate = new RestTemplate();
        HttpHeaders headers = new HttpHeaders();
        headers.set("accept", "application/json");
        headers.setAccept(Collections.singletonList(MediaType.APPLICATION_JSON));
        headers.setContentType(MediaType.APPLICATION_JSON);
        String createData = "{\n" +
                " \"metadata\": {\n" +
                " \"title\": \"" + dmp.getLabel() + "\",\n" +
                " \"upload_type\": \"publication\",\n" +
                " \"publication_type\": \"datamanagementplan\",\n" +
                " \"description\": \"" + dmp.getDescription() + "\",\n" +
                " \"description\": \"" + (dmp.getDescription() != null && !dmp.getDescription().isEmpty() ? dmp.getDescription() : "<p></p>") + "\",\n" +
                " \"version\": \"" + dmp.getVersion() + "\",\n" +
                " \"creators\": [{\n" +
                " \t\t\"name\": \"" + dmp.getUsers().stream().filter(userDMP -> userDMP.getRole().equals(UserDMP.UserDMPRoles.OWNER.getValue())).findFirst().get().getUser().getName() + "\",\n" +
                " \t\t\"affiliation\": \"OpenDMP\"}]\n" +
                " }\n" +
                "}";
        HttpEntity<String> request = new HttpEntity<>(createData, headers);
        JsonNode createDataJSON = new ObjectMapper().readTree(createData);
        HttpEntity<JsonNode> request = new HttpEntity<>(createDataJSON, headers);
        Map createResponse = null;
        LinkedHashMap<String, String> links = null;
        String previousDOI = this.getPreviousDOI(dmp.getGroupId(), dmp.getId());
        try {
            if (previousDOI == null) {
                String createUrl = this.environment.getProperty("zenodo.url") + "deposit/depositions" + "?access_token=" + this.environment.getProperty("zenodo.access_token");
                Map createResponse = restTemplate.postForObject(createUrl, request, Map.class);
                createResponse = restTemplate.postForEntity(createUrl, request, Map.class).getBody();
                links = (LinkedHashMap<String, String>) createResponse.get("links");
            } else {
                // It requires more than one step to create a new version.
                // First, get the deposit related to the concept DOI.
                String listUrl = this.environment.getProperty("zenodo.url") + "deposit/depositions" + "?q=conceptdoi:\"" + previousDOI + "\"&access_token=" + this.environment.getProperty("zenodo.access_token");
                ResponseEntity<Map[]> listResponses = restTemplate.getForEntity(listUrl, Map[].class);
                createResponse = listResponses.getBody()[0];
                links = (LinkedHashMap<String, String>) createResponse.get("links");
                // Second, make the new version (not in the links?).
                String newVersionUrl = links.get("self") + "/actions/newversion" + "?access_token=" + this.environment.getProperty("zenodo.access_token");
                createResponse = restTemplate.postForObject(newVersionUrl, null, Map.class);
                links = (LinkedHashMap<String, String>) createResponse.get("links");
                // Third, get the new deposit.
                String latestDraftUrl = links.get("latest_draft") + "?access_token=" + this.environment.getProperty("zenodo.access_token");
                createResponse = restTemplate.getForObject(latestDraftUrl, Map.class);
                links = (LinkedHashMap<String, String>) createResponse.get("links");
                // The next requests might fail at this point, so enclose them in a try/catch.
                try {
                    // Fourth, update the new deposit's metadata.
                    String updateUrl = links.get("self") + "?access_token=" + this.environment.getProperty("zenodo.access_token");
                    restTemplate.put(updateUrl, request);
                    // And finally remove the pre-existing files from it.
                    String fileListUrl = links.get("self") + "/files" + "?access_token=" + this.environment.getProperty("zenodo.access_token");
                    ResponseEntity<Map[]> fileListResponse = restTemplate.getForEntity(fileListUrl, Map[].class);
                    for (Map file : fileListResponse.getBody()) {
                        String fileDeleteUrl = links.get("self") + "/files/" + file.get("id") + "?access_token=" + this.environment.getProperty("zenodo.access_token");
                        restTemplate.delete(fileDeleteUrl);
                    }
                } catch (Exception e) {
                    // If the last two steps fail, delete the latest draft deposit so a new one can be created (only one draft is allowed at a time).
                    restTemplate.delete(latestDraftUrl);
                    throw e;
                }
            }

            // Second step, add the file to the entry.
            HttpHeaders fileHeaders = new HttpHeaders();

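The new-version branch talks to Zenodo's deposit API in five steps; the following summary comment restates the sequence exactly as implemented above (the endpoint paths come from the code, with the access_token query parameter omitted, and <self> and <latest_draft> taken from each deposit's "links" map):

// Zenodo DOI versioning flow used above:
//   1. GET    {zenodo.url}deposit/depositions?q=conceptdoi:"<previous DOI>"  -> the existing deposit
//   2. POST   <self>/actions/newversion                                      -> stub for the new version
//   3. GET    <latest_draft>                                                 -> the fresh draft deposit
//   4. PUT    <self> with the updated metadata payload
//   5. GET    <self>/files, then DELETE each file carried over from the old version
// A failure in step 4 or 5 deletes the draft, since only one open draft is
// allowed per record and a stale one would block the next attempt.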
@@ -1218,7 +1570,6 @@ public class DataManagementPlanManager {
            addFileMap.add("file", fileSystemResource);
            HttpEntity<MultiValueMap<String, Object>> addFileMapRequest = new HttpEntity<>(addFileMap, fileHeaders);

            LinkedHashMap<String, String> links = (LinkedHashMap<String, String>) createResponse.get("links");
            String addFileUrl = links.get("files") + "?access_token=" + this.environment.getProperty("zenodo.access_token");
            ResponseEntity<String> addFileResponse = restTemplate.postForEntity(addFileUrl, addFileMapRequest, String.class);
            Files.deleteIfExists(file.toPath());

@@ -1230,5 +1581,10 @@ public class DataManagementPlanManager {
            dmp.setDoi((String) publishResponce.get("conceptdoi"));
            apiContext.getOperationsContext().getDatabaseRepository().getDmpDao().createOrUpdate(dmp);
            return (String) publishResponce.get("conceptdoi");
        } catch (HttpClientErrorException | HttpServerErrorException ex) {
            ObjectMapper ob = new ObjectMapper();
            Map<String, String> parsedException = ob.readValue(ex.getResponseBodyAsString(), HashMap.class);
            throw new IOException(parsedException.get("message"), ex);
        }
    }
}

@@ -5,11 +5,19 @@ import eu.eudat.data.dao.entities.DataRepositoryDao;
import eu.eudat.data.dao.entities.DatasetDao;
import eu.eudat.data.dao.entities.RegistryDao;
import eu.eudat.data.entities.*;
import eu.eudat.data.enumeration.notification.ActiveStatus;
import eu.eudat.data.enumeration.notification.ContactType;
import eu.eudat.data.enumeration.notification.NotificationType;
import eu.eudat.data.enumeration.notification.NotifyState;
import eu.eudat.data.query.items.table.dataset.DatasetPublicTableRequest;
import eu.eudat.data.query.items.table.dataset.DatasetTableRequest;
import eu.eudat.data.query.items.table.datasetprofile.DatasetProfileTableRequestItem;
import eu.eudat.elastic.criteria.DatasetCriteria;
import eu.eudat.elastic.entities.Collaborator;
import eu.eudat.elastic.entities.Organization;
import eu.eudat.elastic.entities.Tag;
import eu.eudat.elastic.repository.DatasetRepository;
import eu.eudat.exceptions.security.UnauthorisedException;
import eu.eudat.logic.builders.BuilderFactory;
import eu.eudat.logic.builders.entity.UserInfoBuilder;
import eu.eudat.logic.proxy.config.configloaders.ConfigLoader;

@@ -20,18 +28,22 @@ import eu.eudat.logic.utilities.documents.helpers.FileEnvelope;
import eu.eudat.logic.utilities.documents.types.ParagraphStyle;
import eu.eudat.logic.utilities.documents.word.WordBuilder;
import eu.eudat.logic.utilities.documents.xml.ExportXmlBuilder;
import eu.eudat.logic.utilities.helpers.StreamDistinctBy;
import eu.eudat.models.HintedModelFactory;
import eu.eudat.models.data.datasetImport.DatasetImportField;
import eu.eudat.models.data.datasetImport.DatasetImportPagedDatasetProfile;
import eu.eudat.models.data.datasetprofile.DatasetProfileListingModel;
import eu.eudat.models.data.datasetwizard.DatasetWizardModel;
import eu.eudat.models.data.dmp.AssociatedProfile;
import eu.eudat.models.data.dmp.DataManagementPlan;
import eu.eudat.models.data.helpers.common.DataTableData;
import eu.eudat.models.data.listingmodels.DatasetListingModel;
import eu.eudat.models.data.security.Principal;
import eu.eudat.models.data.user.composite.PagedDatasetProfile;
import eu.eudat.queryable.QueryableList;
import eu.eudat.types.Authorities;
import org.apache.commons.io.IOUtils;
import org.apache.poi.xwpf.extractor.XWPFWordExtractor;
import org.apache.poi.xwpf.usermodel.XWPFDocument;
import org.apache.poi.xwpf.usermodel.XWPFParagraph;
import org.apache.poi.xwpf.usermodel.XWPFRun;

@@ -63,9 +75,7 @@ import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathFactory;
import java.io.*;
import java.math.BigInteger;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;

@@ -77,46 +87,100 @@ import java.util.zip.ZipInputStream;
public class DatasetManager {
    private static final Logger logger = LoggerFactory.getLogger(DatasetManager.class);

    private final Map<NotificationType, String> notificationPaths = Stream.of(new Object[][] {
            {NotificationType.DATASET_MODIFIED, "/datasets/edit"},
            {NotificationType.DATASET_MODIFIED_FINALISED, "/datasets/edit"}
    }).collect(Collectors.toMap(data -> (NotificationType) data[0], data -> (String) data[1]));

    private ApiContext apiContext;
    private DatabaseRepository databaseRepository;
    private DatasetRepository datasetRepository;
    private BuilderFactory builderFactory;
    private UserManager userManager;
    private ConfigLoader configLoader;
    private Environment environment;

    @Autowired
    public DatasetManager(ApiContext apiContext, UserManager userManager) {
    public DatasetManager(ApiContext apiContext, UserManager userManager, ConfigLoader configLoader, Environment environment) {
        this.apiContext = apiContext;
        this.databaseRepository = apiContext.getOperationsContext().getDatabaseRepository();
        this.datasetRepository = apiContext.getOperationsContext().getDatasetRepository();
        this.builderFactory = apiContext.getOperationsContext().getBuilderFactory();
        this.userManager = userManager;
        this.configLoader = configLoader;
        this.environment = environment;
    }

    public DataTableData<DatasetListingModel> getPaged(DatasetTableRequest datasetTableRequest, Principal principal) throws Exception {
        DatasetCriteria datasetCriteria = new DatasetCriteria();
        datasetCriteria.setLike(datasetTableRequest.getCriteria().getLike());
        datasetCriteria.setDatasetTemplates(datasetTableRequest.getCriteria().getDatasetTemplates());
        if (datasetTableRequest.getCriteria().getStatus() != null) {
            datasetCriteria.setStatus(datasetTableRequest.getCriteria().getStatus().shortValue());
        }
        datasetCriteria.setDmps(datasetTableRequest.getCriteria().getDmpIds());
        datasetCriteria.setGroupIds(datasetTableRequest.getCriteria().getGroupIds());
        datasetCriteria.setGrants(datasetTableRequest.getCriteria().getGrants());
        datasetCriteria.setCollaborators(datasetTableRequest.getCriteria().getCollaborators());
        datasetCriteria.setAllowAllVersions(datasetTableRequest.getCriteria().getAllVersions());
        datasetCriteria.setOrganiztions(datasetTableRequest.getCriteria().getOrganisations());
        datasetCriteria.setTags(datasetTableRequest.getCriteria().getTags());
        if (datasetTableRequest.getCriteria().getIsPublic() != null) {
            datasetCriteria.setPublic(datasetTableRequest.getCriteria().getIsPublic());
        }
        if (datasetTableRequest.getCriteria().getGrantStatus() != null) {
            datasetCriteria.setGrantStatus(datasetTableRequest.getCriteria().getGrantStatus());
        }
        List<eu.eudat.elastic.entities.Dataset> datasets;
        try {
            datasets = datasetCriteria.getTags() != null && datasetCriteria.getTags().size() > 0 && datasetRepository.exists() ?
                    datasetRepository.query(datasetCriteria) : new LinkedList<>();
            datasets = datasetRepository.exists() ?
                    datasetRepository.query(datasetCriteria) : null;
        } catch (Exception ex) {
            logger.warn(ex.getMessage());
            datasets = null;
        }

        UserInfo userInfo = builderFactory.getBuilder(UserInfoBuilder.class).id(principal.getId()).build();
        QueryableList<eu.eudat.data.entities.Dataset> items = databaseRepository.getDatasetDao().getWithCriteria(datasetTableRequest.getCriteria()).withHint(HintedModelFactory.getHint(DatasetListingModel.class));
        if (datasets != null && datasetTableRequest.getCriteria().getTags() != null && !datasetTableRequest.getCriteria().getTags().isEmpty()) {
        // QueryableList<eu.eudat.data.entities.Dataset> items = databaseRepository.getDatasetDao().getWithCriteria(datasetTableRequest.getCriteria()).withHint(HintedModelFactory.getHint(DatasetListingModel.class));
        QueryableList<eu.eudat.data.entities.Dataset> items;
        if (datasets != null) {

            if (!datasets.isEmpty()) {
                items = databaseRepository.getDatasetDao().asQueryable().withHint(HintedModelFactory.getHint(DatasetListingModel.class));
                List<eu.eudat.elastic.entities.Dataset> finalDatasets = datasets;
                items.where((builder, root) -> root.get("id").in(finalDatasets.stream().map(x -> UUID.fromString(x.getId())).collect(Collectors.toList())));
            } else
                items.where((builder, root) -> root.get("id").in(new UUID[]{UUID.randomUUID()}));
            } else {
                items = databaseRepository.getDatasetDao().getWithCriteria(datasetTableRequest.getCriteria()).withHint(HintedModelFactory.getHint(DatasetListingModel.class));
                //items.where((builder, root) -> root.get("id").in(new UUID[]{UUID.randomUUID()}));
            }
        } else {
            items = databaseRepository.getDatasetDao().getWithCriteria(datasetTableRequest.getCriteria()).withHint(HintedModelFactory.getHint(DatasetListingModel.class));
        }
        List<Integer> roles = new LinkedList<>();
        if (datasetTableRequest.getCriteria().getRole() != null) roles.add(datasetTableRequest.getCriteria().getRole());
        QueryableList<eu.eudat.data.entities.Dataset> authItems = databaseRepository.getDatasetDao().getAuthenticated(items, userInfo, roles);
        QueryableList<eu.eudat.data.entities.Dataset> pagedItems = PaginationManager.applyPaging(authItems, datasetTableRequest);
        DataTableData<DatasetListingModel> dataTable = new DataTableData<DatasetListingModel>();
        QueryableList<eu.eudat.data.entities.Dataset> pagedItems;
        QueryableList<eu.eudat.data.entities.Dataset> authItems;
        if (!datasetTableRequest.getCriteria().getIsPublic()) {
            if (principal.getId() == null) {
                throw new UnauthorisedException();
            }
            if (datasetTableRequest.getCriteria().getRole() != null)
                roles.add(datasetTableRequest.getCriteria().getRole());
            authItems = databaseRepository.getDatasetDao().getAuthenticated(items, userInfo, roles);
            pagedItems = PaginationManager.applyPaging(authItems, datasetTableRequest);
        } else {
            if (principal.getId() != null && datasetTableRequest.getCriteria().getRole() != null) {
                items.where((builder, root) -> {
                    Join userJoin = root.join("dmp", JoinType.LEFT).join("users", JoinType.LEFT);
                    return builder.and(builder.equal(userJoin.join("user", JoinType.LEFT).get("id"), principal.getId()), builder.equal(userJoin.get("role"), datasetTableRequest.getCriteria().getRole()));
                });
            }
            String[] strings = new String[1];
            strings[0] = "-dmp:publishedAt|join|";
            datasetTableRequest.getOrderings().setFields(strings);
            authItems = items;
            pagedItems = PaginationManager.applyPaging(items, datasetTableRequest);
        }
        DataTableData<DatasetListingModel> dataTable = new DataTableData<>();

        CompletableFuture<List<DatasetListingModel>> itemsFuture = pagedItems.

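getPaged now filters in two stages: the criteria are sent to the Elasticsearch index first, and the JPA query is then restricted to the ids the index returned; an empty index result is represented by filtering on a single fresh random UUID, which matches no row and so yields an empty page. A compact sketch of that id handoff (names are illustrative, not from this codebase):

import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;

class TwoStageFilter {
    // Converts the ids coming back from the search index into the UUID list
    // that the relational query's in(...) restriction consumes.
    static List<UUID> idsFromIndex(List<String> elasticIds) {
        return elasticIds.stream().map(UUID::fromString).collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<String> hits = Arrays.asList(UUID.randomUUID().toString());
        System.out.println(idsFromIndex(hits));
    }
}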
@@ -134,23 +198,32 @@ public class DatasetManager {

    public DataTableData<DatasetListingModel> getPaged(DatasetPublicTableRequest datasetTableRequest, Principal principal) throws Exception {
        DatasetCriteria datasetCriteria = new DatasetCriteria();
        datasetCriteria.setTags(datasetTableRequest.getCriteria().getTags());
        datasetCriteria.setLike(datasetTableRequest.getCriteria().getLike());
        datasetCriteria.setDatasetTemplates(datasetTableRequest.getCriteria().getDatasetProfile());
        datasetCriteria.setDmps(datasetTableRequest.getCriteria().getDmpIds());
        datasetCriteria.setGrants(datasetTableRequest.getCriteria().getGrants());
        List<eu.eudat.elastic.entities.Dataset> datasets;
        try {
            datasets = datasetCriteria.getTags() != null && datasetCriteria.getTags().size() > 0 && datasetRepository.exists() ?
            datasets = datasetRepository.exists() ?
                    datasetRepository.query(datasetCriteria) : new LinkedList<>();
        } catch (Exception ex) {
            logger.warn(ex.getMessage());
            datasets = null;
        }
        datasetTableRequest.setQuery(databaseRepository.getDatasetDao().asQueryable().withHint(HintedModelFactory.getHint(DatasetListingModel.class)));
        QueryableList<Dataset> items = datasetTableRequest.applyCriteria();
        if (datasets != null && datasetTableRequest.getCriteria().getTags() != null && !datasetTableRequest.getCriteria().getTags().isEmpty()) {
        /*QueryableList<Dataset> items;
        if (datasets != null) {
            if (!datasets.isEmpty()) {
                items = databaseRepository.getDatasetDao().asQueryable().withHint(HintedModelFactory.getHint(DatasetListingModel.class));
                List<eu.eudat.elastic.entities.Dataset> finalDatasets = datasets;
                items.where((builder, root) -> root.get("id").in(finalDatasets.stream().map(x -> UUID.fromString(x.getId())).collect(Collectors.toList())));
            } else
                items = datasetTableRequest.applyCriteria();
                items.where((builder, root) -> root.get("id").in(new UUID[]{UUID.randomUUID()}));
            }
        } else {
            items = datasetTableRequest.applyCriteria();
        }*/

        if (principal.getId() != null && datasetTableRequest.getCriteria().getRole() != null) {
            items.where((builder, root) -> {

@@ -177,15 +250,20 @@ public class DatasetManager {
        return dataTable;
    }

    public DatasetWizardModel getSingle(String id) {
    public DatasetWizardModel getSingle(String id, Principal principal) {
        DatasetWizardModel dataset = new DatasetWizardModel();
        eu.eudat.data.entities.Dataset datasetEntity = databaseRepository.getDatasetDao().find(UUID.fromString(id), HintedModelFactory.getHint(DatasetWizardModel.class));
        if (datasetEntity.getDmp().getUsers()
                .stream().filter(userInfo -> userInfo.getUser().getId() == principal.getId())
                .collect(Collectors.toList()).size() == 0)
            throw new UnauthorisedException();
        eu.eudat.elastic.entities.Dataset datasetElastic;
        try {
            datasetElastic = datasetRepository.exists() ?
                    datasetRepository.findDocument(id) : new eu.eudat.elastic.entities.Dataset();
        } catch (Exception ex) {
            datasetElastic = new eu.eudat.elastic.entities.Dataset();
            logger.warn(ex.getMessage());
            datasetElastic = null;
        }
        dataset.setDatasetProfileDefinition(getPagedProfile(dataset, datasetEntity));
        dataset.fromDataModel(datasetEntity);

@@ -222,7 +300,13 @@ public class DatasetManager {
        boolean latestVersion = profile.getVersion().toString().equals(datasetEntity.getProfile().getVersion().toString());
        dataset.setIsProfileLatestVersion(latestVersion);

        if (datasetElastic != null && datasetElastic.getTags() != null && !datasetElastic.getTags().isEmpty()) {
            dataset.setTags(datasetElastic.getTags());
        }

        /*if (datasetElastic != null && datasetElastic.getLabel() != null && !datasetElastic.getLabel().isEmpty()) {
            dataset.setLabel(datasetElastic.getLabel());
        }*/
        return dataset;
    }

@@ -252,11 +336,11 @@ public class DatasetManager {
        return pagedDatasetProfile;
    }

    public File getWordDocument(ConfigLoader configLoader, String id, VisibilityRuleService visibilityRuleService) throws IOException {
    private XWPFDocument getWordDocument(ConfigLoader configLoader, eu.eudat.data.entities.Dataset datasetEntity, VisibilityRuleService visibilityRuleService) throws IOException {
        WordBuilder wordBuilder = new WordBuilder();
        DatasetWizardModel dataset = new DatasetWizardModel();
        XWPFDocument document = configLoader.getDocument();
        eu.eudat.data.entities.Dataset datasetEntity = databaseRepository.getDatasetDao().find(UUID.fromString(id), HintedModelFactory.getHint(DatasetWizardModel.class));

        wordBuilder.addParagraphContent(datasetEntity.getLabel(), document, ParagraphStyle.HEADER1, BigInteger.ZERO);

        // Space below Dataset title.

@@ -327,16 +411,72 @@ public class DatasetManager {
            document.removeBodyElement(0);
        }

        return document;
        //FileOutputStream out = new FileOutputStream(exportFile);
        // document.write(out);
        // out.close();
        // return exportFile;
    }

    private XWPFDocument getLightWordDocument(ConfigLoader configLoader, DatasetWizardModel dataset, VisibilityRuleService visibilityRuleService) throws IOException {
        WordBuilder wordBuilder = new WordBuilder();
        XWPFDocument document = configLoader.getDocument();

        // Space below Dataset title.
        XWPFParagraph parBreakDataset = document.createParagraph();

        Map<String, Object> properties = new HashMap<>();
        if (dataset.getDatasetProfileDefinition() != null) {
            JSONObject jObject = new JSONObject(propertiesModelToString(dataset.getDatasetProfileDefinition()));
            properties = jObject.toMap();
        }

        wordBuilder.addParagraphContent("Dataset Description", document, ParagraphStyle.HEADER2, BigInteger.ZERO);
        visibilityRuleService.setProperties(properties);
        visibilityRuleService.buildVisibilityContext(dataset.getDatasetProfileDefinition().getRules());
        wordBuilder.build(document, dataset.getDatasetProfileDefinition(), visibilityRuleService);

        // Removes the top empty headings.
        for (int i = 0; i < 6; i++) {
            document.removeBodyElement(0);
        }

        return document;
        //FileOutputStream out = new FileOutputStream(exportFile);
        // document.write(out);
        // out.close();
        // return exportFile;
    }

    public File getWordDocumentFile(ConfigLoader configLoader, String id, VisibilityRuleService visibilityRuleService, Principal principal) throws IOException {
        eu.eudat.data.entities.Dataset datasetEntity = databaseRepository.getDatasetDao().find(UUID.fromString(id), HintedModelFactory.getHint(DatasetWizardModel.class));
        if (!datasetEntity.getDmp().isPublic() && datasetEntity.getDmp().getUsers()
                .stream().filter(userInfo -> userInfo.getUser().getId() == principal.getId())
                .collect(Collectors.toList()).size() == 0)
            throw new UnauthorisedException();
        String label = datasetEntity.getLabel().replaceAll("[^a-zA-Z0-9+ ]", "");
        File exportFile = new File(this.environment.getProperty("temp.temp") + label + ".docx");
        XWPFDocument document = getWordDocument(configLoader, datasetEntity, visibilityRuleService);
        FileOutputStream out = new FileOutputStream(exportFile);
        document.write(out);
        out.close();
        return exportFile;
    }

    public FileEnvelope getXmlDocument(String id, VisibilityRuleService visibilityRuleService) throws InstantiationException, IllegalAccessException, IOException {
    public String getWordDocumentText (DatasetWizardModel datasetEntity) throws Exception {
        XWPFDocument document = getLightWordDocument(this.configLoader, datasetEntity, this.apiContext.getUtilitiesService().getVisibilityRuleService());
        XWPFWordExtractor extractor = new XWPFWordExtractor(document);
        return extractor.getText();/*.replaceAll("\n\\s*", " ");*/
    }

    public FileEnvelope getXmlDocument(String id, VisibilityRuleService visibilityRuleService, Principal principal) throws InstantiationException, IllegalAccessException, IOException {
        ExportXmlBuilder xmlBuilder = new ExportXmlBuilder();
        DatasetWizardModel dataset = new DatasetWizardModel();
        eu.eudat.data.entities.Dataset datasetEntity = databaseRepository.getDatasetDao().find(UUID.fromString(id), HintedModelFactory.getHint(DatasetWizardModel.class));
        if (!datasetEntity.getDmp().isPublic() && datasetEntity.getDmp().getUsers()
                .stream().filter(userInfo -> userInfo.getUser().getId() == principal.getId())
                .collect(Collectors.toList()).size() == 0)
            throw new UnauthorisedException();
        Map<String, Object> properties = new HashMap<>();
        if (datasetEntity.getProperties() != null) {
            JSONObject jobject = new JSONObject(datasetEntity.getProperties());

@@ -345,7 +485,7 @@ public class DatasetManager {
        PagedDatasetProfile pagedDatasetProfile = getPagedProfile(dataset, datasetEntity);
        visibilityRuleService.setProperties(properties);
        visibilityRuleService.buildVisibilityContext(pagedDatasetProfile.getRules());
        File file = xmlBuilder.build(pagedDatasetProfile, datasetEntity.getProfile().getId(), visibilityRuleService);
        File file = xmlBuilder.build(pagedDatasetProfile, datasetEntity.getProfile().getId(), visibilityRuleService, environment);
        FileEnvelope fileEnvelope = new FileEnvelope();
        fileEnvelope.setFile(file);
        fileEnvelope.setFilename(datasetEntity.getLabel());

@@ -400,12 +540,23 @@ public class DatasetManager {
    }

    public eu.eudat.data.entities.Dataset createOrUpdate(DatasetWizardModel datasetWizardModel, Principal principal) throws Exception {
        Boolean sendNotification = false;
        Dataset tempDataset = null;
        DMP dmp = apiContext.getOperationsContext().getDatabaseRepository().getDmpDao().find(datasetWizardModel.getDmp().getId());
        if (datasetWizardModel.getId() != null) {
            tempDataset = apiContext.getOperationsContext().getDatabaseRepository().getDatasetDao().find(datasetWizardModel.getId());
            if (tempDataset != null) {
                if (datasetWizardModel.getModified().getTime() != tempDataset.getModified().getTime()) {
                    throw new Exception("Dataset has been modified already by another user.");
                }
                sendNotification = true;
            }
        }
        if (dmp.getStatus().equals(DMP.DMPStatus.FINALISED.getValue()) && datasetWizardModel.getId() != null)
            throw new Exception("DMP is finalized, therefore Dataset cannot be edited.");
        eu.eudat.data.entities.Dataset dataset = datasetWizardModel.toDataModel();
        dataset.setDmp(dmp);
        propertiesModelToString(datasetWizardModel, dataset);
        dataset.setProperties(propertiesModelToString(datasetWizardModel.getDatasetProfileDefinition()));
        if (datasetWizardModel.getStatus() == (int) Dataset.Status.FINALISED.getValue())
            checkDatasetValidation(dataset);
        UserInfo userInfo = apiContext.getOperationsContext().getBuilderFactory().getBuilder(UserInfoBuilder.class).id(principal.getId()).build();

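The new guard in createOrUpdate is a timestamp-based optimistic concurrency check: the client echoes back the "modified" value it loaded, and a mismatch with the stored row means another user saved in the meantime. The comparison in isolation (a minimal sketch, reusing the exception message from the diff above):

import java.util.Date;

class OptimisticCheck {
    // Throws when the stored row changed since the client last read it.
    static void assertUnchanged(Date clientModified, Date storedModified) throws Exception {
        if (clientModified.getTime() != storedModified.getTime()) {
            throw new Exception("Dataset has been modified already by another user.");
        }
    }
}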
@@ -415,13 +566,49 @@ public class DatasetManager {
        createExternalDatasetsIfTheyDontExist(dataset);
        createRegistriesIfTheyDontExist(apiContext.getOperationsContext().getDatabaseRepository().getRegistryDao(), dataset);
        createServicesIfTheyDontExist(dataset);

        Dataset dataset1 = apiContext.getOperationsContext().getDatabaseRepository().getDatasetDao().createOrUpdate(dataset);
        datasetWizardModel.setId(dataset1.getId());
        if (datasetWizardModel.getDmp().getGrant() == null) {
            datasetWizardModel.setDmp(new DataManagementPlan().fromDataModelNoDatasets(dataset1.getDmp()));
        }
        dataset1.setProfile(this.apiContext.getOperationsContext().getDatabaseRepository().getDatasetProfileDao().find(datasetWizardModel.getProfile()));
        datasetWizardModel.setDatasetProfileDefinition(getPagedProfile(datasetWizardModel, dataset1));
        updateTags(apiContext.getOperationsContext().getDatasetRepository(), datasetWizardModel);
        if (sendNotification) {
            if (dataset1.getStatus() != Dataset.Status.FINALISED.getValue()) {
                this.sendNotification(dataset1, dataset1.getDmp(), userInfo, NotificationType.DATASET_MODIFIED);
            } else {
                this.sendNotification(dataset1, dataset1.getDmp(), userInfo, NotificationType.DATASET_MODIFIED_FINALISED);
            }
        }
        return dataset1;
    }

    private void sendNotification(Dataset dataset, DMP dmp, UserInfo user, NotificationType notificationType) {
        List<UserDMP> userDMPS = databaseRepository.getUserDmpDao().asQueryable().where(((builder, root) -> builder.equal(root.get("dmp").get("id"), dmp.getId()))).toList();
        for (UserDMP userDMP : userDMPS) {
            if (!userDMP.getUser().getId().equals(user.getId())) {
                Notification notification = new Notification();
                notification.setUserId(user);
                notification.setType(notificationType);
                notification.setNotifyState(NotifyState.PENDING);
                notification.setIsActive(ActiveStatus.ACTIVE);
                notification.setData("{" +
                        "\"userId\": \"" + userDMP.getUser().getId() + "\"" +
                        ", \"id\": \"" + dataset.getId() + "\"" +
                        ", \"name\": \"" + dataset.getLabel() + "\"" +
                        ", \"path\": \"" + notificationPaths.get(notificationType) + "\"" +
                        "}");
                notification.setCreatedAt(new Date());
                notification.setUpdatedAt(notification.getCreatedAt());
                notification.setContactTypeHint(ContactType.EMAIL);
                notification.setContactHint(userDMP.getUser().getEmail());
                databaseRepository.getNotificationDao().createOrUpdate(notification);
            }
        }

    }

    private void checkDatasetValidation(Dataset dataset) throws Exception {
        List<String> datasetProfileValidators = new LinkedList<>();
        DatasetProfile profile = apiContext.getOperationsContext().getDatabaseRepository().getDatasetProfileDao().find(dataset.getProfile().getId());

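sendNotification queues one PENDING notification row per collaborator other than the acting user, with a hand-concatenated JSON payload carrying the dataset id, label, and the deep-link path from notificationPaths. A hypothetical variant that builds the same payload with the ObjectMapper already used elsewhere in this class, so labels containing quotes get escaped (an alternative sketch, not what the diff does):

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.LinkedHashMap;
import java.util.Map;

class NotificationPayload {
    static String build(String userId, String datasetId, String name, String path) throws Exception {
        Map<String, String> data = new LinkedHashMap<>();
        data.put("userId", userId);
        data.put("id", datasetId);
        data.put("name", name);
        data.put("path", path);
        // writeValueAsString escapes quotes and backslashes inside the values.
        return new ObjectMapper().writeValueAsString(data);
    }
}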
@@ -446,22 +633,73 @@ public class DatasetManager {
        }
    }

    private void propertiesModelToString(DatasetWizardModel datasetWizardModel, eu.eudat.data.entities.Dataset dataset) {
    private String propertiesModelToString(PagedDatasetProfile pagedDatasetProfile) {
        Map<String, Object> values = new HashMap();
        PagedDatasetProfile properties = datasetWizardModel.getDatasetProfileDefinition();
        properties.toMap(values);
        pagedDatasetProfile.toMap(values);
        JSONObject jobject = new JSONObject(values);
        dataset.setProperties(jobject.toString());
        return jobject.toString();
    }

    private void updateTags(DatasetRepository datasetRepository, DatasetWizardModel datasetWizardModel) throws IOException {
        if (datasetWizardModel.getTags() != null && !datasetWizardModel.getTags().isEmpty()) {
    public void updateTags(DatasetRepository datasetRepository, DatasetWizardModel datasetWizardModel) throws Exception {
        // if (datasetWizardModel.getTags() != null && !datasetWizardModel.getTags().isEmpty()) {
            eu.eudat.elastic.entities.Dataset dataset = new eu.eudat.elastic.entities.Dataset();
            dataset.setId(datasetWizardModel.getId().toString());
            if (datasetWizardModel.getTags() != null && !datasetWizardModel.getTags().isEmpty()) {
                DatasetCriteria criteria = new DatasetCriteria();
                criteria.setTags(datasetWizardModel.getTags());
                List<Tag> tags = apiContext.getOperationsContext().getDatasetRepository().query(criteria).stream().map(eu.eudat.elastic.entities.Dataset::getTags).flatMap(Collection::stream)
                        .filter(StreamDistinctBy.distinctByKey(Tag::getId)).filter(tag -> datasetWizardModel.getTags().stream().anyMatch(tag1 -> tag1.getName().equals(tag.getName()))).collect(Collectors.toList());
                if (tags.isEmpty()) {
                    datasetWizardModel.getTags().forEach(tag -> tag.setId(UUID.randomUUID().toString()));
                    dataset.setTags(datasetWizardModel.getTags());
                } else {
                    dataset.setTags(tags);
                }
            }
            dataset.setLabel(datasetWizardModel.getLabel());
            dataset.setDescription(datasetWizardModel.getDescription());
            dataset.setTemplate(datasetWizardModel.getProfile());
            dataset.setStatus(datasetWizardModel.getStatus());
            dataset.setDmp(datasetWizardModel.getDmp().getId());
            dataset.setGroup(datasetWizardModel.getDmp().getGroupId());
            dataset.setGrant(datasetWizardModel.getDmp().getGrant().getId());
            if (datasetWizardModel.getDmp().getUsers() != null) {
                dataset.setCollaborators(datasetWizardModel.getDmp().getUsers().stream().map(user -> {
                    Collaborator collaborator = new Collaborator();
                    collaborator.setId(user.getId().toString());
                    collaborator.setName(user.getName());
                    return collaborator;
                }).collect(Collectors.toList()));
            }
            DataManagementPlanCriteria dmpCriteria = new DataManagementPlanCriteria();
            dmpCriteria.setAllVersions(true);
            dmpCriteria.setGroupIds(Collections.singletonList(datasetWizardModel.getDmp().getGroupId()));
            apiContext.getOperationsContext().getDatabaseRepository().getDmpDao().getWithCriteria(dmpCriteria).toList().stream()
                    .max(Comparator.comparing(DMP::getVersion)).ifPresent(dmp -> dataset.setLastVersion(dmp.getId().equals(datasetWizardModel.getDmp().getId())));
            apiContext.getOperationsContext().getDatabaseRepository().getDmpDao().getWithCriteria(dmpCriteria).toList().stream().filter(DMP::isPublic)
                    .max(Comparator.comparing(DMP::getVersion)).ifPresent(dmp -> dataset.setLastPublicVersion(dmp.getId().equals(datasetWizardModel.getDmp().getId())));
            if (dataset.getLastVersion() == null) {
                dataset.setLastVersion(true);
            }
            if (dataset.getLastPublicVersion() == null) {
                dataset.setLastPublicVersion(false);
            }
            if (datasetWizardModel.getDmp().getOrganisations() != null) {
                dataset.setOrganizations(datasetWizardModel.getDmp().getOrganisations().stream().map(org -> {
                    Organization organization = new Organization();
                    organization.setId(org.getId());
                    organization.setName(org.getName());
                    return organization;
                }).collect(Collectors.toList()));
            }
            dataset.setPublic(datasetWizardModel.getDmp().getPublic());
            dataset.setGrantStatus(datasetWizardModel.getDmp().getGrant().getStatus());
            dataset.setFormData(this.getWordDocumentText(datasetWizardModel));
            datasetRepository.createOrUpdate(dataset);
        // }
    }
    }



    private void createRegistriesIfTheyDontExist(RegistryDao registryDao, eu.eudat.data.entities.Dataset dataset) {
        if (dataset.getRegistries() != null && !dataset.getRegistries().isEmpty()) {

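For reference, this is the shape of the Elasticsearch document that updateTags maintains above, restated as a comment:

// Field map of the mirrored Elasticsearch "dataset" document:
//   id, label, description, template, status, dmp, group, grant,
//   collaborators, organizations,
//   tags              - deduplicated against existing index entries by name,
//   lastVersion       - true when this dataset's DMP is the highest version in its group,
//   lastPublicVersion - true when it is the highest public version,
//   public, grantStatus,
//   formData          - plain text extracted from the generated Word document,
//                       which makes the dataset body full-text searchable.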
@@ -544,8 +782,8 @@ public class DatasetManager {
        datasetDao.createOrUpdate(dataset);
    }

    public ResponseEntity<byte[]> getDocument(String id, VisibilityRuleService visibilityRuleService, String contentType) throws IllegalAccessException, IOException, InstantiationException {
        FileEnvelope envelope = getXmlDocument(id, visibilityRuleService);
    public ResponseEntity<byte[]> getDocument(String id, VisibilityRuleService visibilityRuleService, String contentType, Principal principal) throws IllegalAccessException, IOException, InstantiationException {
        FileEnvelope envelope = getXmlDocument(id, visibilityRuleService, principal);
        InputStream resource = new FileInputStream(envelope.getFile());
        logger.info("Mime Type of " + envelope.getFilename() + " is " +
                new MimetypesFileTypeMap().getContentType(envelope.getFile()));

@@ -645,7 +883,8 @@ public class DatasetManager {
            datasetElastic = datasetRepository.exists() ?
                    datasetRepository.findDocument(id) : new eu.eudat.elastic.entities.Dataset();
        } catch (Exception ex) {
-            datasetElastic = new eu.eudat.elastic.entities.Dataset();
+            logger.warn(ex.getMessage());
+            datasetElastic = null;
        }
        dataset.setDatasetProfileDefinition(getPagedProfile(dataset, datasetEntity));
        dataset.fromDataModel(datasetEntity);
@@ -667,7 +906,12 @@ public class DatasetManager {
        // Now at latest version.
        dataset.setIsProfileLatestVersion(true);

        if (datasetElastic != null && datasetElastic.getTags() != null && !datasetElastic.getTags().isEmpty()) {
            dataset.setTags(datasetElastic.getTags());
        }
        /*if (datasetElastic != null && datasetElastic.getLabel() != null && !datasetElastic.getLabel().isEmpty()) {
            dataset.setLabel(datasetElastic.getLabel());
        }*/
        return dataset;
    }
@@ -697,4 +941,37 @@ public class DatasetManager {

        return data;
    }

    public void generateIndex(Principal principal) {
        if (principal.getAuthorities().contains(Authorities.ADMIN.getValue())) {
            this.apiContext.getOperationsContext().getDatabaseRepository().getDmpDao().asQueryable().toList();
            List<DatasetWizardModel> datasetWizardModels = this.apiContext.getOperationsContext().getDatabaseRepository().getDatasetDao().asQueryable().toList()
                    .stream().map(dataset -> {
                        DatasetWizardModel datasetWizardModel = new DatasetWizardModel().fromDataModel(dataset);
                        datasetWizardModel.setDatasetProfileDefinition(getPagedProfile(datasetWizardModel, dataset));
                        return datasetWizardModel;
                    }).collect(Collectors.toList());
            datasetWizardModels.forEach(datasetWizardModel -> {
                try {
                    eu.eudat.elastic.entities.Dataset dataset = apiContext.getOperationsContext().getDatasetRepository().findDocument(datasetWizardModel.getId().toString());
                    if (dataset != null) {
                        datasetWizardModel.setTags(dataset.getTags());
                    }
                    updateTags(apiContext.getOperationsContext().getDatasetRepository(), datasetWizardModel);
                } catch (Exception e) {
                    logger.error(e.getMessage(), e);
                }
            });
        }
    }

    public void clearIndex(Principal principal) {
        if (principal.getAuthorities().contains(Authorities.ADMIN.getValue())) {
            try {
                this.apiContext.getOperationsContext().getDatasetRepository().clear();
            } catch (IOException e) {
                logger.error(e.getMessage(), e);
            }
        }
    }
}
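Both new maintenance operations are self-guarding: each checks the ADMIN authority on the Principal and silently does nothing otherwise, so callers need no extra authorization logic. A minimal sketch of a hypothetical admin endpoint driving a full re-index (only the two manager calls are project API; the surrounding method is illustrative):

    // Hypothetical caller, assuming an injected DatasetManager and a resolved Principal.
    public void rebuildElasticIndex(DatasetManager datasetManager, Principal principal) {
        datasetManager.clearIndex(principal);    // drops all indexed dataset documents
        datasetManager.generateIndex(principal); // re-reads every dataset and re-indexes it with its tags
    }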
@@ -113,8 +113,8 @@ public class DatasetProfileManager {
        ResponseEntity<Object> response = restTemplate.exchange(data.getUrl() + "?search=" + like, HttpMethod.GET, entity, Object.class);
        DocumentContext jsonContext = JsonPath.parse(response.getBody());

-        List<Map<String, String>> jsonItems = jsonContext.read(data.getOptionsRoot() + "['" + data.getAutoCompleteOptions().getLabel() + "','" + data.getAutoCompleteOptions().getValue() + "','" + data.getAutoCompleteOptions().getSource() + "']");
-        jsonItems.forEach(item -> result.add(new ExternalAutocompleteFieldModel(item.get(data.getAutoCompleteOptions().getValue()), item.get(data.getAutoCompleteOptions().getLabel()), item.get(data.getAutoCompleteOptions().getSource()))));
+        List<Map<String, String>> jsonItems = jsonContext.read(data.getOptionsRoot() + "['" + data.getAutoCompleteOptions().getLabel() + "','" + data.getAutoCompleteOptions().getValue() + "','" + data.getAutoCompleteOptions().getSource() + "','" + "uri" + "']");
+        jsonItems.forEach(item -> result.add(new ExternalAutocompleteFieldModel(item.get(data.getAutoCompleteOptions().getValue()), item.get(data.getAutoCompleteOptions().getLabel()), item.get(data.getAutoCompleteOptions().getSource()), item.get("uri"))));

        return result;
    }
@@ -15,6 +15,7 @@ import eu.eudat.models.data.dmp.DataManagementPlan;
import eu.eudat.models.data.security.Principal;
import eu.eudat.queryable.QueryableList;

+import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;

@@ -47,9 +48,14 @@ public class DatasetWizardManager {
        return;
    }

-    public void delete(ApiContext apiContext, UUID uuid) {
+    public void delete(ApiContext apiContext, UUID uuid) throws IOException {
        Dataset oldDataset = apiContext.getOperationsContext().getDatabaseRepository().getDatasetDao().find(uuid);
-        oldDataset.setStatus(DMP.DMPStatus.DELETED.getValue());
+        eu.eudat.elastic.entities.Dataset oldDatasetElasitc = apiContext.getOperationsContext().getDatasetRepository().findDocument(uuid.toString());
+        oldDataset.setStatus(Dataset.Status.DELETED.getValue());
+        oldDatasetElasitc.setStatus(oldDataset.getStatus());
        apiContext.getOperationsContext().getDatabaseRepository().getDatasetDao().createOrUpdate(oldDataset);
+        if (uuid != null && oldDatasetElasitc.getId() != null) {
+            apiContext.getOperationsContext().getDatasetRepository().createOrUpdate(oldDatasetElasitc);
+        }
    }
}
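delete() is a soft delete: the entity's status flips to DELETED and the same status is mirrored onto the Elasticsearch document, which is why the signature now declares IOException. A hypothetical caller (only the manager call is real project API; the surroundings are assumed):

    // Illustrative only: datasetId, apiContext and logger come from the enclosing class.
    try {
        datasetWizardManager.delete(apiContext, UUID.fromString(datasetId));
    } catch (IOException e) {
        logger.error("Could not mirror the DELETED status to Elasticsearch", e);
    }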
@@ -43,11 +43,13 @@ public class DocumentManager {
    private ApiContext context;
    private DatasetManager datasetManager;
    private ConfigLoader configLoader;
+    private Environment environment;

-    public DocumentManager(ApiContext context, DatasetManager datasetManager, ConfigLoader configLoader) {
+    public DocumentManager(ApiContext context, DatasetManager datasetManager, ConfigLoader configLoader, Environment environment) {
        this.context = context;
        this.datasetManager = datasetManager;
        this.configLoader = configLoader;
+        this.environment = environment;
    }

    public File getWordDocument(ConfigLoader configLoader, DatasetDao datatasetRepository, String id, VisibilityRuleService visibilityRuleService) throws InstantiationException, IllegalAccessException, IOException {

@@ -82,7 +84,7 @@ public class DocumentManager {
        PagedDatasetProfile pagedDatasetProfile = datasetManager.getPagedProfile(dataset, datasetEntity);
        visibilityRuleService.setProperties(properties);
        visibilityRuleService.buildVisibilityContext(pagedDatasetProfile.getRules());
-        File file = xmlBuilder.build(pagedDatasetProfile, UUID.fromString(id), visibilityRuleService);
+        File file = xmlBuilder.build(pagedDatasetProfile, UUID.fromString(id), visibilityRuleService, environment);
        FileEnvelope fileEnvelope = new FileEnvelope();
        fileEnvelope.setFile(file);
        fileEnvelope.setFilename(datasetEntity.getLabel());
@@ -0,0 +1,123 @@
package eu.eudat.logic.managers;

import eu.eudat.data.dao.criteria.LockCriteria;
import eu.eudat.logic.services.ApiContext;
import eu.eudat.models.data.lock.Lock;
import eu.eudat.models.data.security.Principal;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Component;

import javax.persistence.NoResultException;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.UUID;

@Component
public class LockManager {
    private final Comparator<eu.eudat.data.entities.Lock> compareByTouchedAt = Comparator.comparing(o -> o.getTouchedAt().getTime());

    private ApiContext apiContext;
    private Environment environment;

    @Autowired
    public LockManager(ApiContext apiContext, Environment environment) {
        this.apiContext = apiContext;
        this.environment = environment;
    }

    public eu.eudat.data.entities.Lock createOrUpdate(Lock lock, Principal principal) throws Exception {
        if (lock.getId() != null) {
            try {
                eu.eudat.data.entities.Lock entity = this.apiContext.getOperationsContext().getDatabaseRepository().getLockDao().find(lock.getId());
                if (entity != null) {
                    if (!entity.getLockedBy().getId().equals(principal.getId())) {
                        throw new Exception("Is not locked by that user");
                    }
                }
            } catch (NoResultException e) {
                lock.setId(null);
            }
        }
        eu.eudat.data.entities.Lock newLock = lock.toDataModel();
        newLock = this.apiContext.getOperationsContext().getDatabaseRepository().getLockDao().createOrUpdate(newLock);

        return newLock;
    }

    public boolean isLocked(String targetId, Principal principal) throws Exception {
        LockCriteria criteria = new LockCriteria();
        criteria.setTarget(UUID.fromString(targetId));
        Long availableLocks = this.apiContext.getOperationsContext().getDatabaseRepository().getLockDao().getWithCriteria(criteria).count();
        if (availableLocks == 1) {
            eu.eudat.data.entities.Lock lock = this.apiContext.getOperationsContext().getDatabaseRepository().getLockDao().getWithCriteria(criteria).getSingle();
            if (lock.getLockedBy().getId().equals(principal.getId())) {
                lock.setTouchedAt(new Date());
                this.createOrUpdate(new Lock().fromDataModel(lock), principal);
                return false;
            }
            return this.forceUnlock(targetId) > 0;
        } else if (availableLocks > 1) {
            this.forceUnlock(targetId);
            return this.isLocked(targetId, principal);
        }
        return false;
    }

    private Long forceUnlock(String targetId) {
        LockCriteria criteria = new LockCriteria();
        criteria.setTarget(UUID.fromString(targetId));
        Long availableLocks = this.apiContext.getOperationsContext().getDatabaseRepository().getLockDao().getWithCriteria(criteria).count();
        long deletedLocks = 0L;
        if (availableLocks > 0) {
            List<eu.eudat.data.entities.Lock> locks = this.apiContext.getOperationsContext().getDatabaseRepository().getLockDao().getWithCriteria(criteria).toList();
            for (eu.eudat.data.entities.Lock lock : locks) {
                if (new Date().getTime() - lock.getTouchedAt().getTime() > environment.getProperty("database.lock-fail-interval", Integer.class)) {
                    this.apiContext.getOperationsContext().getDatabaseRepository().getLockDao().delete(lock);
                    deletedLocks++;
                }
            }
            if (deletedLocks == 0) {
                eu.eudat.data.entities.Lock recentlock = locks.stream().max(compareByTouchedAt).get();
                for (eu.eudat.data.entities.Lock lock : locks) {
                    if (lock != recentlock) {
                        this.apiContext.getOperationsContext().getDatabaseRepository().getLockDao().delete(lock);
                        deletedLocks++;
                    }
                }
            }
        }
        return availableLocks - deletedLocks;
    }

    public void unlock(String targetId, Principal principal) throws Exception {
        LockCriteria criteria = new LockCriteria();
        criteria.setTarget(UUID.fromString(targetId));
        Long availableLocks = this.apiContext.getOperationsContext().getDatabaseRepository().getLockDao().getWithCriteria(criteria).count();
        if (availableLocks == 1) {
            eu.eudat.data.entities.Lock lock = this.apiContext.getOperationsContext().getDatabaseRepository().getLockDao().getWithCriteria(criteria).getSingle();
            if (!lock.getLockedBy().getId().equals(principal.getId())) {
                throw new Exception("Only the user who created that lock can delete it");
            }
            this.apiContext.getOperationsContext().getDatabaseRepository().getLockDao().delete(lock);
        } else if (availableLocks > 1) {
            List<eu.eudat.data.entities.Lock> locks = this.apiContext.getOperationsContext().getDatabaseRepository().getLockDao().getWithCriteria(criteria).toList();
            locks.stream().filter(lock -> lock.getLockedBy().getId().equals(principal.getId())).forEach(lock -> this.apiContext.getOperationsContext().getDatabaseRepository().getLockDao().delete(lock));
        }
    }

    public Lock getFromTarget(String targetId, Principal principal) throws Exception {
        LockCriteria criteria = new LockCriteria();
        criteria.setTarget(UUID.fromString(targetId));
        Long availableLocks = this.apiContext.getOperationsContext().getDatabaseRepository().getLockDao().getWithCriteria(criteria).count();
        if (availableLocks > 0) {
            eu.eudat.data.entities.Lock lock = this.apiContext.getOperationsContext().getDatabaseRepository().getLockDao().getWithCriteria(criteria).getSingle();
            if (!lock.getLockedBy().getId().equals(principal.getId())) {
                throw new Exception("Only the user who created that lock can access it");
            }
            return new Lock().fromDataModel(lock);
        }
        return null;
    }
}
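Taken together, isLocked() and forceUnlock() implement a lease-style lock: a lock counts only while its touchedAt timestamp is younger than database.lock-fail-interval, stale or duplicate locks are swept before answering, and holding your own lock merely refreshes it. A minimal usage sketch, assuming a hypothetical caller and a Lock model that exposes a target setter (mirroring LockCriteria.setTarget):

    // Hypothetical guard around an edit operation; names outside LockManager are illustrative.
    public void openForEdit(LockManager lockManager, String datasetId, Principal principal) throws Exception {
        if (lockManager.isLocked(datasetId, principal)) {
            // A fresh lock is held by someone else (our own lock would have been refreshed instead).
            throw new Exception("Dataset is locked by another user");
        }
        Lock lock = new Lock();                      // eu.eudat.models.data.lock.Lock
        lock.setTarget(UUID.fromString(datasetId));  // assumed setter, not shown in this diff
        lockManager.createOrUpdate(lock, principal); // persists the lease via LockDao
    }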
@@ -0,0 +1,124 @@
package eu.eudat.logic.managers;

import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.data.entities.Notification;
import eu.eudat.data.entities.UserInfo;
import eu.eudat.data.enumeration.notification.ActiveStatus;
import eu.eudat.data.enumeration.notification.NotifyState;
import eu.eudat.logic.services.ApiContext;
import eu.eudat.logic.services.utilities.MailService;
import eu.eudat.models.data.mail.SimpleMail;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Component;

import javax.mail.MessagingException;
import javax.transaction.Transactional;
import java.io.IOException;
import java.util.*;

@Component
public class NotificationManager {
    private static final Logger logger = LoggerFactory.getLogger(NotificationManager.class);

    private ApiContext apiContext;
    private Environment environment;
    private MailService mailService;

    @Autowired
    public NotificationManager(ApiContext apiContext, Environment environment, MailService mailService) {
        this.apiContext = apiContext;
        this.environment = environment;
        this.mailService = mailService;
    }

    @Transactional
    public void sendNotification(Notification notification) throws Exception {
        if (notification.getNotifyState() == NotifyState.ERROR) {
            if (notification.getRetryCount() == null) {
                notification.setRetryCount(0);
            }
            notification.setRetryCount(notification.getRetryCount() + 1);
            if (notification.getRetryCount() >= this.environment.getProperty("notification.maxRetries", Integer.class)) {
                notification.setIsActive(ActiveStatus.INACTIVE);
                notification.setUpdatedAt(new Date());
                return;
            }
        }
        notification.setNotifyState(NotifyState.PROCESSING);
        notification.setNotifiedAt(new Date());
        notification.setUpdatedAt(new Date());
        try {
            Map<String, String> data = new ObjectMapper().readValue(notification.getData(), HashMap.class);
            UserInfo userInfo = this.apiContext.getOperationsContext().getDatabaseRepository().getUserInfoDao().find(UUID.fromString(data.get("userId")));
            String subjectTemplate = "";
            String contentTemplate = "";

            switch (notification.getType()) {
                case DMP_MODIFIED:
                case DATASET_MODIFIED:
                    subjectTemplate = this.environment.getProperty("notification.modified.subject");
                    contentTemplate = mailService.getMailTemplateContent("classpath:templates/notifications/modifiedNotification.html");
                    break;
                case DMP_PUBLISH:
                    subjectTemplate = this.environment.getProperty("notification.publish.subject");
                    contentTemplate = mailService.getMailTemplateContent("classpath:templates/notifications/publishNotification.html");
                    break;
                case DMP_FINALISED:
                    subjectTemplate = this.environment.getProperty("notification.finalised.subject");
                    contentTemplate = mailService.getMailTemplateContent("classpath:templates/notifications/finalisedNotification.html");
                    break;
                case DMP_MODIFIED_FINALISED:
                case DATASET_MODIFIED_FINALISED:
                    subjectTemplate = this.environment.getProperty("notification.modifiedFinalised.subject");
                    contentTemplate = mailService.getMailTemplateContent("classpath:templates/notifications/modifiedFinalisedNotification.html");
                    break;
            }

            switch (notification.getContactTypeHint()) {
                case EMAIL:
                    this.sendEmailNotification(notification, userInfo, data, subjectTemplate, contentTemplate);
                    break;
            }
        } catch (Exception e) {
            notification.setNotifyState(NotifyState.ERROR);
            notification.setUpdatedAt(new Date());
            logger.error(e.getMessage(), e);
        }
    }

    private void sendEmailNotification(Notification notification, UserInfo userInfo, Map<String, String> data, String subjectTemplate, String contentTemplate) throws IOException {
        SimpleMail simpleMail = new SimpleMail();
        simpleMail.setFrom(this.environment.getProperty("mail.from"));
        simpleMail.setSubject(makeSubject(data, subjectTemplate));
        simpleMail.setTo(notification.getContactHint());
        simpleMail.setContent(makeContent(data, notification, userInfo, contentTemplate));
        try {
            mailService.sendSimpleMail(simpleMail);
            notification.setNotifyState(NotifyState.SUCCEEDED);
            notification.setUpdatedAt(new Date());
        } catch (MessagingException e) {
            notification.setNotifyState(NotifyState.ERROR);
            notification.setUpdatedAt(new Date());
            logger.error(e.getMessage(), e);
        }
    }

    private String makeSubject(Map<String, String> data, String subjectTemplate) {
        return subjectTemplate.replace("{name}", data.get("name"));
    }

    private String makeContent(Map<String, String> data, Notification notification, UserInfo userInfo, String template) {
        String content = template;
        content = content.replace("{recipient}", userInfo.getName());
        for (String key : data.keySet()) {
            content = content.replace("{" + key + "}", data.get(key));
        }
        content = content.replace("{host}", this.environment.getProperty("dmp.domain"));
        content = content.replace("{reasonName}", notification.getUserId().getName());
        return content;
    }
}
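makeSubject() and makeContent() are plain placeholder substitution: {name} in the subject; {recipient}, {host}, {reasonName} plus one {key} per entry of the notification's JSON data payload in the body. A worked example of that substitution, with purely illustrative values:

    // Illustrative data only; the real payload comes from Notification.getData() as JSON.
    Map<String, String> data = new HashMap<>();
    data.put("userId", "0f8b...");  // looked up to resolve the recipient UserInfo
    data.put("name", "My DMP");     // fills {name} in the subject, and {name} in the body
    data.put("id", "42");           // every key also becomes a {key} placeholder

    String template = "Dear {recipient}, \"{name}\" was modified by {reasonName}: {host}/plans/edit/{id}";
    // After makeContent(), the body would read roughly:
    // Dear Jane Doe, "My DMP" was modified by John Smith: https://dmp.example.org/plans/edit/42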
@@ -0,0 +1,47 @@
package eu.eudat.logic.managers;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.data.entities.DMP;
import eu.eudat.models.rda.Dmp;
import eu.eudat.models.rda.RDAModel;
import eu.eudat.models.rda.mapper.DmpRDAMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import javax.transaction.Transactional;
import java.io.IOException;
import java.text.SimpleDateFormat;

@Component
public class RDAManager {

    private DmpRDAMapper dmpRDAMapper;

    @Autowired
    public RDAManager(DmpRDAMapper dmpRDAMapper) {
        this.dmpRDAMapper = dmpRDAMapper;
    }

    @Transactional
    public String convertToRDA(DMP dmp) throws JsonProcessingException {
        String result = "";

        Dmp rdaDmp = dmpRDAMapper.toRDA(dmp);

        ObjectMapper mapper = new ObjectMapper();
        mapper.setDateFormat(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss Z"));

        result = mapper.writeValueAsString(rdaDmp);

        return result;
    }

    public DMP convertToEntity(String json) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        mapper.setDateFormat(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss Z"));

        Dmp rda = mapper.readValue(json, Dmp.class);
        return dmpRDAMapper.toEntity(rda);
    }
}
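A round-trip sketch of the two conversions, assuming an already-injected RDAManager; dates are serialised with the yyyy-MM-dd'T'HH:mm:ss Z pattern in both directions, so the JSON survives a re-import unchanged:

    // Hypothetical usage; rdaManager would normally arrive via Spring injection.
    public String exportAndReimport(RDAManager rdaManager, DMP dmp) throws IOException {
        String json = rdaManager.convertToRDA(dmp);      // entity -> RDA JSON via DmpRDAMapper
        DMP restored = rdaManager.convertToEntity(json); // RDA JSON -> entity, same date format
        return json;
    }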
@@ -1,6 +1,7 @@
package eu.eudat.logic.managers;

import com.fasterxml.jackson.databind.ObjectMapper;
import eu.eudat.data.dao.criteria.DataManagementPlanCriteria;
import eu.eudat.data.dao.entities.UserInfoDao;
import eu.eudat.data.entities.DMP;
import eu.eudat.data.entities.UserInfo;

@@ -64,7 +65,10 @@ public class UserManager {
        eu.eudat.data.entities.UserInfo user = apiContext.getOperationsContext().getDatabaseRepository().getUserInfoDao().find(userId);
        UserProfile profile = new UserProfile().fromDataModel(user);
        List<Integer> roles = new LinkedList<>();
-        List<DMP> dmps = apiContext.getOperationsContext().getDatabaseRepository().getDmpDao().getAuthenticated(apiContext.getOperationsContext().getDatabaseRepository().getDmpDao().asQueryable(), userId, roles).take(5).toList();
+        DataManagementPlanCriteria criteria = new DataManagementPlanCriteria();
+        criteria.setAllVersions(false);
+        QueryableList<DMP> items = apiContext.getOperationsContext().getDatabaseRepository().getDmpDao().getWithCriteria(criteria);
+        List<DMP> dmps = apiContext.getOperationsContext().getDatabaseRepository().getDmpDao().getAuthenticated(items, userId, roles).take(5).toList();
        profile.setAssociatedDmps(dmps.stream().map(x -> new DataManagementPlan().fromDataModel(x)).collect(Collectors.toList()));
        return profile;
    }
@@ -12,6 +12,9 @@ public class DataFieldsUrlConfiguration {
    private String description;
    private String source;
    private String count;
    private String path;
    private String host;
    private String types;

    public String getId() {
        return id;

@@ -67,4 +70,28 @@ public class DataFieldsUrlConfiguration {
    public void setCount(String count) {
        this.count = count;
    }

    public String getPath() {
        return path;
    }

    @XmlElement(name = "path")
    public void setPath(String path) {
        this.path = path;
    }

    public String getHost() {
        return host;
    }

    @XmlElement(name = "host")
    public void setHost(String host) {
        this.host = host;
    }

    @XmlElement(name = "types")
    public String getTypes() {
        return types;
    }

    public void setTypes(String types) {
        this.types = types;
    }
}
@@ -1,6 +1,8 @@
package eu.eudat.logic.proxy.config;

import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import java.util.List;

/**
 * Created by ikalyvas on 6/29/2018.

@@ -8,6 +10,11 @@ import javax.xml.bind.annotation.XmlElement;
public class DataUrlConfiguration {
    private String path;
    private DataFieldsUrlConfiguration fieldsUrlConfiguration;
    private UrlConfiguration urlConfiguration;
    private String parseClass;
    private String parseField;
    private List<String> mergedFields;
    private String mergedFieldName;

    public String getPath() {
        return path;
@@ -26,4 +33,50 @@ public class DataUrlConfiguration {
    public void setFieldsUrlConfiguration(DataFieldsUrlConfiguration fieldsUrlConfiguration) {
        this.fieldsUrlConfiguration = fieldsUrlConfiguration;
    }

    public UrlConfiguration getUrlConfiguration() {
        return urlConfiguration;
    }

    @XmlElement(name = "urlConfig")
    public void setUrlConfiguration(UrlConfiguration urlConfiguration) {
        this.urlConfiguration = urlConfiguration;
    }

    public String getParseClass() {
        return parseClass;
    }

    @XmlElement(name = "parse-class")
    public void setParseClass(String parseClass) {
        this.parseClass = parseClass;
    }

    public String getParseField() {
        return parseField;
    }

    @XmlElement(name = "parse-field")
    public void setParseField(String parseField) {
        this.parseField = parseField;
    }

    public List<String> getMergedFields() {
        return mergedFields;
    }

    @XmlElementWrapper(name = "merge-fields")
    @XmlElement(name = "field")
    public void setMergedFields(List<String> mergedFields) {
        this.mergedFields = mergedFields;
    }

    public String getMergedFieldName() {
        return mergedFieldName;
    }

    @XmlElement(name = "merge-field-name")
    public void setMergedFieldName(String mergedFieldName) {
        this.mergedFieldName = mergedFieldName;
    }
}
@@ -5,6 +5,8 @@ public class ExternalUrlCriteria {
    private String page;
    private String pageSize;
    private String funderId;
    private String path;
    private String host;

    public String getLike() {
        return like;

@@ -34,6 +36,22 @@ public class ExternalUrlCriteria {
        this.funderId = funderId;
    }

    public String getPath() {
        return path;
    }

    public void setPath(String path) {
        this.path = path;
    }

    public String getHost() {
        return host;
    }

    public void setHost(String host) {
        this.host = host;
    }

    public ExternalUrlCriteria(String like) {
        this.like = like;
    }
@@ -4,19 +4,25 @@ import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.jayway.jsonpath.DocumentContext;
import com.jayway.jsonpath.JsonPath;
-import eu.eudat.logic.proxy.config.*;
+import eu.eudat.logic.proxy.config.DataUrlConfiguration;
+import eu.eudat.logic.proxy.config.ExternalUrlCriteria;
+import eu.eudat.logic.proxy.config.FetchStrategy;
+import eu.eudat.logic.proxy.config.UrlConfiguration;
import eu.eudat.logic.proxy.config.configloaders.ConfigLoader;
import eu.eudat.logic.proxy.config.exceptions.HugeResultSet;
import eu.eudat.logic.proxy.config.exceptions.NoURLFound;
+import net.minidev.json.JSONArray;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Service;

-import java.io.File;
-import java.io.IOException;
-import java.io.UnsupportedEncodingException;
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.Unmarshaller;
+import java.beans.PropertyDescriptor;
+import java.io.*;
+import java.lang.reflect.Method;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;

@@ -154,10 +160,11 @@ public class RemoteFetcher {
    private String replaceCriteriaOnUrl(String path, ExternalUrlCriteria externalUrlCriteria, String firstPage) {
        String completedPath = path;
        if (externalUrlCriteria.getLike() != null) {
-            if (path.contains("openaire") && externalUrlCriteria.getLike().equals(""))
+            if ((path.contains("openaire") || path.contains("orcid") || path.contains("ror")) && externalUrlCriteria.getLike().equals("")) {
                completedPath = completedPath.replaceAll("\\{like}", "*");
-            else
+            } else {
                completedPath = completedPath.replaceAll("\\{like}", externalUrlCriteria.getLike());
+            }
        } else {
            completedPath = completedPath.replace("{like}", "");
        }
@@ -184,6 +191,16 @@ public class RemoteFetcher {
        } else {
            completedPath = completedPath.replace("{pageSize}", "60");
        }
        if (externalUrlCriteria.getHost() != null) {
            completedPath = completedPath.replace("{host}", externalUrlCriteria.getHost());
        } else {
            completedPath = completedPath.replace("{host}", "");
        }
        if (externalUrlCriteria.getPath() != null) {
            completedPath = completedPath.replace("{path}", externalUrlCriteria.getPath());
        } else {
            completedPath = completedPath.replace("{path}", "");
        }
        return completedPath;
    }
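With the two new branches, replaceCriteriaOnUrl() fills {host} and {path} the same way it already fills {like} and {pageSize}: a set criterion is substituted, an unset one collapses to a default or the empty string, and an empty like becomes the * wildcard for openaire/orcid/ror endpoints. An illustrative expansion (the template and values are made up; real URLs come from the external-URL XML configuration):

    // Illustrative template and criteria, not project configuration.
    String template = "https://{host}/api/{path}/records?q={like}&size={pageSize}";
    ExternalUrlCriteria criteria = new ExternalUrlCriteria(""); // empty "like"
    criteria.setHost("ror.org");
    criteria.setPath("v1");
    // For a "ror" URL the empty like turns into "*" and pageSize falls back to "60":
    // https://ror.org/api/v1/records?q=*&size=60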
@@ -225,13 +242,17 @@ public class RemoteFetcher {

        HttpURLConnection con = (HttpURLConnection) url.openConnection();
        con.setRequestMethod("GET");
        if (contentType != null && !contentType.isEmpty()) {
            con.setRequestProperty("Accept", contentType);
        }

        int responseCode = con.getResponseCode();
        if (responseCode == HttpURLConnection.HTTP_OK) { // success
-            //do here all the parsing
-            Results results = new Results();
+            Results results;
+            if (con.getHeaderField("Content-Type").contains("json")) {
                DocumentContext jsonContext = JsonPath.parse(con.getInputStream());

                if (jsonDataPath.getFieldsUrlConfiguration().getSource() != null) {
                    results = new Results(jsonContext.read(jsonDataPath.getPath()
                            + "[" + jsonDataPath.getFieldsUrlConfiguration().getName() + "," + jsonDataPath.getFieldsUrlConfiguration().getDescription()
@@ -243,6 +264,42 @@ public class RemoteFetcher {
                            + "[" + jsonDataPath.getFieldsUrlConfiguration().getName()
                            + "," + jsonDataPath.getFieldsUrlConfiguration().getId() + "]"),
                            new HashMap<>(1, 1));
                } else if (jsonDataPath.getFieldsUrlConfiguration().getPath() != null) {
                    results = new Results(jsonContext.read(jsonDataPath.getPath()
                            + "[" + jsonDataPath.getFieldsUrlConfiguration().getPath()
                            + "," + jsonDataPath.getFieldsUrlConfiguration().getHost() + "]"),
                            new HashMap<>(1, 1));

                    List<Map<String, String>> multiResults = results.results.stream().map(result -> {
                        ExternalUrlCriteria externalUrlCriteria = new ExternalUrlCriteria();
                        externalUrlCriteria.setPath(result.get("path"));
                        externalUrlCriteria.setHost(result.get("host"));
                        String replacedPath = replaceCriteriaOnUrl(jsonDataPath.getUrlConfiguration().getUrl(), externalUrlCriteria, jsonDataPath.getUrlConfiguration().getFirstpage());
                        return getResultsFromUrl(replacedPath, jsonDataPath.getUrlConfiguration().getData(), jsonDataPath.getUrlConfiguration().getData().getPath(), jsonDataPath.getUrlConfiguration().getContentType());
                    }).filter(Objects::nonNull).map(results1 -> results1.results.get(0)).collect(Collectors.toList());
                    results = new Results(multiResults, new HashMap<>(1, 1));
                } else if (jsonDataPath.getFieldsUrlConfiguration().getTypes() != null) {
                    List<Map<String, Object>> tempRes = jsonContext.read(jsonDataPath.getPath()
                            + "[" + jsonDataPath.getFieldsUrlConfiguration().getId() + "," + jsonDataPath.getFieldsUrlConfiguration().getName()
                            + "," + jsonDataPath.getFieldsUrlConfiguration().getTypes() + "," + jsonDataPath.getFieldsUrlConfiguration().getUri() + "]");
                    List<Map<String, String>> finalRes = new ArrayList<>();
                    tempRes.forEach(map -> {
                        Map<String, String> resMap = new HashMap<>();
                        map.forEach((key, value) -> {
                            if (key.equals(jsonDataPath.getFieldsUrlConfiguration().getTypes().substring(1, jsonDataPath.getFieldsUrlConfiguration().getTypes().length() - 1))) {
                                resMap.put("tags", ((JSONArray) value).toJSONString());
                            } else if (key.equals(jsonDataPath.getFieldsUrlConfiguration().getUri().substring(1, jsonDataPath.getFieldsUrlConfiguration().getTypes().length() - 1))) {
                                resMap.put(key, ((JSONArray) value).toJSONString());
                            } else {
                                resMap.put(key, (String) value);
                            }
                        });

                        finalRes.add(resMap);
                    });

                    results = new Results(finalRes,
                            new HashMap<>(1, 1));
                } else {
                    results = new Results(jsonContext.read(jsonDataPath.getPath()
                            + "[" + jsonDataPath.getFieldsUrlConfiguration().getName() + "," + jsonDataPath.getFieldsUrlConfiguration().getDescription()
@@ -251,6 +308,58 @@ public class RemoteFetcher {
                }
                results.results = results.results.stream().map(e -> e.entrySet().stream().collect(Collectors.toMap(x -> this.transformKey(jsonDataPath, x.getKey()), Map.Entry::getValue)))
                        .collect(Collectors.toList());
            } else if (con.getHeaderField("Content-Type").contains("xml")) {
                Class<?> aClass = Class.forName(jsonDataPath.getParseClass());
                JAXBContext jaxbContext = JAXBContext.newInstance(aClass);
                Unmarshaller unmarshaller = jaxbContext.createUnmarshaller();
                Object data = unmarshaller.unmarshal(con.getInputStream());
                Method reader = null;
                if (jsonDataPath.getParseField() != null && !jsonDataPath.getParseField().isEmpty()) {
                    reader = new PropertyDescriptor(jsonDataPath.getParseField(), aClass).getReadMethod();
                }
                ObjectMapper objectMapper = new ObjectMapper();
                List<Map<String, String>> values = new ArrayList<>();
                int max = 1;
                if (reader != null) {
                    Object invokedField = reader.invoke(data);
                    if (invokedField instanceof Collection) {
                        max = ((Collection) invokedField).size();
                    }
                }
                for (int i = 0; i < max; i++) {
                    Object value;
                    if (reader != null) {
                        Object invokedField = reader.invoke(data);
                        if (invokedField instanceof Collection) {
                            value = ((Collection) invokedField).toArray()[i];
                        } else {
                            value = invokedField;
                        }
                    } else {
                        value = data;
                    }
                    Map<String, String> map = objectMapper.convertValue(value, Map.class);
                    if (jsonDataPath.getMergedFields() != null && !jsonDataPath.getMergedFields().isEmpty() && jsonDataPath.getMergedFieldName() != null && !jsonDataPath.getMergedFieldName().isEmpty()) {
                        Map<String, String> finalMap = new HashMap<>();
                        for (Map.Entry<String, String> entry : map.entrySet()) {
                            if (jsonDataPath.getMergedFields().contains(entry.getKey())) {
                                if (!finalMap.containsKey(jsonDataPath.getMergedFieldName())) {
                                    finalMap.put(jsonDataPath.getMergedFieldName(), entry.getValue());
                                } else {
                                    finalMap.put(jsonDataPath.getMergedFieldName(), finalMap.get(jsonDataPath.getMergedFieldName()) + " " + entry.getValue());
                                }
                            } else {
                                finalMap.put(entry.getKey(), entry.getValue());
                            }
                        }
                        values.add(finalMap);
                    } else {
                        values.add(map);
                    }
                }
                results = new Results(values, new HashMap<>(1, 1));
            }

            return results;
        }
    } catch (MalformedURLException e1) {
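The XML branch above drives JAXB generically: the element class and the collection-bearing property are named in the external-URL configuration (parse-class / parse-field), and a PropertyDescriptor read method replaces a compile-time getter. A stripped-down, self-contained sketch of that reflection pattern, with a hypothetical Feed class standing in for a configured parse-class:

    // Hypothetical Feed class standing in for a configured parse-class / parse-field.
    // The real code resolves these names from the XML configuration at runtime.
    import java.beans.PropertyDescriptor;
    import java.lang.reflect.Method;
    import java.util.Collection;

    public class ReflectiveRead {
        public static class Feed {
            private Collection<String> entries;
            public Collection<String> getEntries() { return entries; }
            public void setEntries(Collection<String> entries) { this.entries = entries; }
        }

        public static void main(String[] args) throws Exception {
            Feed feed = new Feed();
            feed.setEntries(java.util.List.of("a", "b"));
            // "entries" plays the role of parse-field; its getter is resolved at runtime.
            Method reader = new PropertyDescriptor("entries", Feed.class).getReadMethod();
            Collection<?> items = (Collection<?>) reader.invoke(feed);
            System.out.println(items.size()); // 2
        }
    }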
@@ -302,6 +411,8 @@ public class RemoteFetcher {
        if (dataUrlConfiguration.getFieldsUrlConfiguration().getName() != null && key.equals(dataUrlConfiguration.getFieldsUrlConfiguration().getName().replace("'", ""))) return "name";
        if (dataUrlConfiguration.getFieldsUrlConfiguration().getSource() != null && key.equals(dataUrlConfiguration.getFieldsUrlConfiguration().getSource().replace("'", ""))) return "source";
        if (dataUrlConfiguration.getFieldsUrlConfiguration().getCount() != null && key.equals(dataUrlConfiguration.getFieldsUrlConfiguration().getCount().replace("'", ""))) return "count";
        if (dataUrlConfiguration.getFieldsUrlConfiguration().getPath() != null && key.equals(dataUrlConfiguration.getFieldsUrlConfiguration().getPath().replace("'", ""))) return "path";
        if (dataUrlConfiguration.getFieldsUrlConfiguration().getHost() != null && key.equals(dataUrlConfiguration.getFieldsUrlConfiguration().getHost().replace("'", ""))) return "host";
        return null;
    }
@@ -311,8 +422,8 @@ public class RemoteFetcher {
        Map<String, Integer> pagination;

        Results() {
-            this.results = new ArrayList<Map<String, String>>();
-            this.pagination = new HashMap<String, Integer>();
+            this.results = new ArrayList<>();
+            this.pagination = new HashMap<>();
        }

        Results(List<Map<String, String>> results, Map<String, Integer> pagination) {
@@ -52,5 +52,9 @@ public interface DatabaseRepository {

    FunderDao getFunderDao();

    LockDao getLockDao();

    NotificationDao getNotificationDao();

    <T> void detachEntity(T entity);
}
@@ -35,6 +35,8 @@ public class DatabaseRepositoryImpl implements DatabaseRepository {
    private LoginConfirmationEmailDao loginConfirmationEmailDao;
    private ProjectDao projectDao;
    private FunderDao funderDao;
    private LockDao lockDao;
    private NotificationDao notificationDao;

    private EntityManager entityManager;

@@ -273,6 +275,26 @@ public class DatabaseRepositoryImpl implements DatabaseRepository {
        this.funderDao = funderDao;
    }

    @Autowired
    public void setLockDao(LockDao lockDao) {
        this.lockDao = lockDao;
    }

    @Override
    public LockDao getLockDao() {
        return lockDao;
    }

    @Override
    public NotificationDao getNotificationDao() {
        return notificationDao;
    }

    @Autowired
    public void setNotificationDao(NotificationDao notificationDao) {
        this.notificationDao = notificationDao;
    }

    public <T> void detachEntity(T entity) {
        this.entityManager.detach(entity);
    }
@@ -56,7 +56,7 @@ public class ConfirmationEmailServiceImpl implements ConfirmationEmailService {
    }

    private String createContent(UUID confirmationToken, MailService mailService) {
-        String content = mailService.getMailTemplateContent("classpath:emailConfirmation.html");
+        String content = mailService.getMailTemplateContent("classpath:templates/email/emailConfirmation.html");
        content = content.replace("{confirmationToken}", confirmationToken.toString());
        content = content.replace("{expiration_time}", secondsToTime(Integer.parseInt(this.environment.getProperty("conf_email.expiration_time_seconds"))));
        content = content.replace("{host}", this.environment.getProperty("dmp.domain"));
@@ -71,7 +71,7 @@ public class InvitationServiceImpl implements InvitationService {
        return CompletableFuture.runAsync(() -> {
            SimpleMail mail = new SimpleMail();
            mail.setSubject(createSubject(dmp, mailService.getMailTemplateSubject()));
-            mail.setContent(createContent(invitation.getId(), dmp, recipient, mailService.getMailTemplateContent("classpath:email.html")));
+            mail.setContent(createContent(invitation.getId(), dmp, recipient, mailService.getMailTemplateContent("classpath:templates/email/email.html")));
            mail.setTo(invitation.getInvitationEmail());
            try {
                mailService.sendSimpleMail(mail);
@@ -12,6 +12,7 @@ import org.xml.sax.SAXException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;

@@ -47,6 +48,8 @@ public class XmlBuilder {
        StringWriter writer = new StringWriter();
        StreamResult result = new StreamResult(writer);
        DOMSource source = new DOMSource(doc);
        trans.setOutputProperty(OutputKeys.INDENT, "yes");
        trans.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
        trans.transform(source, result);
        return writer.toString();
    } catch (TransformerException e) {
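The two setOutputProperty calls switch the transformer from single-line output to human-readable, two-space-indented XML; the second key is the Xalan-specific indent-amount property. A standalone sketch of the effect, using only standard javax.xml APIs:

    // Minimal demonstration of the pretty-printing properties used above.
    import javax.xml.parsers.DocumentBuilderFactory;
    import javax.xml.transform.OutputKeys;
    import javax.xml.transform.Transformer;
    import javax.xml.transform.TransformerFactory;
    import javax.xml.transform.dom.DOMSource;
    import javax.xml.transform.stream.StreamResult;
    import java.io.StringWriter;
    import org.w3c.dom.Document;
    import org.w3c.dom.Element;

    public class PrettyPrintDemo {
        public static void main(String[] args) throws Exception {
            Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
            Element root = doc.createElement("dmp");
            root.appendChild(doc.createElement("dataset"));
            doc.appendChild(root);

            Transformer trans = TransformerFactory.newInstance().newTransformer();
            trans.setOutputProperty(OutputKeys.INDENT, "yes");
            trans.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
            StringWriter writer = new StringWriter();
            trans.transform(new DOMSource(doc), new StreamResult(writer));
            System.out.println(writer); // <dmp>\n  <dataset/>\n</dmp>, after the XML declaration
        }
    }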
@@ -136,7 +136,11 @@ public class WordBuilder {

    private void createPages(List<DatasetProfilePage> datasetProfilePages, XWPFDocument mainDocumentPart, Boolean createListing, VisibilityRuleService visibilityRuleService) {
        datasetProfilePages.forEach(item -> {
+            try {
                createSections(item.getSections(), mainDocumentPart, ParagraphStyle.HEADER4, 0, createListing, visibilityRuleService);
+            } catch (Exception e) {
+                logger.error(e.getMessage(), e);
+            }
        });
    }

@@ -258,7 +262,7 @@ public class WordBuilder {
                    }
                    return sb.toString();
                } else if (comboboxType.equals("wordlist")) {
-                    return field.getValue().toString();
+                    return field.getValue() != null ? field.getValue().toString() : "";
                }
            }
            case "booleanDecision":

@@ -271,11 +275,9 @@ public class WordBuilder {
                if (field.getValue() == null || field.getValue().equals("false")) return null;
                return data.getLabel();
            case "freetext":
-                return field.getValue().toString();
-            case "textarea":
                return field.getValue().toString();
            case "datepicker":
                return field.getValue().toString();
+            case "textarea":
+                return field.getValue() != null ? field.getValue().toString() : "";
        }
        return null;
    }
Some files were not shown because too many files have changed in this diff.