Compare commits
No commits in common. "master" and "ui-refactoring" have entirely different histories.
master ... ui-refactoring

.env
@@ -1,5 +1,6 @@
PROFILE=docker

# Version of Elastic products
ELK_VERSION=7.17.4
STACK_VERSION=7.17.4
TAG=6.3.0
ENV=prod
PROFILE=production
AOT=aot
ELASTIC_VERSION=6.3.0
ELASTIC_PASSWORD=changeme
@@ -35,17 +35,3 @@ temp/
*.lst
dmp-frontend/.vscode/
*.docx


dmp-frontend/package-lock.json
dmp-backend/logging/target/
ELK.Docker/shared/data-elk/

# Eclipse
.project
.settings/
bin/
*.classpath
openDMP/dmp-backend/uploads/
openDMP/dmp-backend/tmp/
dmp-frontend/.angular/
@@ -1,3 +1,3 @@
ELK_VERSION=7.6.0
# Leave blank to use the "basic" image flavours, which include X-Pack.
# see https://www.elastic.co/subscriptions
TAG=6.3.1
ELASTIC_VERSION=6.3.1
ELASTIC_PASSWORD=changeme
@@ -1,2 +0,0 @@
# Declare files that will always have LF line endings on checkout.
*.sh text eol=lf
@@ -1,21 +1,201 @@
The MIT License (MIT)
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

Copyright (c) 2015 Anthony Lapenna
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
1. Definitions.

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright {yyyy} {name of copyright owner}

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -1,4 +1,25 @@
Init default users and retrieve passwords
# stack-docker
This example Docker Compose configuration demonstrates many components of the
Elastic Stack, all running on a single machine under Docker.

1) connect to elasticsearch container with docker exec -it elastichsearch /bin/bash
2) run ./bin/elasticsearch-setup-passwords auto >./data/passwords.txt (press y and enter when the console shows nothing)
## Prerequisites
- Docker and Compose. Windows and Mac users get Compose installed automatically
  with Docker. Linux users can:
```
pip install docker-compose
```

- At least 4GiB of RAM for the containers. Windows and Mac users _must_
  configure their Docker virtual machine to have more than the default 2 GiB of
  RAM:

![Docker VM memory settings](screenshots/docker-vm-memory-settings.png)

## Starting the stack
Try `docker-compose up` to create a demonstration Elastic Stack with
Elasticsearch, Kibana, Logstash, Auditbeat, Metricbeat, Filebeat, Packetbeat,
and Heartbeat.

Point a browser at [`http://localhost:5601`](http://localhost:5601) to see the results.

Log in with `elastic` / `changeme`.
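A minimal sketch of the password bootstrap described in the README steps above, assuming the container is named `elasticsearch` (as it is in the compose files further down) and that `./data/` inside the container is writable; adjust both to your setup.

```
# Open a shell in the running Elasticsearch container
docker exec -it elasticsearch /bin/bash

# Inside the container: generate passwords for the built-in users
# (elastic, kibana, logstash_system, ...); answer "y" when prompted,
# and keep a copy of the generated passwords for later reference.
./bin/elasticsearch-setup-passwords auto | tee ./data/passwords.txt
```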
@@ -1,87 +0,0 @@
version: '2.4'

services:
  elasticsearch:
    user: 1002:1002 #develuser
    restart: unless-stopped
    mem_limit: 2048m
    environment:
      - cluster.name=open-dmp-cluster
      - bootstrap.memory_lock=true
      - "ES_JAVA_OPTS=-Xmx1024m -Xms1024m"
      - xpack.license.self_generated.type=basic
      - xpack.monitoring.collection.enabled=true
      - xpack.security.enabled=true
    ulimits:
      nproc: 65535
      memlock:
        soft: -1
        hard: -1
    volumes:
      - ./shared/config-elk/elasticsearch/config/log4j2.properties:/usr/share/elasticsearch/config/log4j2.properties:ro
      - ./shared/config-elk/elasticsearch/config/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml:ro
      - ./shared/data-elk/elasticsearch-01-data:/usr/share/elasticsearch/data
      - ./shared/data-elk/elasticsearch-01-log:/usr/share/elasticsearch/logs
    #ports:
    #  - 51056:9200
    #  - 51057:9300
    ports:
      - "9200:9200"
    expose:
      - "9300"
    networks:
      open-dmp-elk-network:

  logstash:
    # user: 1002:1002 #develuser
    volumes:
      - ./shared/config-elk/logstash/config/logstash.yml:/usr/share/logstash/config/logstash.yml:ro
      - ./shared/config-elk/logstash/config/pipelines.yml:/usr/share/logstash/config/pipelines.yml:ro
      - ./shared/config-elk/logstash/config/log4j2.properties:/usr/share/logstash/config/log4j2.properties:ro
      - ./shared/config-elk/logstash/pipeline:/usr/share/logstash/pipeline:ro
      - ./shared/config-elk/logstash/logstash/templates:/usr/share/logstash/templates
      - ./shared/data-elk/logstash-log:/usr/share/logstash/logs
      - ./shared/data-elk/logstash-queue:/usr/share/logstash/queue
      - ./shared/data-elk/logstash-dead_letter_queue:/usr/share/logstash/dead_letter_queue
    expose:
      - "31311"
      - "31312"
    restart: on-failure
    mem_limit: 2048m
    environment:
      - LS_JAVA_OPTS=-Xmx1024m -Xms1024m
      - xpack.license.self_generated.type=basic
      - xpack.security.enabled=true
    networks:
      open-dmp-elk-network:

  kibana:
    # user: 1002:1002 #develuser
    mem_limit: 512m
    environment:
      - xpack.license.self_generated.type=basic
      - xpack.security.enabled=true

    volumes:
      - ./shared/config-elk/kibana/config:/usr/share/kibana/config:ro
      #- ./shared/config-elk/kibana/certificates:/usr/share/kibana/certificates
    restart: unless-stopped
    ports:
      - "51058:5601"
    networks:
      - open-dmp-elk-network

  filebeat:
    restart: unless-stopped
    mem_limit: 256m
    #command: [ "-e=false" ] # to overwrite the -e that disables logging to file!
    volumes:
      - ./shared/config-elk/filebeat/config/filebeat.yml:/usr/share/filebeat/filebeat.yml:ro
      - ~/openDMP/logs:/usr/share/filebeat/log_data/dmp/
      - ./shared/data-elk/filebeat-log:/usr/share/filebeat/logs
      - ./shared/data-elk/filebeat-data:/usr/share/filebeat/data #For windows if we mount the data directory we get "Writing of registry returned error: sync /usr/share/filebeat/data/registry/filebeat: invalid argument."
    networks:
      - open-dmp-elk-network

networks:
  open-dmp-elk-network:
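If you want to confirm that `bootstrap.memory_lock` and the memlock ulimits from this compose file actually took effect on the Elasticsearch node, a quick check along these lines works; the `elastic`/`changeme` credentials and port 9200 are the defaults used elsewhere in this diff.

```
# Ask Elasticsearch whether memory locking is active
# (authentication is needed because xpack.security.enabled=true).
curl -s -u elastic:changeme "http://localhost:9200/_nodes?filter_path=**.mlockall&pretty"

# Check the effective memlock ulimit inside the container.
docker exec elasticsearch sh -c 'ulimit -l'
```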
@@ -1,43 +1,171 @@
version: '2.4'

---
version: '3'
services:
  # The environment variable "TAG" is used throughout this file to
  # specify the version of the images to run. The default is set in the
  # '.env' file in this folder. It can be overridden with any normal
  # technique for setting environment variables, for example:
  #
  #   TAG=6.0.0-beta1 docker-compose up
  #
  # REF: https://docs.docker.com/compose/compose-file/#variable-substitution
  #
  # Also be sure to set the ELASTIC_VERSION variable. For released versions,
  # ${TAG} and ${ELASTIC_VERSION} will be identical, but for pre-release
  # versions, ${TAG} might contain an extra build identifier, like
  # "6.0.0-beta1-3eab5b40", so a full invocation might look like:
  #
  #   ELASTIC_VERSION=6.0.0-beta1 TAG=6.0.0-beta1-3eab5b40 docker-compose up
  #
  elasticsearch:
    image: ${DOCKER_REGISTRY}elasticsearch
    image: docker.elastic.co/elasticsearch/elasticsearch:${TAG}
    container_name: elasticsearch
    build:
      context: elasticsearch/
      args:
        ELK_VERSION: $ELK_VERSION
    healthcheck:
      # test: curl --cacert /usr/share/elasticsearch/config/certificates/ca/ca.crt -s https://localhost:9200 >/dev/null; if [[ $$? == 52 ]]; then echo 0; else echo 1; fi
      interval: 30s
      timeout: 10s
      retries: 5

  logstash:
    image: ${DOCKER_REGISTRY}logstash
    container_name: logstash
    build:
      context: logstash/
      args:
        ELK_VERSION: $ELK_VERSION
    depends_on:
      - elasticsearch
    #volumes:
    #  - esdata:/usr/share/elasticsearch/data
    environment: ['http.host=0.0.0.0', 'transport.host=127.0.0.1', 'ELASTIC_PASSWORD=${ELASTIC_PASSWORD}']
    ports: ['0.0.0.0:9200:9200']
    networks: ['stack']

  kibana:
    image: ${DOCKER_REGISTRY}kibana
    build:
      context: kibana/
      args:
        ELK_VERSION: $ELK_VERSION
    depends_on:
      - elasticsearch
  filebeat:
    image: ${DOCKER_REGISTRY}filebeat
    build:
      context: filebeat/
      args:
        ELK_VERSION: $ELK_VERSION
    depends_on:
      - logstash
    image: docker.elastic.co/kibana/kibana:${TAG}
    container_name: kibana
    ports: ['0.0.0.0:5601:5601']
    networks: ['stack']
    depends_on: ['elasticsearch']

  logstash:
    image: docker.elastic.co/logstash/logstash:${TAG}
    container_name: logstash
    # Provide a simple pipeline configuration for Logstash with a bind-mounted file.
    volumes:
      - ./config/logstash.conf:/usr/share/logstash/pipeline/logstash.conf
    ports: ['0.0.0.0:31311:31311']

    networks: ['stack']
    depends_on: ['elasticsearch', 'setup_logstash']

  filebeat:
    image: docker.elastic.co/beats/filebeat:${TAG}
    container_name: filebeat
    command: -e -E 'output.elasticsearch.password=${ELASTIC_PASSWORD}'
    # If the host system has logs at "/var/log", mount them at "/mnt/log"
    # inside the container, where Filebeat can find them.
    # volumes: ['/var/log:/mnt/log:ro']
    networks: ['stack']
    depends_on: ['elasticsearch', 'setup_filebeat']

  heartbeat:
    image: docker.elastic.co/beats/heartbeat:${TAG}
    container_name: heartbeat
    command: -e -E 'output.elasticsearch.password=${ELASTIC_PASSWORD}'
    networks: ['stack']
    depends_on: ['elasticsearch', 'setup_heartbeat']

  # Run a short-lived container to set up Logstash.
  setup_logstash:
    image: centos:7
    container_name: setup_logstash
    volumes: ['./scripts/setup-logstash.sh:/usr/local/bin/setup-logstash.sh:ro']
    # The script may have CR/LF line endings if using Docker for Windows, so
    # make sure that they don't confuse Bash.
    command: ['/bin/bash', '-c', 'cat /usr/local/bin/setup-logstash.sh | tr -d "\r" | bash']
    environment: ['ELASTIC_PASSWORD=${ELASTIC_PASSWORD}']
    networks: ['stack']
    depends_on: ['elasticsearch']

  setup_kibana:
    image: centos:7
    container_name: setup_kibana
    volumes: ['./scripts/setup-kibana.sh:/usr/local/bin/setup-kibana.sh:ro']
    command: ['/bin/bash', '-c', 'cat /usr/local/bin/setup-kibana.sh | tr -d "\r" | bash']
    environment: ['ELASTIC_PASSWORD=${ELASTIC_PASSWORD}']
    networks: ['stack']
    depends_on: ['elasticsearch']

  setup_filebeat:
    image: docker.elastic.co/beats/filebeat:${TAG}
    container_name: setup_filebeat
    volumes: ['./scripts/setup-beat.sh:/usr/local/bin/setup-beat.sh:ro']
    command: ['/bin/bash', '-c', 'cat /usr/local/bin/setup-beat.sh | tr -d "\r" | bash -s filebeat']
    environment: ['ELASTIC_PASSWORD=${ELASTIC_PASSWORD}']
    networks: ['stack']
    depends_on: ['kibana']

  setup_heartbeat:
    image: docker.elastic.co/beats/heartbeat:${TAG}
    container_name: setup_heartbeat
    volumes: ['./scripts/setup-beat.sh:/usr/local/bin/setup-beat.sh:ro']
    command: ['/bin/bash', '-c', 'cat /usr/local/bin/setup-beat.sh | tr -d "\r" | bash -s heartbeat']
    environment: ['ELASTIC_PASSWORD=${ELASTIC_PASSWORD}']
    networks: ['stack']
    depends_on: ['kibana']


  ##########################DOCSBOX######################################################################
  web:
    restart: always
    build: ./docsbox-master/docsbox
    expose:
      - "8000"
    links:
      - redis:redis
    volumes:
      - docsbox:/home/docsbox
      - media:/home/docsbox/media
    command: gunicorn -b :8000 docsbox:app
    networks: ['stack']

  rqworker:
    restart: always
    build: ./docsbox-master/docsbox
    links:
      - redis:redis
    volumes:
      - web
    command: rq worker -c docsbox.settings
    networks: ['stack']

  rqscheduler:
    restart: always
    build: ./docsbox-master/docsbox
    links:
      - redis:redis
    volumes:
      - web
    command: rqscheduler -H redis -p 6379 -d 0
    networks: ['stack']

  nginx:
    restart: always
    build: ./docsbox-master/nginx/
    ports:
      - "81:80"
    volumes:
      - web
    links:
      - web:web
    networks: ['stack']

  redis:
    restart: always
    image: redis:latest
    expose:
      - "6379"
    volumes:
      - redisdata:/data
    networks: ['stack']


##########################SETTIGNS######################################################################

volumes:
  #esdata:
  #driver: local
  redisdata:
    driver: local
  docsbox:
    driver: local
  media:
    driver: local
networks: {stack: {}}
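The comments at the top of this compose file describe how `TAG` and `ELASTIC_VERSION` are substituted from `.env` or from the shell. A hedged sketch of trying that out, reusing version numbers that appear elsewhere in this diff, could look like this:

```
# Override the image version for a single invocation instead of editing .env
# (6.3.1 and changeme are the values from the deleted ELK.Docker .env below).
export TAG=6.3.1
export ELASTIC_VERSION=6.3.1
export ELASTIC_PASSWORD=changeme

# Render the compose file with variables resolved to verify the image tags...
docker-compose config | grep 'image:'

# ...then start the stack in the background.
docker-compose up -d
```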
@@ -1,22 +0,0 @@
ARG ELK_VERSION

# https://github.com/elastic/elasticsearch-docker
FROM docker.elastic.co/elasticsearch/elasticsearch:${ELK_VERSION}

RUN /usr/share/elasticsearch/bin/elasticsearch-plugin install analysis-icu && \
    /usr/share/elasticsearch/bin/elasticsearch-plugin install analysis-phonetic

RUN groupmod -g 1002 elasticsearch
RUN usermod -u 1002 -g 1002 elasticsearch
RUN chown -R elasticsearch /usr/share/elasticsearch
RUN sed -i -e 's/--userspec=1000/--userspec=1002/g' \
    -e 's/UID 1000/UID 1002/' \
    -e 's/chown -R 1000/chown -R 1002/' /usr/local/bin/docker-entrypoint.sh
RUN chown elasticsearch /usr/local/bin/docker-entrypoint.sh

ENV JAVA_HOME /usr/share/elasticsearch/jdk

# RUN mkdir /usr/share/elasticsearch/custom-plugins
# COPY plugins/elasticsearch-analysis-greeklish-7.5.1.zip /usr/share/elasticsearch/custom-plugins/elasticsearch-analysis-greeklish-7.5.1.zip

# RUN /usr/share/elasticsearch/bin/elasticsearch-plugin install file:///usr/share/elasticsearch/custom-plugins/elasticsearch-analysis-greeklish-7.5.1.zip
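A small sketch for checking that the ICU and phonetic analysis plugins installed by this Dockerfile actually ended up in the built image; the image name and version tag here are illustrative, not names used by the repository.

```
# Build the custom Elasticsearch image (name and version are illustrative).
docker build --build-arg ELK_VERSION=7.17.4 -t opendmp-elasticsearch:7.17.4 elasticsearch/

# List the plugins baked into the image; analysis-icu and analysis-phonetic
# should both appear in the output.
docker run --rm opendmp-elasticsearch:7.17.4 bin/elasticsearch-plugin list
```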
@@ -1,15 +0,0 @@
ARG ELK_VERSION

FROM docker.elastic.co/beats/filebeat:${ELK_VERSION}

# USER root
# RUN groupmod -g 1002 filebeat
# RUN usermod -u 1002 -g 1002 filebeat
# RUN chown -R filebeat /usr/share/filebeat
# RUN sed -i -e 's/--userspec=1000/--userspec=1002/g' \
#     -e 's/UID 1000/UID 1002/' \
#     -e 's/chown -R 1000/chown -R 1002/' /usr/local/bin/docker-entrypoint
# RUN chown filebeat /usr/local/bin/docker-entrypoint

# USER 1002:1002
@@ -1,15 +0,0 @@
ARG ELK_VERSION

# https://github.com/elastic/kibana-docker
FROM docker.elastic.co/kibana/kibana:${ELK_VERSION}

# USER root
# RUN groupmod -g 1002 kibana
# RUN usermod -g 1002 root
# RUN usermod -u 1002 -g 1002 kibana
# RUN chown -R kibana /usr/share/kibana

# USER 1002:1002

# Add your kibana plugins setup here
# Example: RUN kibana-plugin install <name|url>
@@ -1,20 +0,0 @@
ARG ELK_VERSION

# https://github.com/elastic/logstash-docker
FROM docker.elastic.co/logstash/logstash:${ELK_VERSION}

# USER root
# RUN groupmod -g 1002 logstash
# RUN usermod -u 1002 -g 1002 logstash
# RUN chown -R logstash /usr/share/logstash
# RUN sed -i -e 's/--userspec=1000/--userspec=1002/g' \
#     -e 's/UID 1000/UID 1002/' \
#     -e 's/chown -R 1000/chown -R 1002/' /usr/local/bin/docker-entrypoint
# RUN chown logstash /usr/local/bin/docker-entrypoint

# USER 1002:1002

# Add your logstash plugins setup here
# Example: RUN logstash-plugin install logstash-filter-json
RUN logstash-plugin update logstash-input-beats
RUN logstash-plugin update logstash-filter-grok
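To confirm that the `logstash-plugin update` steps in this image took effect, something along these lines works; again the image name and tag are illustrative.

```
# Build the custom Logstash image (name and version are illustrative).
docker build --build-arg ELK_VERSION=7.17.4 -t opendmp-logstash:7.17.4 logstash/

# Show the installed version of the updated beats input plugin.
docker run --rm opendmp-logstash:7.17.4 bin/logstash-plugin list --verbose logstash-input-beats
```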
@@ -1,3 +0,0 @@
TAG=6.3.1
ELASTIC_VERSION=6.3.1
ELASTIC_PASSWORD=changeme
@@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright {yyyy} {name of copyright owner}

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@@ -1,25 +0,0 @@
# stack-docker
This example Docker Compose configuration demonstrates many components of the
Elastic Stack, all running on a single machine under Docker.

## Prerequisites
- Docker and Compose. Windows and Mac users get Compose installed automatically
  with Docker. Linux users can:
```
pip install docker-compose
```

- At least 4GiB of RAM for the containers. Windows and Mac users _must_
  configure their Docker virtual machine to have more than the default 2 GiB of
  RAM:

![Docker VM memory settings](screenshots/docker-vm-memory-settings.png)

## Starting the stack
Try `docker-compose up` to create a demonstration Elastic Stack with
Elasticsearch, Kibana, Logstash, Auditbeat, Metricbeat, Filebeat, Packetbeat,
and Heartbeat.

Point a browser at [`http://localhost:5601`](http://localhost:5601) to see the results.

Log in with `elastic` / `changeme`.
@@ -1,171 +0,0 @@
---
version: '3'
services:
  # The environment variable "TAG" is used throughout this file to
  # specify the version of the images to run. The default is set in the
  # '.env' file in this folder. It can be overridden with any normal
  # technique for setting environment variables, for example:
  #
  #   TAG=6.0.0-beta1 docker-compose up
  #
  # REF: https://docs.docker.com/compose/compose-file/#variable-substitution
  #
  # Also be sure to set the ELASTIC_VERSION variable. For released versions,
  # ${TAG} and ${ELASTIC_VERSION} will be identical, but for pre-release
  # versions, ${TAG} might contain an extra build identifier, like
  # "6.0.0-beta1-3eab5b40", so a full invocation might look like:
  #
  #   ELASTIC_VERSION=6.0.0-beta1 TAG=6.0.0-beta1-3eab5b40 docker-compose up
  #
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:${TAG}
    container_name: elasticsearch
    #volumes:
    #  - esdata:/usr/share/elasticsearch/data
    environment: ['http.host=0.0.0.0', 'transport.host=127.0.0.1', 'ELASTIC_PASSWORD=${ELASTIC_PASSWORD}']
    ports: ['0.0.0.0:9200:9200']
    networks: ['stack']

  kibana:
    image: docker.elastic.co/kibana/kibana:${TAG}
    container_name: kibana
    ports: ['0.0.0.0:5601:5601']
    networks: ['stack']
    depends_on: ['elasticsearch']

  logstash:
    image: docker.elastic.co/logstash/logstash:${TAG}
    container_name: logstash
    # Provide a simple pipeline configuration for Logstash with a bind-mounted file.
    volumes:
      - ./config/logstash.conf:/usr/share/logstash/pipeline/logstash.conf
    ports: ['0.0.0.0:31311:31311']

    networks: ['stack']
    depends_on: ['elasticsearch', 'setup_logstash']

  filebeat:
    image: docker.elastic.co/beats/filebeat:${TAG}
    container_name: filebeat
    command: -e -E 'output.elasticsearch.password=${ELASTIC_PASSWORD}'
    # If the host system has logs at "/var/log", mount them at "/mnt/log"
    # inside the container, where Filebeat can find them.
    # volumes: ['/var/log:/mnt/log:ro']
    networks: ['stack']
    depends_on: ['elasticsearch', 'setup_filebeat']

  heartbeat:
    image: docker.elastic.co/beats/heartbeat:${TAG}
    container_name: heartbeat
    command: -e -E 'output.elasticsearch.password=${ELASTIC_PASSWORD}'
    networks: ['stack']
    depends_on: ['elasticsearch', 'setup_heartbeat']

  # Run a short-lived container to set up Logstash.
  setup_logstash:
    image: centos:7
    container_name: setup_logstash
    volumes: ['./scripts/setup-logstash.sh:/usr/local/bin/setup-logstash.sh:ro']
    # The script may have CR/LF line endings if using Docker for Windows, so
    # make sure that they don't confuse Bash.
    command: ['/bin/bash', '-c', 'cat /usr/local/bin/setup-logstash.sh | tr -d "\r" | bash']
    environment: ['ELASTIC_PASSWORD=${ELASTIC_PASSWORD}']
    networks: ['stack']
    depends_on: ['elasticsearch']

  setup_kibana:
    image: centos:7
    container_name: setup_kibana
    volumes: ['./scripts/setup-kibana.sh:/usr/local/bin/setup-kibana.sh:ro']
    command: ['/bin/bash', '-c', 'cat /usr/local/bin/setup-kibana.sh | tr -d "\r" | bash']
    environment: ['ELASTIC_PASSWORD=${ELASTIC_PASSWORD}']
    networks: ['stack']
    depends_on: ['elasticsearch']

  setup_filebeat:
    image: docker.elastic.co/beats/filebeat:${TAG}
    container_name: setup_filebeat
    volumes: ['./scripts/setup-beat.sh:/usr/local/bin/setup-beat.sh:ro']
    command: ['/bin/bash', '-c', 'cat /usr/local/bin/setup-beat.sh | tr -d "\r" | bash -s filebeat']
    environment: ['ELASTIC_PASSWORD=${ELASTIC_PASSWORD}']
    networks: ['stack']
    depends_on: ['kibana']

  setup_heartbeat:
    image: docker.elastic.co/beats/heartbeat:${TAG}
    container_name: setup_heartbeat
    volumes: ['./scripts/setup-beat.sh:/usr/local/bin/setup-beat.sh:ro']
    command: ['/bin/bash', '-c', 'cat /usr/local/bin/setup-beat.sh | tr -d "\r" | bash -s heartbeat']
    environment: ['ELASTIC_PASSWORD=${ELASTIC_PASSWORD}']
    networks: ['stack']
    depends_on: ['kibana']


  ##########################DOCSBOX######################################################################
  # web:
  #   restart: always
  #   build: ./docsbox-master/docsbox
  #   expose:
  #     - "8000"
  #   links:
  #     - redis:redis
  #   volumes:
  #     - docsbox:/home/docsbox
  #     - media:/home/docsbox/media
  #   command: gunicorn -b :8000 docsbox:app
  #   networks: ['stack']
  #
  # rqworker:
  #   restart: always
  #   build: ./docsbox-master/docsbox
  #   links:
  #     - redis:redis
  #   volumes:
  #     - web
  #   command: rq worker -c docsbox.settings
  #   networks: ['stack']
  #
  # rqscheduler:
  #   restart: always
  #   build: ./docsbox-master/docsbox
  #   links:
  #     - redis:redis
  #   volumes:
  #     - web
  #   command: rqscheduler -H redis -p 6379 -d 0
  #   networks: ['stack']
  #
  # nginx:
  #   restart: always
  #   build: ./docsbox-master/nginx/
  #   ports:
  #     - "81:80"
  #   volumes:
  #     - web
  #   links:
  #     - web:web
  #   networks: ['stack']
  #
  # redis:
  #   restart: always
  #   image: redis:latest
  #   expose:
  #     - "6379"
  #   volumes:
  #     - redisdata:/data
  #   networks: ['stack']


##########################SETTIGNS######################################################################

volumes:
  #esdata:
  #driver: local
  redisdata:
    driver: local
  docsbox:
    driver: local
  media:
    driver: local
networks: {stack: {}}

(binary image changed: Before 46 KiB | After 46 KiB)
@@ -1,20 +0,0 @@
---
## Default Elasticsearch configuration from elasticsearch-docker.
## from https://github.com/elastic/elasticsearch-docker/blob/master/build/elasticsearch/elasticsearch.yml
#
network.host: 0.0.0.0

# minimum_master_nodes need to be explicitly set when bound on a public IP
# set to 1 to allow single node clusters
# Details: https://github.com/elastic/elasticsearch/pull/17288
# discovery.zen.minimum_master_nodes: 1

## Use single node discovery in order to disable production mode and avoid bootstrap checks
## see https://www.elastic.co/guide/en/elasticsearch/reference/current/bootstrap-checks.html
#
discovery.type: single-node
## Search Guard
#
cluster.routing.allocation.disk.watermark.flood_stage: 99%
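Once a node configured like this is running, the effect of `discovery.type: single-node` and the raised flood-stage watermark can be checked with the standard cluster APIs; the `elastic`/`changeme` credentials below are the defaults that appear elsewhere in this diff.

```
# Cluster health should report a single node.
curl -s -u elastic:changeme "http://localhost:9200/_cluster/health?pretty"

# Show the settings the node actually picked up, including defaults.
curl -s -u elastic:changeme "http://localhost:9200/_cluster/settings?include_defaults=true&flat_settings=true&pretty" \
  | grep -E "discovery.type|flood_stage"
```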
@@ -1,179 +0,0 @@
#https://github.com/elastic/elasticsearch/blob/7.4/distribution/src/config/log4j2.properties

status = error

# log action execution errors for easier debugging
logger.action.name = org.elasticsearch.action
logger.action.level = debug

appender.console.type = Console
appender.console.name = console
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n

######## Server JSON ############################
appender.rolling.type = RollingFile
appender.rolling.name = rolling
appender.rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_server.json
appender.rolling.layout.type = ESJsonLayout
appender.rolling.layout.type_name = server

appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}-%i.json.gz
appender.rolling.policies.type = Policies
appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
appender.rolling.policies.time.interval = 1
appender.rolling.policies.time.modulate = true
appender.rolling.policies.size.type = SizeBasedTriggeringPolicy
appender.rolling.policies.size.size = 128MB
appender.rolling.strategy.type = DefaultRolloverStrategy
appender.rolling.strategy.fileIndex = nomax
appender.rolling.strategy.action.type = Delete
appender.rolling.strategy.action.basepath = ${sys:es.logs.base_path}
appender.rolling.strategy.action.condition.type = IfFileName
appender.rolling.strategy.action.condition.glob = ${sys:es.logs.cluster_name}-*
appender.rolling.strategy.action.condition.nested_condition.type = IfAccumulatedFileSize
appender.rolling.strategy.action.condition.nested_condition.exceeds = 2GB
################################################
######## Server - old style pattern ###########
appender.rolling_old.type = RollingFile
appender.rolling_old.name = rolling_old
appender.rolling_old.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log
appender.rolling_old.layout.type = PatternLayout
appender.rolling_old.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n

appender.rolling_old.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}-%i.log.gz
appender.rolling_old.policies.type = Policies
appender.rolling_old.policies.time.type = TimeBasedTriggeringPolicy
appender.rolling_old.policies.time.interval = 1
appender.rolling_old.policies.time.modulate = true
appender.rolling_old.policies.size.type = SizeBasedTriggeringPolicy
appender.rolling_old.policies.size.size = 128MB
appender.rolling_old.strategy.type = DefaultRolloverStrategy
appender.rolling_old.strategy.fileIndex = nomax
appender.rolling_old.strategy.action.type = Delete
appender.rolling_old.strategy.action.basepath = ${sys:es.logs.base_path}
appender.rolling_old.strategy.action.condition.type = IfFileName
appender.rolling_old.strategy.action.condition.glob = ${sys:es.logs.cluster_name}-*
appender.rolling_old.strategy.action.condition.nested_condition.type = IfAccumulatedFileSize
appender.rolling_old.strategy.action.condition.nested_condition.exceeds = 2GB
################################################

rootLogger.level = info
rootLogger.appenderRef.console.ref = console
rootLogger.appenderRef.rolling.ref = rolling
rootLogger.appenderRef.rolling_old.ref = rolling_old

######## Deprecation JSON #######################
appender.deprecation_rolling.type = RollingFile
appender.deprecation_rolling.name = deprecation_rolling
appender.deprecation_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.json
appender.deprecation_rolling.layout.type = ESJsonLayout
appender.deprecation_rolling.layout.type_name = deprecation
appender.deprecation_rolling.layout.esmessagefields=x-opaque-id

appender.deprecation_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation-%i.json.gz
appender.deprecation_rolling.policies.type = Policies
appender.deprecation_rolling.policies.size.type = SizeBasedTriggeringPolicy
appender.deprecation_rolling.policies.size.size = 1GB
appender.deprecation_rolling.strategy.type = DefaultRolloverStrategy
appender.deprecation_rolling.strategy.max = 4
#################################################
######## Deprecation - old style pattern #######
appender.deprecation_rolling_old.type = RollingFile
appender.deprecation_rolling_old.name = deprecation_rolling_old
appender.deprecation_rolling_old.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}_deprecation.log
appender.deprecation_rolling_old.layout.type = PatternLayout
appender.deprecation_rolling_old.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n

appender.deprecation_rolling_old.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
  _deprecation-%i.log.gz
appender.deprecation_rolling_old.policies.type = Policies
appender.deprecation_rolling_old.policies.size.type = SizeBasedTriggeringPolicy
appender.deprecation_rolling_old.policies.size.size = 1GB
appender.deprecation_rolling_old.strategy.type = DefaultRolloverStrategy
appender.deprecation_rolling_old.strategy.max = 4
#################################################
logger.deprecation.name = org.elasticsearch.deprecation
logger.deprecation.level = warn
logger.deprecation.appenderRef.deprecation_rolling.ref = deprecation_rolling
logger.deprecation.appenderRef.deprecation_rolling_old.ref = deprecation_rolling_old
logger.deprecation.additivity = false

######## Search slowlog JSON ####################
appender.index_search_slowlog_rolling.type = RollingFile
appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling
appender.index_search_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs\
  .cluster_name}_index_search_slowlog.json
appender.index_search_slowlog_rolling.layout.type = ESJsonLayout
appender.index_search_slowlog_rolling.layout.type_name = index_search_slowlog
appender.index_search_slowlog_rolling.layout.esmessagefields=message,took,took_millis,total_hits,types,stats,search_type,total_shards,source,id

appender.index_search_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs\
  .cluster_name}_index_search_slowlog-%i.json.gz
appender.index_search_slowlog_rolling.policies.type = Policies
appender.index_search_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy
appender.index_search_slowlog_rolling.policies.size.size = 1GB
appender.index_search_slowlog_rolling.strategy.type = DefaultRolloverStrategy
appender.index_search_slowlog_rolling.strategy.max = 4
#################################################
######## Search slowlog - old style pattern ####
appender.index_search_slowlog_rolling_old.type = RollingFile
appender.index_search_slowlog_rolling_old.name = index_search_slowlog_rolling_old
appender.index_search_slowlog_rolling_old.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
  _index_search_slowlog.log
appender.index_search_slowlog_rolling_old.layout.type = PatternLayout
appender.index_search_slowlog_rolling_old.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n

appender.index_search_slowlog_rolling_old.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
  _index_search_slowlog-%i.log.gz
appender.index_search_slowlog_rolling_old.policies.type = Policies
appender.index_search_slowlog_rolling_old.policies.size.type = SizeBasedTriggeringPolicy
appender.index_search_slowlog_rolling_old.policies.size.size = 1GB
appender.index_search_slowlog_rolling_old.strategy.type = DefaultRolloverStrategy
appender.index_search_slowlog_rolling_old.strategy.max = 4
#################################################
logger.index_search_slowlog_rolling.name = index.search.slowlog
logger.index_search_slowlog_rolling.level = trace
logger.index_search_slowlog_rolling.appenderRef.index_search_slowlog_rolling.ref = index_search_slowlog_rolling
logger.index_search_slowlog_rolling.appenderRef.index_search_slowlog_rolling_old.ref = index_search_slowlog_rolling_old
logger.index_search_slowlog_rolling.additivity = false

######## Indexing slowlog JSON ##################
appender.index_indexing_slowlog_rolling.type = RollingFile
appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling
appender.index_indexing_slowlog_rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
  _index_indexing_slowlog.json
appender.index_indexing_slowlog_rolling.layout.type = ESJsonLayout
appender.index_indexing_slowlog_rolling.layout.type_name = index_indexing_slowlog
appender.index_indexing_slowlog_rolling.layout.esmessagefields=message,took,took_millis,doc_type,id,routing,source

appender.index_indexing_slowlog_rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
  _index_indexing_slowlog-%i.json.gz
appender.index_indexing_slowlog_rolling.policies.type = Policies
appender.index_indexing_slowlog_rolling.policies.size.type = SizeBasedTriggeringPolicy
appender.index_indexing_slowlog_rolling.policies.size.size = 1GB
appender.index_indexing_slowlog_rolling.strategy.type = DefaultRolloverStrategy
appender.index_indexing_slowlog_rolling.strategy.max = 4
#################################################
######## Indexing slowlog - old style pattern ##
appender.index_indexing_slowlog_rolling_old.type = RollingFile
appender.index_indexing_slowlog_rolling_old.name = index_indexing_slowlog_rolling_old
appender.index_indexing_slowlog_rolling_old.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
  _index_indexing_slowlog.log
appender.index_indexing_slowlog_rolling_old.layout.type = PatternLayout
appender.index_indexing_slowlog_rolling_old.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] [%node_name]%marker %m%n

appender.index_indexing_slowlog_rolling_old.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}\
  _index_indexing_slowlog-%i.log.gz
appender.index_indexing_slowlog_rolling_old.policies.type = Policies
appender.index_indexing_slowlog_rolling_old.policies.size.type = SizeBasedTriggeringPolicy
appender.index_indexing_slowlog_rolling_old.policies.size.size = 1GB
appender.index_indexing_slowlog_rolling_old.strategy.type = DefaultRolloverStrategy
appender.index_indexing_slowlog_rolling_old.strategy.max = 4
#################################################

logger.index_indexing_slowlog.name = index.indexing.slowlog.index
|
||||
logger.index_indexing_slowlog.level = trace
|
||||
logger.index_indexing_slowlog.appenderRef.index_indexing_slowlog_rolling.ref = index_indexing_slowlog_rolling
|
||||
logger.index_indexing_slowlog.appenderRef.index_indexing_slowlog_rolling_old.ref = index_indexing_slowlog_rolling_old
|
||||
logger.index_indexing_slowlog.additivity = false
|
|
@ -1,16 +0,0 @@
|
|||
#filebeat.registry_file: /usr/share/filebeat/registry
|
||||
filebeat.inputs:
|
||||
- type: log
|
||||
paths:
|
||||
- /usr/share/filebeat/log_data/dmp/openDMP*.log
|
||||
tags: ["audit"]
|
||||
enabled: true
|
||||
reload.enabled: true
|
||||
reload.period: 10s
|
||||
multiline.pattern: '^[0-9]{4}-[0-9]{2}-[0-9]{2}'
|
||||
multiline.negate: true
|
||||
multiline.match: after
|
||||
|
||||
output.logstash:
|
||||
hosts: ["logstash:31312"]
|
||||
bulk_max_size: 128
|
|
@ -1,17 +0,0 @@
|
|||
---
|
||||
## Default Kibana configuration from kibana-docker.
|
||||
## from https://github.com/elastic/kibana-docker/blob/master/build/kibana/config/kibana.yml
|
||||
#
|
||||
server.name: kibana
|
||||
server.host: "0"
|
||||
## Custom configuration
|
||||
#
|
||||
#server.basePath: "/eformslogs"
|
||||
elasticsearch.hosts: [ "http://elasticsearch:9200" ]
|
||||
#elasticsearch.ssl.certificateAuthorities: [ "/usr/share/kibana/certificate_authorities/ca.crt" ]
|
||||
|
||||
elasticsearch.username: "kibana"
|
||||
elasticsearch.password: ""
|
||||
server.ssl.enabled: false
|
||||
#server.ssl.key: "/usr/share/kibana/certificates/kibana.key"
|
||||
#server.ssl.certificate: "/usr/share/kibana/certificates/kibana.crt"
|
|
@ -1,103 +0,0 @@
|
|||
#https://github.com/elastic/logstash/blob/7.4/config/log4j2.properties
|
||||
|
||||
status = error
|
||||
name = LogstashPropertiesConfig
|
||||
|
||||
appender.console.type = Console
|
||||
appender.console.name = plain_console
|
||||
appender.console.layout.type = PatternLayout
|
||||
appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c]%notEmpty{[%X{pipeline.id}]} %m%n
|
||||
|
||||
appender.json_console.type = Console
|
||||
appender.json_console.name = json_console
|
||||
appender.json_console.layout.type = JSONLayout
|
||||
appender.json_console.layout.compact = true
|
||||
appender.json_console.layout.eventEol = true
|
||||
|
||||
appender.rolling.type = RollingFile
|
||||
appender.rolling.name = plain_rolling
|
||||
appender.rolling.fileName = ${sys:ls.logs}/logstash-${sys:ls.log.format}.log
|
||||
appender.rolling.filePattern = ${sys:ls.logs}/logstash-${sys:ls.log.format}-%d{yyyy-MM-dd}-%i.log.gz
|
||||
appender.rolling.policies.type = Policies
|
||||
appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
|
||||
appender.rolling.policies.time.interval = 1
|
||||
appender.rolling.policies.time.modulate = true
|
||||
appender.rolling.layout.type = PatternLayout
|
||||
appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c]%notEmpty{[%X{pipeline.id}]} %m%n
|
||||
appender.rolling.policies.size.type = SizeBasedTriggeringPolicy
|
||||
appender.rolling.policies.size.size = 100MB
|
||||
appender.rolling.strategy.type = DefaultRolloverStrategy
|
||||
appender.rolling.strategy.max = 30
|
||||
|
||||
appender.json_rolling.type = RollingFile
|
||||
appender.json_rolling.name = json_rolling
|
||||
appender.json_rolling.fileName = ${sys:ls.logs}/logstash-${sys:ls.log.format}.log
|
||||
appender.json_rolling.filePattern = ${sys:ls.logs}/logstash-${sys:ls.log.format}-%d{yyyy-MM-dd}-%i.log.gz
|
||||
appender.json_rolling.policies.type = Policies
|
||||
appender.json_rolling.policies.time.type = TimeBasedTriggeringPolicy
|
||||
appender.json_rolling.policies.time.interval = 1
|
||||
appender.json_rolling.policies.time.modulate = true
|
||||
appender.json_rolling.layout.type = JSONLayout
|
||||
appender.json_rolling.layout.compact = true
|
||||
appender.json_rolling.layout.eventEol = true
|
||||
appender.json_rolling.policies.size.type = SizeBasedTriggeringPolicy
|
||||
appender.json_rolling.policies.size.size = 100MB
|
||||
appender.json_rolling.strategy.type = DefaultRolloverStrategy
|
||||
appender.json_rolling.strategy.max = 30
|
||||
|
||||
rootLogger.level = ${sys:ls.log.level}
|
||||
rootLogger.appenderRef.console.ref = ${sys:ls.log.format}_console
|
||||
rootLogger.appenderRef.rolling.ref = ${sys:ls.log.format}_rolling
|
||||
|
||||
# Slowlog
|
||||
|
||||
appender.console_slowlog.type = Console
|
||||
appender.console_slowlog.name = plain_console_slowlog
|
||||
appender.console_slowlog.layout.type = PatternLayout
|
||||
appender.console_slowlog.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n
|
||||
|
||||
appender.json_console_slowlog.type = Console
|
||||
appender.json_console_slowlog.name = json_console_slowlog
|
||||
appender.json_console_slowlog.layout.type = JSONLayout
|
||||
appender.json_console_slowlog.layout.compact = true
|
||||
appender.json_console_slowlog.layout.eventEol = true
|
||||
|
||||
appender.rolling_slowlog.type = RollingFile
|
||||
appender.rolling_slowlog.name = plain_rolling_slowlog
|
||||
appender.rolling_slowlog.fileName = ${sys:ls.logs}/logstash-slowlog-${sys:ls.log.format}.log
|
||||
appender.rolling_slowlog.filePattern = ${sys:ls.logs}/logstash-slowlog-${sys:ls.log.format}-%d{yyyy-MM-dd}-%i.log.gz
|
||||
appender.rolling_slowlog.policies.type = Policies
|
||||
appender.rolling_slowlog.policies.time.type = TimeBasedTriggeringPolicy
|
||||
appender.rolling_slowlog.policies.time.interval = 1
|
||||
appender.rolling_slowlog.policies.time.modulate = true
|
||||
appender.rolling_slowlog.layout.type = PatternLayout
|
||||
appender.rolling_slowlog.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n
|
||||
appender.rolling_slowlog.policies.size.type = SizeBasedTriggeringPolicy
|
||||
appender.rolling_slowlog.policies.size.size = 100MB
|
||||
appender.rolling_slowlog.strategy.type = DefaultRolloverStrategy
|
||||
appender.rolling_slowlog.strategy.max = 30
|
||||
|
||||
appender.json_rolling_slowlog.type = RollingFile
|
||||
appender.json_rolling_slowlog.name = json_rolling_slowlog
|
||||
appender.json_rolling_slowlog.fileName = ${sys:ls.logs}/logstash-slowlog-${sys:ls.log.format}.log
|
||||
appender.json_rolling_slowlog.filePattern = ${sys:ls.logs}/logstash-slowlog-${sys:ls.log.format}-%d{yyyy-MM-dd}-%i.log.gz
|
||||
appender.json_rolling_slowlog.policies.type = Policies
|
||||
appender.json_rolling_slowlog.policies.time.type = TimeBasedTriggeringPolicy
|
||||
appender.json_rolling_slowlog.policies.time.interval = 1
|
||||
appender.json_rolling_slowlog.policies.time.modulate = true
|
||||
appender.json_rolling_slowlog.layout.type = JSONLayout
|
||||
appender.json_rolling_slowlog.layout.compact = true
|
||||
appender.json_rolling_slowlog.layout.eventEol = true
|
||||
appender.json_rolling_slowlog.policies.size.type = SizeBasedTriggeringPolicy
|
||||
appender.json_rolling_slowlog.policies.size.size = 100MB
|
||||
appender.json_rolling_slowlog.strategy.type = DefaultRolloverStrategy
|
||||
appender.json_rolling_slowlog.strategy.max = 30
|
||||
|
||||
logger.slowlog.name = slowlog
|
||||
logger.slowlog.level = trace
|
||||
logger.slowlog.appenderRef.console_slowlog.ref = ${sys:ls.log.format}_console_slowlog
|
||||
logger.slowlog.appenderRef.rolling_slowlog.ref = ${sys:ls.log.format}_rolling_slowlog
|
||||
logger.slowlog.additivity = false
|
||||
|
||||
logger.licensereader.name = logstash.licensechecker.licensereader
|
||||
logger.licensereader.level = error
|
|
@ -1,10 +0,0 @@
|
|||
---
|
||||
## Default Logstash configuration from logstash-docker.
|
||||
## from https://github.com/elastic/logstash-docker/blob/master/build/logstash/config/logstash-oss.yml
|
||||
#
|
||||
http.host: "0.0.0.0"
|
||||
config.reload.automatic: true
|
||||
config.reload.interval: 300s
|
||||
path.queue: /usr/share/logstash/queue
|
||||
path.dead_letter_queue: /usr/share/logstash/dead_letter_queue
|
||||
xpack.monitoring.elasticsearch.password:
|
|
@ -1,18 +0,0 @@
|
|||
- pipeline.id: open_dmp_beats
|
||||
queue.type: persisted
|
||||
queue.max_bytes: 50mb
|
||||
dead_letter_queue.enable: true
|
||||
path.config: "/usr/share/logstash/pipeline/open_dmp_beats.conf"
|
||||
queue.checkpoint.writes: 32
|
||||
- pipeline.id: open_dmp_main
|
||||
queue.type: persisted
|
||||
queue.max_bytes: 50mb
|
||||
dead_letter_queue.enable: true
|
||||
path.config: "/usr/share/logstash/pipeline/open_dmp_main.conf"
|
||||
queue.checkpoint.writes: 32
|
||||
- pipeline.id: open_dmp_send_to_elastic
|
||||
queue.type: persisted
|
||||
queue.max_bytes: 50mb
|
||||
dead_letter_queue.enable: true
|
||||
path.config: "/usr/share/logstash/pipeline/open_dmp_send_to_elastic.conf"
|
||||
queue.checkpoint.writes: 32
|
|
@ -1,14 +0,0 @@
|
|||
input {
|
||||
beats {
|
||||
port => 31312
|
||||
ssl => false
|
||||
client_inactivity_timeout => 3000
|
||||
}
|
||||
}
|
||||
|
||||
filter {
|
||||
}
|
||||
|
||||
output {
|
||||
pipeline { send_to => open_dmp_main }
|
||||
}
|
|
@ -1,19 +0,0 @@
|
|||
input {
|
||||
pipeline { address => open_dmp_main }
|
||||
}
|
||||
|
||||
filter {
|
||||
grok {
|
||||
match => { "message" => "(?<timestamp>%{DATE} %{TIME})%{SPACE}%{LOGLEVEL:level} %{NUMBER:pid} --- \[%{DATA:thread}\] %{DATA:class}%{SPACE}: %{GREEDYDATA:logmessage}" }
|
||||
}
|
||||
if "_grokparsefailure" not in [tags] {
|
||||
mutate
|
||||
{
|
||||
remove_field => [ "message" ]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
output {
|
||||
pipeline { send_to => open_dmp_send_to_elastic }
|
||||
}
|
|
@ -1,19 +0,0 @@
|
|||
input {
|
||||
pipeline { address => open_dmp_send_to_elastic }
|
||||
}
|
||||
|
||||
filter {
|
||||
}
|
||||
|
||||
output {
|
||||
elasticsearch {
|
||||
hosts => "elasticsearch:9200"
|
||||
user => elastic
|
||||
password =>
|
||||
index =>"opendmp.logs"
|
||||
#manage_template => true
|
||||
#template => "/usr/share/logstash/templates/audit/openDMP.json"
|
||||
#template_name => "cite.elas.openDMP-audit*"
|
||||
#template_overwrite => true
|
||||
}
|
||||
}
|
|
@ -1,66 +0,0 @@
|
|||
def pipelineContext = [:]
|
||||
|
||||
pipeline {
|
||||
agent any
|
||||
|
||||
options {
|
||||
skipDefaultCheckout(true)
|
||||
}
|
||||
|
||||
stages {
|
||||
stage('Checkout') {
|
||||
steps {
|
||||
checkout scm
|
||||
}
|
||||
}
|
||||
stage('Build API') {
|
||||
steps {
|
||||
script {
|
||||
pipelineContext.apiImage = docker.build("open-dmp-api:${env.BUILD_ID}", "-f dmp-backend/Dockerfile.CI dmp-backend/")
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Build WebApp') {
|
||||
steps {
|
||||
script {
|
||||
pipelineContext.webappImage = docker.build("open-dmp-webapp:${env.BUILD_ID}", "-f dmp-frontend/Dockerfile.CI dmp-frontend/")
|
||||
}
|
||||
}
|
||||
}
|
||||
//stage('SonarQube analysis') {
|
||||
// steps {
|
||||
// script {
|
||||
// def scannerHome = tool 'SonarQube Scanner 4.3';
|
||||
// withSonarQubeEnv('SonarQube') { // If you have configured more than one global server connection, you can specify its name
|
||||
// sh "${scannerHome}/bin/sonar-scanner"
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
//}
|
||||
//// waiting for sonar results based on the configured web hook in the Sonar server, which pushes the status back to Jenkins
|
||||
//stage('SonarQube scan result check') {
|
||||
// steps {
|
||||
// timeout(time: 2, unit: 'MINUTES') {
|
||||
// retry(3) {
|
||||
// script {
|
||||
// def qg = waitForQualityGate()
|
||||
// if (qg.status != 'OK') {
|
||||
// error "Pipeline aborted due to quality gate failure: ${qg.status}"
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
//}
|
||||
stage('Pushing to Docker Registry') {
|
||||
steps {
|
||||
script {
|
||||
docker.withRegistry('http://drepo.local.cite.gr', 'b2c651c1-9a3b-4a98-a6da-e1dd7a20f512') {
|
||||
pipelineContext.apiImage.push()
|
||||
pipelineContext.webappImage.push()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
21
LICENSE
|
@ -1,21 +0,0 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2019-2020 OpenAIRE AMKE
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
69
README.md
|
@ -1,69 +0,0 @@
|
|||
|
||||
**Important note: The deployment, troubleshooting, maintenance and operation of on-premises / self-served OpenDMP instances for development, testing or production use shall be the sole responsibility of the adopter. No support is guaranteed by the OpenDMP implementation team for issues that may be encountered during deployment, extension or operation of such installations.**
|
||||
|
||||
**Documentation is provided on a best-effort basis for the code and processes around the development, deployment and operation of OpenDMP. If you find any misalignment of the actual processes with the related documentation, please let us know so that the misalignment is addressed for the benefit of future adopters.**
|
||||
|
||||
|
||||
# Using Docker Compose with Argos
|
||||
|
||||
ARGOS is an open, extensible service that simplifies the management, validation, monitoring and maintenance of Data Management Plans. It allows actors (researchers, managers, supervisors etc.) to create actionable DMPs that may be freely exchanged among infrastructures for carrying out specific aspects of the Data management process in accordance with the intentions and commitment of Data owners.
|
||||
|
||||
## Before running the docker compose commands, the configuration must be set
|
||||
|
||||
### Database
|
||||
|
||||
First of all, the database must be configured
|
||||
|
||||
The only file that has to be changed is **/dmp-db-scema/Docker/dmp-db.env**
|
||||
|
||||
```bash
|
||||
ADMIN_USER: Admin username (app)
|
||||
ADMIN_PASSWORD: Admin password (app)
|
||||
|
||||
POSTGRES_DB: database name
|
||||
POSTGRES_USER: Admin username (database)
|
||||
POSTGRES_PASSWORD: Admin password (database)
|
||||
```
|
||||
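For illustration only, a filled-in **dmp-db.env** could look like the sketch below; every value is a placeholder to be replaced with your own credentials:

```bash
ADMIN_USER=app-admin             # application admin username (placeholder)
ADMIN_PASSWORD=change-me         # application admin password (placeholder)

POSTGRES_DB=dmpdb                # database name (placeholder)
POSTGRES_USER=dmpadmin           # database admin username (placeholder)
POSTGRES_PASSWORD=change-me-too  # database admin password (placeholder)
```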
|
||||
### Backend
|
||||
|
||||
Secondly, a few more options should be assigned
|
||||
|
||||
The file **/dmp-backend/web/src/main/resources/config/application-docker.properties** contains all the necessary properties
|
||||
|
||||
Values to be modified:
|
||||
```bash
|
||||
database.url: the url that is used to connect to database (JDBC based)
|
||||
database.username: database admin username
|
||||
database.password: database admin password
|
||||
|
||||
elasticsearch.* (optional): set up Elasticsearch, check the Elasticsearch (optional) section below
|
||||
|
||||
google.login.clientId (optional): Google as login provider
|
||||
```
|
||||
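As a sketch, the corresponding entries in **application-docker.properties** might look like the following; the JDBC host, port, database name and credentials here are assumptions for illustration, not values shipped with the project:

```bash
database.url=jdbc:postgresql://dmp-db:5432/dmpdb   # assumed host/port/database name
database.username=dmpadmin                         # placeholder
database.password=change-me-too                    # placeholder

# optional integrations
elasticsearch.password=<password of the elastic user>
google.login.clientId=<your Google OAuth client id>
```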
**NOTE:** if you want to configure and integrate other providers, check this reference [Setup configurable login](https://code-repo.d4science.org/MaDgiK-CITE/argos/wiki/Page-2A:-Setup-configurable-login)
|
||||
|
||||
If you provide google.login.clientId, then the same value should be set in the field named **loginProviders.googleConfiguration.clientId** which belongs to **/dmp-frontend/src/assets/config/config.json**
|
||||
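The relevant fragment of **/dmp-frontend/src/assets/config/config.json** would then look roughly like this; only the nesting of the field comes from the text above, the surrounding structure is an assumption:

```json
{
  "loginProviders": {
    "googleConfiguration": {
      "clientId": "<same value as google.login.clientId>"
    }
  }
}
```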
|
||||
## You are ready to build and run the entire application using Docker Compose
|
||||
|
||||
1. Go to the project's root directory
|
||||
2. Type in the **Terminal** `docker volume create --name=dmpdata`
|
||||
3. Type in the **Terminal** `docker-compose up -d --build`
|
||||
4. After it completes, your application is running at [http://localhost:8080](http://localhost:8080) (the full command sequence is sketched below)
|
||||
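Collected in one place, the command sequence from the steps above is simply:

```bash
# run from the project's root directory
docker volume create --name=dmpdata
docker-compose up -d --build
# once the build finishes, the application is served at http://localhost:8080
```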
|
||||
### Elasticsearch (optional)
|
||||
If you want to set up Elasticsearch, you will need the password for the **elastic** user
|
||||
|
||||
After your application is running, type in the **Terminal** `docker exec -it elasticsearch /bin/sh`
|
||||
|
||||
Run the command `cat data/passwords.txt` in the shell and save its output
|
||||
|
||||
Finally, run `exit` to get back to your terminal
|
||||
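Put together, the password retrieval is just the following; `elasticsearch` is the container name used above:

```bash
docker exec -it elasticsearch /bin/sh   # opens a shell inside the elasticsearch container
# inside that shell:
cat data/passwords.txt                  # note the generated password for the "elastic" user
exit                                    # back to your own terminal
```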
|
||||
The **elastic** user's password that you get has to be set in the **elasticsearch.password** property in the backend configuration
|
||||
|
||||
Rerun the application
|
||||
|
||||
1. Type in the **Terminal** `docker-compose down`
|
||||
2. Type in the **Terminal** `docker-compose up -d --build`
|
|
@ -1 +0,0 @@
|
|||
PROFILE=docker
|
|
@ -1,23 +1,11 @@
|
|||
FROM maven:3-jdk-11 AS MAVEN_BUILD
|
||||
|
||||
|
||||
|
||||
COPY pom.xml /build/
|
||||
COPY data /build/data/
|
||||
COPY elastic /build/elastic/
|
||||
#COPY logging /build/logging/
|
||||
COPY queryable /build/queryable/
|
||||
COPY web /build/web/
|
||||
|
||||
|
||||
|
||||
WORKDIR /build/
|
||||
RUN mvn package -q
|
||||
|
||||
|
||||
|
||||
FROM amazoncorretto:11
|
||||
WORKDIR /app
|
||||
|
||||
COPY --from=MAVEN_BUILD /build/web/target/web-1.0-SNAPSHOT.jar /app.jar
|
||||
ENTRYPOINT ["java","-Djava.security.egd=file:/dev/./urandom" ,"-Dspring.profiles.active=${PROFILE}","-jar","/app.jar"]
|
||||
FROM openjdk:8-jdk-alpine
|
||||
RUN apk add --update \
|
||||
curl \
|
||||
&& rm -rf /var/cache/apk/*
|
||||
VOLUME /tmp
|
||||
ARG PROFILE=production
|
||||
ENV PROF $PROFILE
|
||||
ADD web/src/main/resources/ProjectConfiguration.xml /tmp/ProjectConfiguration.xml
|
||||
ADD web/src/main/resources/ExternalUrls.xml /tmp/ExternalUrls.xml
|
||||
ADD web/target/web-1.0-SNAPSHOT.jar app.jar
|
||||
ENTRYPOINT ["java","-Djava.security.egd=file:/dev/./urandom" ,"-Dspring.profiles.active=${PROF}","-jar","/app.jar"]
|
|
@ -1,15 +0,0 @@
|
|||
FROM maven:3-openjdk-11 AS MAVEN_BUILD
|
||||
|
||||
COPY pom.xml /build/
|
||||
COPY data /build/data/
|
||||
COPY elastic /build/elastic/
|
||||
COPY queryable /build/queryable/
|
||||
COPY web /build/web/
|
||||
|
||||
WORKDIR /build/
|
||||
RUN mvn package
|
||||
|
||||
FROM adoptopenjdk/openjdk11:alpine-jre
|
||||
WORKDIR /app
|
||||
COPY --from=MAVEN_BUILD /build/web/target/web-1.0-SNAPSHOT.jar /app/app.jar
|
||||
ENTRYPOINT ["java", "-Djava.security.egd=file:/dev/./urandom", "-Dspring.profiles.active=${PROF}", "-Dspring.config.additional-location=/files/config/", "-cp", "/app/app.jar", "-Dloader.path=/files/repo-jars", "org.springframework.boot.loader.PropertiesLauncher"]
|
|
@ -4,6 +4,7 @@
|
|||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<groupId>eu.eudat</groupId>
|
||||
<artifactId>data</artifactId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
<packaging>jar</packaging>
|
||||
|
@ -25,10 +26,5 @@
|
|||
<artifactId>elastic</artifactId>
|
||||
<version>1.0.0-SNAPSHOT</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.swagger</groupId>
|
||||
<artifactId>swagger-annotations</artifactId>
|
||||
<version>1.5.20</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</project>
|
|
@ -1,7 +1,5 @@
|
|||
package eu.eudat.data.converters;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.format.datetime.DateFormatter;
|
||||
|
||||
import javax.persistence.AttributeConverter;
|
||||
|
@ -17,7 +15,6 @@ import java.util.TimeZone;
|
|||
*/
|
||||
@Converter
|
||||
public class DateToUTCConverter implements AttributeConverter<Date, Date> {
|
||||
private static final Logger logger = LoggerFactory.getLogger(DateToUTCConverter.class);
|
||||
|
||||
@Override
|
||||
public Date convertToDatabaseColumn(Date attribute) {
|
||||
|
@ -28,7 +25,7 @@ public class DateToUTCConverter implements AttributeConverter<Date, Date> {
|
|||
String date = formatterIST.format(attribute);
|
||||
return formatterIST.parse(date);
|
||||
} catch (ParseException e) {
|
||||
logger.error(e.getMessage(), e);
|
||||
e.printStackTrace();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
@ -42,7 +39,7 @@ public class DateToUTCConverter implements AttributeConverter<Date, Date> {
|
|||
String date = formatterIST.format(dbData);
|
||||
return formatterIST.parse(date);
|
||||
} catch (ParseException e) {
|
||||
logger.error(e.getMessage(), e);
|
||||
e.printStackTrace();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
package eu.eudat.data.dao.criteria;
|
||||
|
||||
import io.swagger.annotations.ApiModelProperty;
|
||||
|
||||
import eu.eudat.queryable.queryableentity.DataEntity;
|
||||
|
||||
public abstract class Criteria<T> {
|
||||
@ApiModelProperty(value = "like", name = "like", dataType = "String", allowEmptyValue = true, example = "\"\"")
|
||||
private String like;
|
||||
|
||||
public String getLike() {
|
||||
|
|
|
@ -1,16 +0,0 @@
|
|||
package eu.eudat.data.dao.criteria;
|
||||
|
||||
import eu.eudat.data.entities.DMPProfile;
|
||||
|
||||
public class DataManagementPlanBlueprintCriteria extends Criteria<DMPProfile> {
|
||||
|
||||
private Integer status;
|
||||
|
||||
public Integer getStatus() {
|
||||
return status;
|
||||
}
|
||||
public void setStatus(Integer status) {
|
||||
this.status = status;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,8 +1,8 @@
|
|||
package eu.eudat.data.dao.criteria;
|
||||
|
||||
import eu.eudat.data.entities.DMP;
|
||||
import eu.eudat.data.entities.DMPProfile;
|
||||
import eu.eudat.data.entities.Grant;
|
||||
|
||||
import eu.eudat.data.entities.Project;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
@ -11,19 +11,13 @@ import java.util.UUID;
|
|||
public class DataManagementPlanCriteria extends Criteria<DMP> {
|
||||
private Date periodStart;
|
||||
private Date periodEnd;
|
||||
private DMPProfile profile;
|
||||
private List<eu.eudat.data.entities.Grant> grants;
|
||||
private List<eu.eudat.data.entities.Project> projects;
|
||||
private boolean allVersions;
|
||||
private List<UUID> groupIds;
|
||||
private Integer status;
|
||||
private List<String> organisations;
|
||||
private Integer role;
|
||||
private List<UUID> collaborators;
|
||||
private List<UUID> datasetTemplates;
|
||||
private boolean isPublic;
|
||||
private boolean onlyPublic;
|
||||
private Short grantStatus;
|
||||
private boolean hasDoi;
|
||||
|
||||
public Date getPeriodStart() {
|
||||
return periodStart;
|
||||
|
@ -39,18 +33,11 @@ public class DataManagementPlanCriteria extends Criteria<DMP> {
|
|||
this.periodEnd = periodEnd;
|
||||
}
|
||||
|
||||
public DMPProfile getProfile() {
|
||||
return profile;
|
||||
public List<Project> getProjects() {
|
||||
return projects;
|
||||
}
|
||||
public void setProfile(DMPProfile profile) {
|
||||
this.profile = profile;
|
||||
}
|
||||
|
||||
public List<Grant> getGrants() {
|
||||
return grants;
|
||||
}
|
||||
public void setGrants(List<Grant> grants) {
|
||||
this.grants = grants;
|
||||
public void setProjects(List<Project> projects) {
|
||||
this.projects = projects;
|
||||
}
|
||||
|
||||
public boolean getAllVersions() {
|
||||
|
@ -94,42 +81,4 @@ public class DataManagementPlanCriteria extends Criteria<DMP> {
|
|||
public void setCollaborators(List<UUID> collaborators) {
|
||||
this.collaborators = collaborators;
|
||||
}
|
||||
|
||||
public List<UUID> getDatasetTemplates() {
|
||||
return datasetTemplates;
|
||||
}
|
||||
public void setDatasetTemplates(List<UUID> datasetTemplates) {
|
||||
this.datasetTemplates = datasetTemplates;
|
||||
}
|
||||
|
||||
public boolean getIsPublic() {
|
||||
return isPublic;
|
||||
}
|
||||
public void setIsPublic(boolean isPublic) {
|
||||
this.isPublic = isPublic;
|
||||
}
|
||||
|
||||
public boolean isOnlyPublic() {
|
||||
return onlyPublic;
|
||||
}
|
||||
|
||||
public void setOnlyPublic(boolean onlyPublic) {
|
||||
this.onlyPublic = onlyPublic;
|
||||
}
|
||||
|
||||
public Short getGrantStatus() {
|
||||
return grantStatus;
|
||||
}
|
||||
|
||||
public void setGrantStatus(Short grantStatus) {
|
||||
this.grantStatus = grantStatus;
|
||||
}
|
||||
|
||||
public boolean hasDoi() {
|
||||
return hasDoi;
|
||||
}
|
||||
|
||||
public void setHasDoi(boolean hasDoi) {
|
||||
this.hasDoi = hasDoi;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,32 +1,29 @@
|
|||
package eu.eudat.data.dao.criteria;
|
||||
|
||||
import eu.eudat.data.entities.DMP;
|
||||
import eu.eudat.types.grant.GrantStateType;
|
||||
import eu.eudat.types.project.ProjectStateType;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
public class DataManagementPlanPublicCriteria extends Criteria<DMP> {
|
||||
private GrantStateType grantStatus;
|
||||
private List<UUID> grants;
|
||||
private ProjectStateType projectStatus;
|
||||
private List<UUID> projects;
|
||||
public List<UUID> datasetProfile;
|
||||
private List<String> dmpOrganisations;
|
||||
private Integer role;
|
||||
private boolean allVersions;
|
||||
private List<UUID> groupIds;
|
||||
|
||||
public GrantStateType getGrantStatus() {
|
||||
return grantStatus;
|
||||
public ProjectStateType getProjectStatus() {
|
||||
return projectStatus;
|
||||
}
|
||||
public void setGrantStatus(GrantStateType grantStatus) {
|
||||
this.grantStatus = grantStatus;
|
||||
public void setProjectStatus(ProjectStateType projectStatus) {
|
||||
this.projectStatus = projectStatus;
|
||||
}
|
||||
|
||||
public List<UUID> getGrants() {
|
||||
return grants;
|
||||
public List<UUID> getProjects() {
|
||||
return projects;
|
||||
}
|
||||
public void setGrants(List<UUID> grants) {
|
||||
this.grants = grants;
|
||||
public void setProjects(List<UUID> projects) {
|
||||
this.projects = projects;
|
||||
}
|
||||
|
||||
public List<UUID> getDatasetProfile() {
|
||||
|
@ -42,25 +39,4 @@ public class DataManagementPlanPublicCriteria extends Criteria<DMP> {
|
|||
public void setDmpOrganisations(List<String> dmpOrganisations) {
|
||||
this.dmpOrganisations = dmpOrganisations;
|
||||
}
|
||||
|
||||
public Integer getRole() {
|
||||
return role;
|
||||
}
|
||||
public void setRole(Integer role) {
|
||||
this.role = role;
|
||||
}
|
||||
|
||||
public boolean getAllVersions() {
|
||||
return allVersions;
|
||||
}
|
||||
public void setAllVersions(boolean allVersions) {
|
||||
this.allVersions = allVersions;
|
||||
}
|
||||
|
||||
public List<UUID> getGroupIds() {
|
||||
return groupIds;
|
||||
}
|
||||
public void setGroupIds(List<UUID> groupIds) {
|
||||
this.groupIds = groupIds;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,16 +2,6 @@ package eu.eudat.data.dao.criteria;
|
|||
|
||||
import eu.eudat.data.entities.DataRepository;
|
||||
|
||||
import java.util.UUID;
|
||||
|
||||
public class DataRepositoryCriteria extends Criteria<DataRepository> {
|
||||
|
||||
private UUID creationUserId;
|
||||
|
||||
public UUID getCreationUserId() {
|
||||
return creationUserId;
|
||||
}
|
||||
public void setCreationUserId(UUID creationUserId) {
|
||||
this.creationUserId = creationUserId;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,7 +2,6 @@ package eu.eudat.data.dao.criteria;
|
|||
|
||||
import eu.eudat.data.entities.Dataset;
|
||||
import eu.eudat.elastic.entities.Tag;
|
||||
import eu.eudat.types.grant.GrantStateType;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
@ -19,13 +18,8 @@ public class DatasetCriteria extends Criteria<Dataset> {
|
|||
private UUID profileDatasetId;
|
||||
private List<String> organisations;
|
||||
private Integer role;
|
||||
private List<UUID> grants;
|
||||
private List<UUID> projects;
|
||||
private List<UUID> collaborators;
|
||||
private List<UUID> datasetTemplates;
|
||||
private List<UUID> groupIds;
|
||||
private Boolean isPublic;
|
||||
private Short grantStatus;
|
||||
private boolean hasDoi;
|
||||
|
||||
public boolean getAllVersions() {
|
||||
return allVersions;
|
||||
|
@ -90,11 +84,11 @@ public class DatasetCriteria extends Criteria<Dataset> {
|
|||
this.role = role;
|
||||
}
|
||||
|
||||
public List<UUID> getGrants() {
|
||||
return grants;
|
||||
public List<UUID> getProjects() {
|
||||
return projects;
|
||||
}
|
||||
public void setGrants(List<UUID> grants) {
|
||||
this.grants = grants;
|
||||
public void setProjects(List<UUID> projects) {
|
||||
this.projects = projects;
|
||||
}
|
||||
|
||||
public List<UUID> getCollaborators() {
|
||||
|
@ -103,42 +97,4 @@ public class DatasetCriteria extends Criteria<Dataset> {
|
|||
public void setCollaborators(List<UUID> collaborators) {
|
||||
this.collaborators = collaborators;
|
||||
}
|
||||
|
||||
public List<UUID> getDatasetTemplates() {
|
||||
return datasetTemplates;
|
||||
}
|
||||
public void setDatasetTemplates(List<UUID> datasetTemplates) {
|
||||
this.datasetTemplates = datasetTemplates;
|
||||
}
|
||||
|
||||
public List<UUID> getGroupIds() {
|
||||
return groupIds;
|
||||
}
|
||||
public void setGroupIds(List<UUID> groupIds) {
|
||||
this.groupIds = groupIds;
|
||||
}
|
||||
|
||||
public Boolean getIsPublic() {
|
||||
return isPublic;
|
||||
}
|
||||
|
||||
public void setIsPublic(Boolean isPublic) {
|
||||
this.isPublic = isPublic;
|
||||
}
|
||||
|
||||
public Short getGrantStatus() {
|
||||
return grantStatus;
|
||||
}
|
||||
|
||||
public void setGrantStatus(Short grantStatus) {
|
||||
this.grantStatus = grantStatus;
|
||||
}
|
||||
|
||||
public boolean hasDoi() {
|
||||
return hasDoi;
|
||||
}
|
||||
|
||||
public void setHasDoi(boolean hasDoi) {
|
||||
this.hasDoi = hasDoi;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,101 +1,17 @@
|
|||
package eu.eudat.data.dao.criteria;
|
||||
|
||||
|
||||
import eu.eudat.data.entities.DescriptionTemplate;
|
||||
|
||||
import java.util.Date;
|
||||
import eu.eudat.data.entities.DatasetProfile;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
public class DatasetProfileCriteria extends Criteria<DescriptionTemplate> {
|
||||
|
||||
public enum DatasetProfileFilter {
|
||||
DMPs((short) 0), Datasets((short) 1);
|
||||
|
||||
private short value;
|
||||
private DatasetProfileFilter(short value) {
|
||||
this.value = value;
|
||||
}
|
||||
public short getValue() { return value; }
|
||||
|
||||
public static DatasetProfileFilter fromInteger(short value) {
|
||||
switch (value) {
|
||||
case 0:
|
||||
return DMPs;
|
||||
case 1:
|
||||
return Datasets;
|
||||
default:
|
||||
throw new RuntimeException("Unsupported DescriptionTemplate filter");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public class DatasetProfileCriteria extends Criteria<DatasetProfile> {
|
||||
private boolean allVersions;
|
||||
private List<UUID> groupIds;
|
||||
private Short filter;
|
||||
private UUID userId;
|
||||
private boolean finalized;
|
||||
private Integer status;
|
||||
private Integer role;
|
||||
private List<UUID> ids;
|
||||
private Date periodStart;
|
||||
|
||||
public boolean getAllVersions() { return allVersions; }
|
||||
public void setAllVersions(boolean allVersions) { this.allVersions = allVersions; }
|
||||
|
||||
public List<UUID> getGroupIds() { return groupIds; }
|
||||
public void setGroupIds(List<UUID> groupIds) { this.groupIds = groupIds; }
|
||||
|
||||
public Short getFilter() {
|
||||
return filter;
|
||||
}
|
||||
public void setFilter(Short filter) {
|
||||
this.filter = filter;
|
||||
}
|
||||
|
||||
public UUID getUserId() {
|
||||
return userId;
|
||||
}
|
||||
public void setUserId(UUID userId) {
|
||||
this.userId = userId;
|
||||
}
|
||||
|
||||
public boolean getFinalized() {
|
||||
return finalized;
|
||||
}
|
||||
public void setFinalized(boolean finalized) {
|
||||
this.finalized = finalized;
|
||||
}
|
||||
|
||||
public Integer getStatus() {
|
||||
return status;
|
||||
}
|
||||
|
||||
public void setStatus(Integer status) {
|
||||
this.status = status;
|
||||
}
|
||||
|
||||
public Integer getRole() {
|
||||
return role;
|
||||
}
|
||||
|
||||
public void setRole(Integer role) {
|
||||
this.role = role;
|
||||
}
|
||||
|
||||
public List<UUID> getIds() {
|
||||
return ids;
|
||||
}
|
||||
|
||||
public void setIds(List<UUID> ids) {
|
||||
this.ids = ids;
|
||||
}
|
||||
|
||||
public Date getPeriodStart() {
|
||||
return periodStart;
|
||||
}
|
||||
|
||||
public void setPeriodStart(Date periodStart) {
|
||||
this.periodStart = periodStart;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
package eu.eudat.data.dao.criteria;
|
||||
|
||||
import eu.eudat.data.entities.DescriptionTemplate;
|
||||
import eu.eudat.data.entities.DatasetProfile;
|
||||
|
||||
import java.util.UUID;
|
||||
|
||||
|
||||
public class DatasetProfileWizardCriteria extends Criteria<DescriptionTemplate> {
|
||||
public class DatasetProfileWizardCriteria extends Criteria<DatasetProfile> {
|
||||
private UUID id;
|
||||
|
||||
public UUID getId() {
|
||||
|
|
|
@ -2,7 +2,7 @@ package eu.eudat.data.dao.criteria;
|
|||
|
||||
import eu.eudat.data.entities.Dataset;
|
||||
import eu.eudat.elastic.entities.Tag;
|
||||
import eu.eudat.types.grant.GrantStateType;
|
||||
import eu.eudat.types.project.ProjectStateType;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
@ -11,31 +11,32 @@ import java.util.UUID;
|
|||
* Created by ikalyvas on 10/2/2018.
|
||||
*/
|
||||
public class DatasetPublicCriteria extends Criteria<Dataset>{
|
||||
private GrantStateType grantStatus;
|
||||
private List<UUID> grants;
|
||||
private List<UUID> datasetProfile;
|
||||
private List<String> dmpOrganisations;
|
||||
private List<Tag> tags;
|
||||
private List<UUID> dmpIds;
|
||||
private Integer role;
|
||||
public ProjectStateType projectStatus;
|
||||
public List<UUID> projects;
|
||||
public List<UUID> datasetProfile;
|
||||
public List<String> dmpOrganisations;
|
||||
public List<Tag> tags;
|
||||
|
||||
public GrantStateType getGrantStatus() {
|
||||
return grantStatus;
|
||||
}
|
||||
public void setGrantStatus(GrantStateType grantStatus) {
|
||||
this.grantStatus = grantStatus;
|
||||
public ProjectStateType getProjectStatus() {
|
||||
return projectStatus;
|
||||
}
|
||||
|
||||
public List<UUID> getGrants() {
|
||||
return grants;
|
||||
public void setProjectStatus(ProjectStateType projectStatus) {
|
||||
this.projectStatus = projectStatus;
|
||||
}
|
||||
public void setGrants(List<UUID> grants) {
|
||||
this.grants = grants;
|
||||
|
||||
public List<UUID> getProjects() {
|
||||
return projects;
|
||||
}
|
||||
|
||||
public void setProjects(List<UUID> projects) {
|
||||
this.projects = projects;
|
||||
}
|
||||
|
||||
public List<UUID> getDatasetProfile() {
|
||||
return datasetProfile;
|
||||
}
|
||||
|
||||
public void setDatasetProfile(List<UUID> datasetProfile) {
|
||||
this.datasetProfile = datasetProfile;
|
||||
}
|
||||
|
@ -43,6 +44,7 @@ public class DatasetPublicCriteria extends Criteria<Dataset>{
|
|||
public List<String> getDmpOrganisations() {
|
||||
return dmpOrganisations;
|
||||
}
|
||||
|
||||
public void setDmpOrganisations(List<String> dmpOrganisations) {
|
||||
this.dmpOrganisations = dmpOrganisations;
|
||||
}
|
||||
|
@ -50,21 +52,8 @@ public class DatasetPublicCriteria extends Criteria<Dataset>{
|
|||
public List<Tag> getTags() {
|
||||
return tags;
|
||||
}
|
||||
|
||||
public void setTags(List<Tag> tags) {
|
||||
this.tags = tags;
|
||||
}
|
||||
|
||||
public List<UUID> getDmpIds() {
|
||||
return dmpIds;
|
||||
}
|
||||
public void setDmpIds(List<UUID> dmpIds) {
|
||||
this.dmpIds = dmpIds;
|
||||
}
|
||||
|
||||
public Integer getRole() {
|
||||
return role;
|
||||
}
|
||||
public void setRole(Integer role) {
|
||||
this.role = role;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +0,0 @@
|
|||
package eu.eudat.data.dao.criteria;
|
||||
|
||||
import eu.eudat.data.entities.EmailConfirmation;
|
||||
|
||||
public class EmailConfirmationCriteria extends Criteria<EmailConfirmation>{
|
||||
}
|
|
@ -1,15 +1,7 @@
|
|||
package eu.eudat.data.dao.criteria;
|
||||
|
||||
|
||||
import eu.eudat.data.entities.ExternalDataset;
|
||||
import java.util.UUID;
|
||||
|
||||
public class ExternalDatasetCriteria extends Criteria<ExternalDataset> {
|
||||
private UUID creationUserId;
|
||||
|
||||
public UUID getCreationUserId() {
|
||||
return creationUserId;
|
||||
}
|
||||
public void setCreationUserId(UUID creationUserId) {
|
||||
this.creationUserId = creationUserId;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,34 +0,0 @@
|
|||
package eu.eudat.data.dao.criteria;
|
||||
|
||||
import eu.eudat.data.entities.Funder;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
public class FunderCriteria extends Criteria<Funder> {
|
||||
private String reference;
|
||||
private String exactReference;
|
||||
private Date periodStart;
|
||||
|
||||
public String getReference() {
|
||||
return reference;
|
||||
}
|
||||
public void setReference(String reference) {
|
||||
this.reference = reference;
|
||||
}
|
||||
|
||||
public String getExactReference() {
|
||||
return exactReference;
|
||||
}
|
||||
|
||||
public void setExactReference(String exactReference) {
|
||||
this.exactReference = exactReference;
|
||||
}
|
||||
|
||||
public Date getPeriodStart() {
|
||||
return periodStart;
|
||||
}
|
||||
|
||||
public void setPeriodStart(Date periodStart) {
|
||||
this.periodStart = periodStart;
|
||||
}
|
||||
}
|
|
@ -1,83 +0,0 @@
|
|||
package eu.eudat.data.dao.criteria;
|
||||
|
||||
import eu.eudat.data.entities.Grant;
|
||||
import eu.eudat.types.grant.GrantStateType;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
public class GrantCriteria extends Criteria<Grant> {
|
||||
private Date periodStart;
|
||||
private Date periodEnd;
|
||||
private String reference;
|
||||
private Integer grantStateType;
|
||||
private boolean isPublic;
|
||||
private String funderId;
|
||||
private String funderReference;
|
||||
private String exactReference;
|
||||
private boolean isActive;
|
||||
|
||||
public Date getPeriodStart() {
|
||||
return periodStart;
|
||||
}
|
||||
public void setPeriodStart(Date periodStart) {
|
||||
this.periodStart = periodStart;
|
||||
}
|
||||
|
||||
public Date getPeriodEnd() {
|
||||
return periodEnd;
|
||||
}
|
||||
public void setPeriodEnd(Date periodEnd) {
|
||||
this.periodEnd = periodEnd;
|
||||
}
|
||||
|
||||
public String getReference() {
|
||||
return reference;
|
||||
}
|
||||
public void setReference(String reference) {
|
||||
this.reference = reference;
|
||||
}
|
||||
|
||||
public Integer getGrantStateType() {
|
||||
return grantStateType;
|
||||
}
|
||||
public void setGrantStateType(Integer grantStateType) {
|
||||
this.grantStateType = grantStateType;
|
||||
}
|
||||
|
||||
public boolean isPublic() {
|
||||
return isPublic;
|
||||
}
|
||||
public void setPublic(boolean aPublic) {
|
||||
isPublic = aPublic;
|
||||
}
|
||||
|
||||
public String getFunderId() {
|
||||
return funderId;
|
||||
}
|
||||
public void setFunderId(String funderId) {
|
||||
this.funderId = funderId;
|
||||
}
|
||||
|
||||
public String getFunderReference() {
|
||||
return funderReference;
|
||||
}
|
||||
public void setFunderReference(String funderReference) {
|
||||
this.funderReference = funderReference;
|
||||
}
|
||||
|
||||
public String getExactReference() {
|
||||
return exactReference;
|
||||
}
|
||||
|
||||
public void setExactReference(String exactReference) {
|
||||
this.exactReference = exactReference;
|
||||
}
|
||||
|
||||
public boolean isActive() {
|
||||
return isActive;
|
||||
}
|
||||
|
||||
public void setActive(boolean active) {
|
||||
isActive = active;
|
||||
}
|
||||
}
|
|
@ -1,38 +0,0 @@
|
|||
package eu.eudat.data.dao.criteria;
|
||||
|
||||
import eu.eudat.data.entities.Lock;
|
||||
import eu.eudat.data.entities.UserInfo;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.UUID;
|
||||
|
||||
public class LockCriteria extends Criteria<Lock> {
|
||||
|
||||
private UUID target;
|
||||
private UserInfo lockedBy;
|
||||
private Date touchedAt;
|
||||
|
||||
public UUID getTarget() {
|
||||
return target;
|
||||
}
|
||||
|
||||
public void setTarget(UUID target) {
|
||||
this.target = target;
|
||||
}
|
||||
|
||||
public UserInfo getLockedBy() {
|
||||
return lockedBy;
|
||||
}
|
||||
|
||||
public void setLockedBy(UserInfo lockedBy) {
|
||||
this.lockedBy = lockedBy;
|
||||
}
|
||||
|
||||
public Date getTouchedAt() {
|
||||
return touchedAt;
|
||||
}
|
||||
|
||||
public void setTouchedAt(Date touchedAt) {
|
||||
this.touchedAt = touchedAt;
|
||||
}
|
||||
}
|
|
@ -1,26 +0,0 @@
|
|||
package eu.eudat.data.dao.criteria;
|
||||
|
||||
import eu.eudat.data.enumeration.notification.ActiveStatus;
|
||||
import eu.eudat.data.enumeration.notification.NotifyState;
|
||||
|
||||
public class NotificationCriteria {
|
||||
|
||||
private ActiveStatus isActive;
|
||||
private NotifyState notifyState;
|
||||
|
||||
public ActiveStatus getIsActive() {
|
||||
return isActive;
|
||||
}
|
||||
|
||||
public void setIsActive(ActiveStatus isActive) {
|
||||
this.isActive = isActive;
|
||||
}
|
||||
|
||||
public NotifyState getNotifyState() {
|
||||
return notifyState;
|
||||
}
|
||||
|
||||
public void setNotifyState(NotifyState notifyState) {
|
||||
this.notifyState = notifyState;
|
||||
}
|
||||
}
|
|
@ -5,7 +5,6 @@ import eu.eudat.data.entities.Organisation;
|
|||
public class OrganisationCriteria extends Criteria<Organisation> {
|
||||
private String labelLike;
|
||||
private Boolean isPublic;
|
||||
private boolean isActive;
|
||||
|
||||
public String getLabelLike() {
|
||||
return labelLike;
|
||||
|
@ -20,12 +19,4 @@ public class OrganisationCriteria extends Criteria<Organisation> {
|
|||
public void setPublic(Boolean aPublic) {
|
||||
isPublic = aPublic;
|
||||
}
|
||||
|
||||
public boolean isActive() {
|
||||
return isActive;
|
||||
}
|
||||
|
||||
public void setActive(boolean active) {
|
||||
isActive = active;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,34 +1,49 @@
|
|||
package eu.eudat.data.dao.criteria;
|
||||
|
||||
import eu.eudat.data.entities.Project;
|
||||
import eu.eudat.types.project.ProjectStateType;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
public class ProjectCriteria extends Criteria<Project> {
|
||||
private String reference;
|
||||
private String exactReference;
|
||||
private Date periodStart;
|
||||
private Date periodStart;
|
||||
private Date periodEnd;
|
||||
private String reference;
|
||||
private Integer projectStateType;
|
||||
private boolean isPublic;
|
||||
|
||||
public String getReference() {
|
||||
return reference;
|
||||
}
|
||||
public void setReference(String reference) {
|
||||
this.reference = reference;
|
||||
}
|
||||
public Date getPeriodStart() {
|
||||
return periodStart;
|
||||
}
|
||||
public void setPeriodStart(Date periodStart) {
|
||||
this.periodStart = periodStart;
|
||||
}
|
||||
|
||||
public String getExactReference() {
|
||||
return exactReference;
|
||||
}
|
||||
public Date getPeriodEnd() {
|
||||
return periodEnd;
|
||||
}
|
||||
public void setPeriodEnd(Date periodEnd) {
|
||||
this.periodEnd = periodEnd;
|
||||
}
|
||||
|
||||
public void setExactReference(String exactReference) {
|
||||
this.exactReference = exactReference;
|
||||
}
|
||||
public String getReference() {
|
||||
return reference;
|
||||
}
|
||||
public void setReference(String reference) {
|
||||
this.reference = reference;
|
||||
}
|
||||
|
||||
public Date getPeriodStart() {
|
||||
return periodStart;
|
||||
}
|
||||
public Integer getProjectStateType() {
|
||||
return projectStateType;
|
||||
}
|
||||
public void setProjectStateType(Integer projectStateType) {
|
||||
this.projectStateType = projectStateType;
|
||||
}
|
||||
|
||||
public void setPeriodStart(Date periodStart) {
|
||||
this.periodStart = periodStart;
|
||||
}
|
||||
public boolean isPublic() {
|
||||
return isPublic;
|
||||
}
|
||||
public void setPublic(boolean aPublic) {
|
||||
isPublic = aPublic;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,16 +2,6 @@ package eu.eudat.data.dao.criteria;
|
|||
|
||||
import eu.eudat.data.entities.Registry;
|
||||
|
||||
import java.util.UUID;
|
||||
|
||||
public class RegistryCriteria extends Criteria<Registry> {
|
||||
|
||||
private UUID creationUserId;
|
||||
|
||||
public UUID getCreationUserId() {
|
||||
return creationUserId;
|
||||
}
|
||||
public void setCreationUserId(UUID creationUserId) {
|
||||
this.creationUserId = creationUserId;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,12 +2,8 @@ package eu.eudat.data.dao.criteria;
|
|||
|
||||
import eu.eudat.data.entities.Researcher;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
public class ResearcherCriteria extends Criteria<Researcher> {
|
||||
private String name;
|
||||
private String reference;
|
||||
private Date periodStart;
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
|
@ -16,20 +12,4 @@ public class ResearcherCriteria extends Criteria<Researcher> {
|
|||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getReference() {
|
||||
return reference;
|
||||
}
|
||||
|
||||
public void setReference(String reference) {
|
||||
this.reference = reference;
|
||||
}
|
||||
|
||||
public Date getPeriodStart() {
|
||||
return periodStart;
|
||||
}
|
||||
|
||||
public void setPeriodStart(Date periodStart) {
|
||||
this.periodStart = periodStart;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,16 +2,6 @@ package eu.eudat.data.dao.criteria;
|
|||
|
||||
import eu.eudat.data.entities.Service;
|
||||
|
||||
import java.util.UUID;
|
||||
|
||||
public class ServiceCriteria extends Criteria<Service> {
|
||||
|
||||
private UUID creationUserId;
|
||||
|
||||
public UUID getCreationUserId() {
|
||||
return creationUserId;
|
||||
}
|
||||
public void setCreationUserId(UUID creationUserId) {
|
||||
this.creationUserId = creationUserId;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -7,7 +7,6 @@ import eu.eudat.queryable.queryableentity.DataEntity;
|
|||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import javax.transaction.Transactional;
|
||||
import java.util.Set;
|
||||
|
||||
|
||||
|
@ -29,7 +28,6 @@ public class DatabaseService<T extends DataEntity> {
|
|||
return this.databaseCtx.getQueryable(tClass);
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public T createOrUpdate(T item, Class<T> tClass) {
|
||||
return this.databaseCtx.createOrUpdate(item, tClass);
|
||||
}
|
||||
|
|
|
@ -8,7 +8,6 @@ import eu.eudat.data.entities.UserDMP;
|
|||
import eu.eudat.data.entities.UserInfo;
|
||||
import eu.eudat.queryable.QueryableList;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
public interface DMPDao extends DatabaseAccessLayer<DMP, UUID> {
|
||||
|
@ -17,6 +16,6 @@ public interface DMPDao extends DatabaseAccessLayer<DMP, UUID> {
|
|||
|
||||
QueryableList<DMP> getUserDmps(DatasetWizardUserDmpCriteria datasetWizardAutocompleteRequest, UserInfo userInfo);
|
||||
|
||||
QueryableList<DMP> getAuthenticated(QueryableList<DMP> query, UUID principalId, List<Integer> roles);
|
||||
QueryableList<DMP> getAuthenticated(QueryableList<DMP> query, UUID principalId);
|
||||
|
||||
}
|
|
@ -5,147 +5,115 @@ import eu.eudat.data.dao.criteria.DataManagementPlanCriteria;
|
|||
import eu.eudat.data.dao.criteria.DatasetWizardUserDmpCriteria;
|
||||
import eu.eudat.data.dao.databaselayer.service.DatabaseService;
|
||||
import eu.eudat.data.entities.DMP;
|
||||
import eu.eudat.data.entities.UserDMP;
|
||||
import eu.eudat.data.entities.UserInfo;
|
||||
import eu.eudat.queryable.QueryableList;
|
||||
import eu.eudat.queryable.types.FieldSelectionType;
|
||||
import eu.eudat.queryable.types.SelectionField;
|
||||
import eu.eudat.types.grant.GrantStateType;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.scheduling.annotation.Async;
|
||||
import org.springframework.stereotype.Component;
|
||||
import schemasMicrosoftComOfficeOffice.LeftDocument;
|
||||
|
||||
import javax.persistence.criteria.Join;
|
||||
import javax.persistence.criteria.JoinType;
|
||||
import javax.transaction.Transactional;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
@Component("dMPDao")
|
||||
public class DMPDaoImpl extends DatabaseAccess<DMP> implements DMPDao {
|
||||
|
||||
@Autowired
|
||||
public DMPDaoImpl(DatabaseService<DMP> databaseService) {
|
||||
super(databaseService);
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<DMP> getWithCriteria(DataManagementPlanCriteria criteria) {
|
||||
QueryableList<DMP> query = getDatabaseService().getQueryable(DMP.getHints(), DMP.class);
|
||||
if (criteria.getLike() != null && !criteria.getLike().isEmpty())
|
||||
query.where((builder, root) -> builder.or(
|
||||
builder.like(builder.upper(root.get("label")), "%" + criteria.getLike().toUpperCase() + "%"),
|
||||
builder.like(builder.upper(root.get("description")), "%" + criteria.getLike().toUpperCase() + "%")));
|
||||
if (criteria.getPeriodEnd() != null)
|
||||
query.where((builder, root) -> builder.lessThan(root.get("created"), criteria.getPeriodEnd()));
|
||||
if (criteria.getPeriodStart() != null)
|
||||
query.where((builder, root) -> builder.greaterThan(root.get("created"), criteria.getPeriodStart()));
|
||||
if (criteria.getProfile() != null)
|
||||
query.where((builder, root) -> builder.equal(root.get("profile"), criteria.getProfile()));
|
||||
if (criteria.getGrants() != null && !criteria.getGrants().isEmpty())
|
||||
query.where(((builder, root) -> root.get("grant").in(criteria.getGrants())));
|
||||
if (!criteria.getAllVersions())
|
||||
query.initSubQuery(String.class).where((builder, root) -> builder.equal(root.get("version"),
|
||||
query.<String>subQueryMax((builder1, externalRoot, nestedRoot) -> builder1.and(
|
||||
builder1.equal(externalRoot.get("groupId"), nestedRoot.get("groupId")),
|
||||
builder1.notEqual(nestedRoot.get("status"), DMP.DMPStatus.DELETED.getValue())), Arrays.asList(new SelectionField(FieldSelectionType.FIELD, "version")), String.class)));
|
||||
if (criteria.getGroupIds() != null && !criteria.getGroupIds().isEmpty())
|
||||
query.where((builder, root) -> root.get("groupId").in(criteria.getGroupIds()));
|
||||
if (criteria.getStatus() != null) {
|
||||
if (criteria.getStatus() == DMP.DMPStatus.FINALISED.getValue()) {
|
||||
query.where((builder, root) -> builder.equal(root.get("status"), DMP.DMPStatus.FINALISED.getValue()));
|
||||
} else if (criteria.getStatus() == DMP.DMPStatus.ACTIVE.getValue()) {
|
||||
query.where((builder, root) -> builder.equal(root.get("status"), DMP.DMPStatus.ACTIVE.getValue()));
|
||||
}
|
||||
}
|
||||
if (criteria.getIsPublic()) {
|
||||
query.where(((builder, root) -> builder.equal(root.get("isPublic"), criteria.getIsPublic())));
|
||||
}
|
||||
/*if (criteria.getRole() != null) {
|
||||
if (criteria.getRole().equals(UserDMP.UserDMPRoles.OWNER.getValue())) {
|
||||
query.where((builder, root) -> builder.equal(root.join("users", JoinType.LEFT).get("role"), UserDMP.UserDMPRoles.OWNER.getValue()));
|
||||
} else if (criteria.getRole().equals(UserDMP.UserDMPRoles.USER.getValue())) {
|
||||
query.where((builder, root) -> builder.equal(root.join("users", JoinType.LEFT).get("role"), UserDMP.UserDMPRoles.USER.getValue()));
|
||||
@Autowired
|
||||
public DMPDaoImpl(DatabaseService<DMP> databaseService) {
|
||||
super(databaseService);
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<DMP> getWithCriteria(DataManagementPlanCriteria criteria) {
|
||||
QueryableList<DMP> query = getDatabaseService().getQueryable(DMP.getHints(), DMP.class);
|
||||
if (criteria.getLike() != null && !criteria.getLike().isEmpty())
|
||||
query.where((builder, root) -> builder.or(
|
||||
builder.like(builder.upper(root.get("label")), "%" + criteria.getLike().toUpperCase() + "%"),
|
||||
builder.like(builder.upper(root.get("description")), "%" + criteria.getLike().toUpperCase() + "%")));
|
||||
if (criteria.getPeriodEnd() != null)
|
||||
query.where((builder, root) -> builder.lessThan(root.get("created"), criteria.getPeriodEnd()));
|
||||
if (criteria.getPeriodStart() != null)
|
||||
query.where((builder, root) -> builder.greaterThan(root.get("created"), criteria.getPeriodStart()));
|
||||
if (criteria.getProjects() != null && !criteria.getProjects().isEmpty())
|
||||
query.where(((builder, root) -> root.get("project").in(criteria.getProjects())));
|
||||
if (!criteria.getAllVersions())
|
||||
query.initSubQuery(String.class).where((builder, root) -> builder.equal(root.get("version"),
|
||||
query.<String>subQueryMax((builder1, externalRoot, nestedRoot) -> builder1.equal(externalRoot.get("groupId"),
|
||||
nestedRoot.get("groupId")), Arrays.asList(new SelectionField(FieldSelectionType.FIELD, "version")), String.class)));
|
||||
if (criteria.getGroupIds() != null && !criteria.getGroupIds().isEmpty())
|
||||
query.where((builder, root) -> root.get("groupId").in(criteria.getGroupIds()));
|
||||
if (criteria.getStatus() != null) {
|
||||
if (criteria.getStatus() == DMP.DMPStatus.FINALISED.getValue()) {
|
||||
query.where((builder, root) -> builder.equal(root.get("status"), DMP.DMPStatus.FINALISED.getValue()));
|
||||
} else if (criteria.getStatus() == DMP.DMPStatus.ACTIVE.getValue()) {
|
||||
query.where((builder, root) -> builder.equal(root.get("status"), DMP.DMPStatus.ACTIVE.getValue()));
|
||||
}
|
||||
}*/
|
||||
if (criteria.getOrganisations() != null && !criteria.getOrganisations().isEmpty()) {
|
||||
query.where((builder, root) -> root.join("organisations").get("reference").in(criteria.getOrganisations()));
|
||||
}
|
||||
if (criteria.getCollaborators() != null && !criteria.getCollaborators().isEmpty()) {
|
||||
query.where((builder, root) -> root.join("users", JoinType.LEFT).join("user", JoinType.LEFT).get("id").in(criteria.getCollaborators()));
|
||||
}
|
||||
if (criteria.getDatasetTemplates() != null && !criteria.getDatasetTemplates().isEmpty()) {
|
||||
query.where((builder, root) -> root.join("associatedDmps", JoinType.LEFT).get("datasetprofile").get("id").in(criteria.getDatasetTemplates()));
|
||||
}
|
||||
if (criteria.getGrantStatus() != null) {
|
||||
if (criteria.getGrantStatus().equals(GrantStateType.FINISHED.getValue().shortValue()))
|
||||
query.where((builder, root) -> builder.lessThan(root.get("grant").get("enddate"), new Date()));
|
||||
if (criteria.getGrantStatus().equals(GrantStateType.ONGOING.getValue().shortValue()))
|
||||
query.where((builder, root) ->
|
||||
builder.or(builder.greaterThan(root.get("grant").get("enddate"), new Date())
|
||||
, builder.isNull(root.get("grant").get("enddate"))));
|
||||
}
|
||||
}
|
||||
if (criteria.getRole() != null) {
|
||||
if (criteria.getRole().equals(UserDMP.UserDMPRoles.OWNER.getValue())){
|
||||
query.where((builder, root) -> builder.equal(root.join("users").get("role"), UserDMP.UserDMPRoles.OWNER.getValue()));
|
||||
} else if (criteria.getRole().equals(UserDMP.UserDMPRoles.USER.getValue())){
|
||||
query.where((builder, root) -> builder.equal(root.join("users").get("role"), UserDMP.UserDMPRoles.USER.getValue()));
|
||||
}
|
||||
}
|
||||
if (criteria.getOrganisations() != null && !criteria.getOrganisations().isEmpty()) {
|
||||
query.where((builder, root) -> root.join("organisations").get("reference").in(criteria.getOrganisations()));
|
||||
}
|
||||
if (criteria.getCollaborators() != null && !criteria.getCollaborators().isEmpty()) {
|
||||
query.where((builder, root) -> root.join("users", JoinType.LEFT).join("user", JoinType.LEFT).get("id").in(criteria.getCollaborators()));
|
||||
}
|
||||
query.where((builder, root) -> builder.notEqual(root.get("status"), DMP.DMPStatus.DELETED.getValue()));
|
||||
return query;
|
||||
}
|
||||
|
||||
if (criteria.hasDoi()) {
|
||||
query.where((builder, root) -> builder.not(builder.isNull(root.join("dois").get("id"))));
|
||||
}
|
||||
query.where((builder, root) -> builder.notEqual(root.get("status"), DMP.DMPStatus.DELETED.getValue()));
|
||||
return query;
|
||||
}
|
||||
public QueryableList<DMP> getAuthenticated(QueryableList<DMP> query, UUID principal) {
|
||||
query.where((builder, root) -> builder.equal(root.join("users", JoinType.LEFT).join("user").get("id"), principal));
|
||||
return query;
|
||||
}
|
||||
|
||||
public QueryableList<DMP> getAuthenticated(QueryableList<DMP> query, UUID principal, List<Integer> roles) {
|
||||
if (roles != null && !roles.isEmpty()) {
|
||||
query.where((builder, root) -> {
|
||||
Join userJoin = root.join("users", JoinType.LEFT);
|
||||
return builder.and(builder.equal(userJoin.join("user", JoinType.LEFT).get("id"), principal), userJoin.get("role").in(roles));
|
||||
});
|
||||
} else {
|
||||
query.where((builder, root) -> builder.equal(root.join("users", JoinType.LEFT).join("user", JoinType.LEFT).get("id"), principal));
|
||||
}
|
||||
@Override
|
||||
public DMP createOrUpdate(DMP item) {
|
||||
return this.getDatabaseService().createOrUpdate(item, DMP.class);
|
||||
}
|
||||
|
||||
return query;
|
||||
}
|
||||
@Override
|
||||
public DMP find(UUID id) {
|
||||
return getDatabaseService().getQueryable(DMP.class).where((builder, root) -> builder.equal((root.get("id")), id)).getSingle();
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional
|
||||
public DMP createOrUpdate(DMP item) {
|
||||
return this.getDatabaseService().createOrUpdate(item, DMP.class);
|
||||
}
|
||||
@Override
|
||||
public QueryableList<DMP> getUserDmps(DatasetWizardUserDmpCriteria datasetWizardUserDmpCriteria, UserInfo userInfo) {
|
||||
QueryableList<DMP> query = getDatabaseService().getQueryable(DMP.class).where((builder, root) -> builder.or(builder.equal(root.get("creator"), userInfo), builder.isMember(userInfo, root.get("users"))));
|
||||
if (datasetWizardUserDmpCriteria.getLike() != null && !datasetWizardUserDmpCriteria.getLike().isEmpty()) {
|
||||
query.where((builder, root) -> builder.like(root.get("label"), "%" + datasetWizardUserDmpCriteria.getLike() + "%"));
|
||||
}
|
||||
return query;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DMP find(UUID id) {
|
||||
return getDatabaseService().getQueryable(DMP.class).where((builder, root) -> builder.equal((root.get("id")), id)).getSingle();
|
||||
}
|
||||
@Override
|
||||
public void delete(DMP item) {
|
||||
this.getDatabaseService().delete(item);
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<DMP> getUserDmps(DatasetWizardUserDmpCriteria datasetWizardUserDmpCriteria, UserInfo userInfo) {
|
||||
QueryableList<DMP> query = getDatabaseService().getQueryable(DMP.class).where((builder, root) -> builder.or(builder.equal(root.get("creator"), userInfo), builder.isMember(userInfo, root.get("users"))));
|
||||
if (datasetWizardUserDmpCriteria.getLike() != null && !datasetWizardUserDmpCriteria.getLike().isEmpty()) {
|
||||
query.where((builder, root) -> builder.like(root.get("label"), "%" + datasetWizardUserDmpCriteria.getLike() + "%"));
|
||||
}
|
||||
return query;
|
||||
}
|
||||
@Override
|
||||
public QueryableList<DMP> asQueryable() {
|
||||
return this.getDatabaseService().getQueryable(DMP.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void delete(DMP item) {
|
||||
this.getDatabaseService().delete(item);
|
||||
}
|
||||
@Async
|
||||
@Override
|
||||
public CompletableFuture<DMP> createOrUpdateAsync(DMP item) {
|
||||
return CompletableFuture.supplyAsync(() -> this.createOrUpdate(item));
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<DMP> asQueryable() {
|
||||
return this.getDatabaseService().getQueryable(DMP.class);
|
||||
}
|
||||
|
||||
@Async
|
||||
@Override
|
||||
public CompletableFuture<DMP> createOrUpdateAsync(DMP item) {
|
||||
return CompletableFuture.supplyAsync(() -> this.createOrUpdate(item));
|
||||
}
|
||||
|
||||
@Override
|
||||
public DMP find(UUID id, String hint) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
@Override
|
||||
public DMP find(UUID id, String hint) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
}
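For context, callers typically chain the two methods shown above: getWithCriteria builds the filtered QueryableList and getAuthenticated then narrows it to the DMPs the requesting user participates in. A minimal usage sketch, assuming a DMPDao interface exposing these two methods and conventional setters on DataManagementPlanCriteria; the helper class name is illustrative:

import eu.eudat.data.dao.criteria.DataManagementPlanCriteria;
import eu.eudat.data.dao.entities.DMPDao;
import eu.eudat.data.entities.DMP;
import eu.eudat.queryable.QueryableList;

import java.util.List;
import java.util.UUID;

public class DmpLookupSketch {
    // Hypothetical helper, not part of this change: shows the intended call order.
    public static List<DMP> visibleDmps(DMPDao dmpDao, UUID principalId, String term) {
        DataManagementPlanCriteria criteria = new DataManagementPlanCriteria();
        criteria.setLike(term);           // matched against label/description (setter assumed)
        criteria.setAllVersions(false);   // keep only the latest version per groupId (setter assumed)
        QueryableList<DMP> query = dmpDao.getWithCriteria(criteria);
        query = dmpDao.getAuthenticated(query, principalId, null); // null roles -> any membership, per the branch above
        return query.toList();
    }
}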
@@ -1,7 +1,6 @@
package eu.eudat.data.dao.entities;

import eu.eudat.data.dao.DatabaseAccessLayer;
import eu.eudat.data.dao.criteria.DataManagementPlanBlueprintCriteria;
import eu.eudat.data.dao.criteria.DataManagementPlanProfileCriteria;
import eu.eudat.data.entities.DMP;
import eu.eudat.data.entities.DMPProfile;
@@ -16,6 +15,4 @@ public interface DMPProfileDao extends DatabaseAccessLayer<DMPProfile, UUID> {

    QueryableList<DMPProfile> getWithCriteria(DataManagementPlanProfileCriteria criteria);

    QueryableList<DMPProfile> getWithCriteriaBlueprint(DataManagementPlanBlueprintCriteria criteria);

}
@@ -1,7 +1,6 @@
package eu.eudat.data.dao.entities;

import eu.eudat.data.dao.DatabaseAccess;
import eu.eudat.data.dao.criteria.DataManagementPlanBlueprintCriteria;
import eu.eudat.data.dao.criteria.DataManagementPlanProfileCriteria;
import eu.eudat.data.dao.databaselayer.service.DatabaseService;
import eu.eudat.data.entities.DMPProfile;
@@ -66,20 +65,4 @@ public class DMPProfileDaoImpl extends DatabaseAccess<DMPProfile> implements DMP
        query.where(((builder, root) -> builder.notEqual(root.get("status"), DMPProfile.Status.DELETED.getValue())));
        return query;
    }

    @Override
    public QueryableList<DMPProfile> getWithCriteriaBlueprint(DataManagementPlanBlueprintCriteria criteria){
        QueryableList<DMPProfile> query = getDatabaseService().getQueryable(DMPProfile.class);
        if (criteria.getLike() != null && !criteria.getLike().isEmpty())
            query.where((builder, root) -> builder.like(builder.upper(root.get("label")), "%" + criteria.getLike().toUpperCase() + "%"));
        if (criteria.getStatus() != null) {
            if (criteria.getStatus() == DMPProfile.Status.FINALIZED.getValue()) {
                query.where((builder, root) -> builder.equal(root.get("status"), DMPProfile.Status.FINALIZED.getValue()));
            } else if (criteria.getStatus() == DMPProfile.Status.SAVED.getValue()) {
                query.where((builder, root) -> builder.equal(root.get("status"), DMPProfile.Status.SAVED.getValue()));
            }
        }
        query.where(((builder, root) -> builder.notEqual(root.get("status"), DMPProfile.Status.DELETED.getValue())));
        return query;
    }
}
@@ -24,11 +24,7 @@ public class DataRepositoryDaoImpl extends DatabaseAccess<DataRepository> implem
    public QueryableList<DataRepository> getWithCriteria(DataRepositoryCriteria criteria) {
        QueryableList<DataRepository> query = this.getDatabaseService().getQueryable(DataRepository.class);
        if (criteria.getLike() != null)
            query.where((builder, root) -> builder.or(
                    builder.like(builder.upper(root.get("reference")), "%" + criteria.getLike().toUpperCase() + "%"),
                    builder.equal(builder.upper(root.get("label")), criteria.getLike().toUpperCase())));
        if (criteria.getCreationUserId() != null)
            query.where((builder, root) -> builder.equal(root.get("creationUser").get("id"), criteria.getCreationUserId()));
        query.where((builder, root) -> builder.equal(root.get("reference"), criteria.getLike()));
        return query;
    }
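The (builder, root) lambdas used throughout these DAOs are thin wrappers over the JPA Criteria API: builder is a javax.persistence.criteria.CriteriaBuilder and root is the query Root. As a point of reference only, the reference/label filter above corresponds roughly to the following plain-JPA predicate (QueryableList assembles the equivalent internally; entity and attribute names are taken from this diff):

import eu.eudat.data.entities.DataRepository;

import javax.persistence.EntityManager;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import java.util.List;

public class DataRepositoryCriteriaSketch {
    // Rough plain-JPA equivalent of the builder.or(like(upper(reference)), equal(upper(label))) clause above.
    public static List<DataRepository> byTerm(EntityManager em, String like) {
        CriteriaBuilder builder = em.getCriteriaBuilder();
        CriteriaQuery<DataRepository> cq = builder.createQuery(DataRepository.class);
        Root<DataRepository> root = cq.from(DataRepository.class);
        Predicate byReference = builder.like(builder.upper(root.get("reference")), "%" + like.toUpperCase() + "%");
        Predicate byLabel = builder.equal(builder.upper(root.get("label")), like.toUpperCase());
        cq.where(builder.or(byReference, byLabel));
        return em.createQuery(cq).getResultList();
    }
}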
@@ -6,16 +6,13 @@ import eu.eudat.data.entities.Dataset;
import eu.eudat.data.entities.UserInfo;
import eu.eudat.queryable.QueryableList;

import java.util.List;
import java.util.UUID;

public interface DatasetDao extends DatabaseAccessLayer<Dataset, UUID> {

    QueryableList<Dataset> getWithCriteria(DatasetCriteria criteria);

    QueryableList<Dataset> filterFromElastic(DatasetCriteria criteria, List<UUID> ids);

    QueryableList<Dataset> getAuthenticated(QueryableList<Dataset> query, UserInfo principal, List<Integer> roles);
    QueryableList<Dataset> getAuthenticated(QueryableList<Dataset> query, UserInfo principal);

    Dataset isPublicDataset(UUID id);

@@ -5,21 +5,16 @@ import eu.eudat.data.dao.criteria.DatasetCriteria;
import eu.eudat.data.dao.databaselayer.service.DatabaseService;
|
||||
import eu.eudat.data.entities.DMP;
|
||||
import eu.eudat.data.entities.Dataset;
|
||||
import eu.eudat.data.entities.UserDMP;
|
||||
import eu.eudat.data.entities.UserInfo;
|
||||
import eu.eudat.queryable.QueryableList;
|
||||
import eu.eudat.queryable.types.FieldSelectionType;
|
||||
import eu.eudat.queryable.types.SelectionField;
|
||||
import eu.eudat.types.grant.GrantStateType;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.scheduling.annotation.Async;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import javax.persistence.criteria.Join;
|
||||
import javax.persistence.criteria.JoinType;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
|
@ -32,13 +27,6 @@ public class DatasetDaoImpl extends DatabaseAccess<Dataset> implements DatasetDa
|
|||
@Override
|
||||
public QueryableList<Dataset> getWithCriteria(DatasetCriteria criteria) {
|
||||
QueryableList<Dataset> query = getDatabaseService().getQueryable(Dataset.getHints(), Dataset.class);
|
||||
if (criteria.getIsPublic() != null && criteria.getIsPublic()) {
|
||||
query.where((builder, root) -> builder.equal(root.get("dmp").get("isPublic"), true));
|
||||
query.where((builder, root) -> builder.equal(root.get("status"), Dataset.Status.FINALISED.getValue()));
|
||||
/*query.initSubQuery(String.class).where((builder, root) -> builder.equal(root.get("dmp").get("version"),
|
||||
query.<String>subQueryMax((builder1, externalRoot, nestedRoot) -> builder1.equal(externalRoot.get("dmp").get("groupId"), nestedRoot.get("dmp").get("groupId")),
|
||||
Arrays.asList(new SelectionField(FieldSelectionType.COMPOSITE_FIELD, "dmp:version")), String.class)));*/
|
||||
}
|
||||
if (criteria.getLike() != null && !criteria.getLike().isEmpty())
|
||||
query.where((builder, root) -> builder.or(
|
||||
builder.like(builder.upper(root.get("label")), "%" + criteria.getLike().toUpperCase() + "%"),
|
||||
|
@ -52,48 +40,22 @@ public class DatasetDaoImpl extends DatabaseAccess<Dataset> implements DatasetDa
|
|||
if (criteria.getPeriodStart() != null)
|
||||
query.where((builder, root) -> builder.greaterThan(root.get("created"), criteria.getPeriodStart()));
|
||||
if (!criteria.getAllVersions())
|
||||
query.initSubQuery(String.class).where((builder, root) -> builder.equal(root.get("dmp").get("version"), query.<String>subQueryMax((builder1, externalRoot, nestedRoot) -> builder1.and(builder1.equal(externalRoot.get("dmp").get("groupId"), nestedRoot.get("dmp").get("groupId")), builder1.notEqual(nestedRoot.get("dmp").get("status"), DMP.DMPStatus.DELETED.getValue())), Arrays.asList(new SelectionField(FieldSelectionType.COMPOSITE_FIELD, "dmp:version")), String.class)));
|
||||
if (criteria.getGroupIds() != null && !criteria.getGroupIds().isEmpty())
|
||||
query.where((builder, root) -> root.get("dmp").get("groupId").in(criteria.getGroupIds()));
|
||||
query.initSubQuery(String.class).where((builder, root) -> builder.equal(root.get("dmp").get("version"), query.<String>subQueryMax((builder1, externalRoot, nestedRoot) -> builder1.equal(externalRoot.get("dmp").get("groupId"), nestedRoot.get("dmp").get("groupId")), Arrays.asList(new SelectionField(FieldSelectionType.COMPOSITE_FIELD, "dmp:version")), String.class)));
|
||||
if (criteria.getDmpIds() != null && !criteria.getDmpIds().isEmpty())
|
||||
query.where((builder, root) -> root.get("dmp").get("id").in(criteria.getDmpIds()));
|
||||
/*if (criteria.getRole() != null) {
|
||||
if (criteria.getRole() != null) {
|
||||
query.where((builder, root) -> builder.equal(root.join("dmp").join("users").get("role"), criteria.getRole()));
|
||||
} else {
|
||||
query.where((builder, root) -> root.join("dmp").join("users").get("role").in(UserDMP.UserDMPRoles.getAllValues()));
|
||||
}*/
|
||||
if (criteria.getOrganisations() != null && !criteria.getOrganisations().isEmpty())
|
||||
}
|
||||
if (criteria.getOrganisations() != null && !criteria.getOrganisations().isEmpty()) {
|
||||
query.where((builder, root) -> root.join("dmp").join("organisations").get("reference").in(criteria.getOrganisations()));
|
||||
if (criteria.getGrants() != null && !criteria.getGrants().isEmpty())
|
||||
query.where((builder, root) -> root.join("dmp").join("grant").get("id").in(criteria.getGrants()));
|
||||
if (criteria.getGrantStatus() != null) {
|
||||
if (criteria.getGrantStatus().equals(GrantStateType.FINISHED.getValue().shortValue()))
|
||||
query.where((builder, root) -> builder.lessThan(root.get("dmp").get("grant").get("enddate"), new Date()));
|
||||
if (criteria.getGrantStatus().equals(GrantStateType.ONGOING.getValue().shortValue()))
|
||||
query.where((builder, root) ->
|
||||
builder.or(builder.greaterThan(root.get("dmp").get("grant").get("enddate"), new Date())
|
||||
, builder.isNull(root.get("dmp").get("grant").get("enddate"))));
|
||||
}
|
||||
if (criteria.getCollaborators() != null && !criteria.getCollaborators().isEmpty())
|
||||
if (criteria.getProjects() != null && !criteria.getProjects().isEmpty()) {
|
||||
query.where((builder, root) -> root.join("dmp").join("project").get("id").in(criteria.getProjects()));
|
||||
}
|
||||
if (criteria.getCollaborators() != null && !criteria.getCollaborators().isEmpty()) {
|
||||
query.where((builder, root) -> root.join("dmp", JoinType.LEFT).join("users", JoinType.LEFT).join("user", JoinType.LEFT).get("id").in(criteria.getCollaborators()));
|
||||
if (criteria.getDatasetTemplates() != null && !criteria.getDatasetTemplates().isEmpty())
|
||||
query.where((builder, root) -> root.get("profile").get("id").in(criteria.getDatasetTemplates()));
|
||||
|
||||
if (criteria.hasDoi()) {
|
||||
query.where((builder, root) -> builder.not(builder.isNull(root.join("dmp").join("dois").get("id"))));
|
||||
}
|
||||
query.where((builder, root) -> builder.notEqual(root.get("status"), Dataset.Status.DELETED.getValue()));
|
||||
query.where((builder, root) -> builder.notEqual(root.get("status"), Dataset.Status.CANCELED.getValue()));
|
||||
return query;
|
||||
}
|
||||
|
||||
public QueryableList<Dataset> filterFromElastic(DatasetCriteria criteria, List<UUID> ids) {
|
||||
QueryableList<Dataset> query = getDatabaseService().getQueryable(Dataset.getHints(), Dataset.class);
|
||||
|
||||
query.where(((builder, root) -> root.get("id").in(ids)));
|
||||
if (!criteria.getAllVersions())
|
||||
query.initSubQuery(String.class).where((builder, root) -> builder.equal(root.get("dmp").get("version"), query.<String>subQueryMax((builder1, externalRoot, nestedRoot) -> builder1.and(builder1.equal(externalRoot.get("dmp").get("groupId"), nestedRoot.get("dmp").get("groupId")), builder1.notEqual(nestedRoot.get("dmp").get("status"), DMP.DMPStatus.DELETED.getValue())), Arrays.asList(new SelectionField(FieldSelectionType.COMPOSITE_FIELD, "dmp:version")), String.class)));
|
||||
|
||||
query.where(((builder, root) -> builder.notEqual(root.get("status"), Dataset.Status.DELETED.getValue())));
|
||||
return query;
|
||||
}
|
||||
|
||||
|
@ -104,14 +66,12 @@ public class DatasetDaoImpl extends DatabaseAccess<Dataset> implements DatasetDa
|
|||
|
||||
@Override
|
||||
public Dataset find(UUID id) {
|
||||
return getDatabaseService().getQueryable(Dataset.class)
|
||||
.where((builder, root) -> builder.and(builder.notEqual(root.get("status"),Dataset.Status.DELETED.getValue()), builder.notEqual(root.get("status"),Dataset.Status.CANCELED.getValue()), builder.equal((root.get("id")), id))).getSingle();
|
||||
return getDatabaseService().getQueryable(Dataset.class).where((builder, root) -> builder.equal((root.get("id")), id)).getSingle();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Dataset find(UUID id, String hint) {
|
||||
return getDatabaseService().getQueryable(Dataset.getHints(), Dataset.class).withHint(hint)
|
||||
.where((builder, root) -> builder.and(builder.notEqual(root.get("status"),Dataset.Status.DELETED.getValue()), builder.notEqual(root.get("status"),Dataset.Status.CANCELED.getValue()), builder.equal((root.get("id")), id))).getSingle();
|
||||
return getDatabaseService().getQueryable(Dataset.getHints(), Dataset.class).withHint(hint).where((builder, root) -> builder.equal((root.get("id")), id)).getSingle();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -123,15 +83,10 @@ public class DatasetDaoImpl extends DatabaseAccess<Dataset> implements DatasetDa
|
|||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<Dataset> getAuthenticated(QueryableList<Dataset> query, UserInfo principal, List<Integer> roles) {
|
||||
if (roles != null && !roles.isEmpty()) {
|
||||
query.where((builder, root) -> {
|
||||
Join userJoin = root.join("dmp", JoinType.LEFT).join("users", JoinType.LEFT);
|
||||
return builder.and(builder.equal(userJoin.join("user", JoinType.LEFT).get("id"), principal.getId()), userJoin.get("role").in(roles));
|
||||
});
|
||||
} else {
|
||||
query.where((builder, root) -> builder.equal(root.join("dmp", JoinType.LEFT).join("users", JoinType.LEFT).join("user", JoinType.LEFT).get("id"), principal.getId()));
|
||||
|
||||
public QueryableList<Dataset> getAuthenticated(QueryableList<Dataset> query, UserInfo principal) {
|
||||
if (principal.getId() == null) query.where((builder, root) -> builder.equal(root.get("isPublic"), true));
|
||||
else {
|
||||
query.where((builder, root) -> builder.or(builder.equal(root.get("dmp").get("creator"), principal), builder.equal(root.join("dmp", JoinType.LEFT).join("users", JoinType.LEFT).join("user", JoinType.LEFT).get("id"), principal.getId())));
|
||||
}
|
||||
return query;
|
||||
}
|
||||
|
|
|
@ -3,6 +3,7 @@ package eu.eudat.data.dao.entities;
|
|||
import eu.eudat.data.dao.DatabaseAccess;
|
||||
import eu.eudat.data.dao.databaselayer.service.DatabaseService;
|
||||
import eu.eudat.data.entities.DatasetExternalDataset;
|
||||
import eu.eudat.data.entities.DatasetProfile;
|
||||
import eu.eudat.queryable.QueryableList;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.scheduling.annotation.Async;
|
||||
|
|
|
@ -2,23 +2,15 @@ package eu.eudat.data.dao.entities;
|
|||
|
||||
import eu.eudat.data.dao.DatabaseAccessLayer;
|
||||
import eu.eudat.data.dao.criteria.DatasetProfileCriteria;
|
||||
import eu.eudat.data.entities.DescriptionTemplate;
|
||||
import eu.eudat.data.entities.DescriptionTemplateType;
|
||||
import eu.eudat.data.entities.DatasetProfile;
|
||||
import eu.eudat.queryable.QueryableList;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
public interface DatasetProfileDao extends DatabaseAccessLayer<DescriptionTemplate, UUID> {
|
||||
public interface DatasetProfileDao extends DatabaseAccessLayer<DatasetProfile, UUID> {
|
||||
|
||||
QueryableList<DescriptionTemplate> getWithCriteria(DatasetProfileCriteria criteria);
|
||||
QueryableList<DatasetProfile> getWithCriteria(DatasetProfileCriteria criteria);
|
||||
|
||||
QueryableList<DescriptionTemplate> getAll();
|
||||
|
||||
QueryableList<DescriptionTemplate> getAuthenticated(QueryableList<DescriptionTemplate> query, UUID principal, List<Integer> roles);
|
||||
|
||||
List<DescriptionTemplate> getAllIds();
|
||||
|
||||
Long countWithType(DescriptionTemplateType type);
|
||||
QueryableList<DatasetProfile> getAll();
|
||||
|
||||
}
|
|
@ -3,8 +3,7 @@ package eu.eudat.data.dao.entities;
|
|||
import eu.eudat.data.dao.DatabaseAccess;
|
||||
import eu.eudat.data.dao.criteria.DatasetProfileCriteria;
|
||||
import eu.eudat.data.dao.databaselayer.service.DatabaseService;
|
||||
import eu.eudat.data.entities.DescriptionTemplate;
|
||||
import eu.eudat.data.entities.DescriptionTemplateType;
|
||||
import eu.eudat.data.entities.DatasetProfile;
|
||||
import eu.eudat.queryable.QueryableList;
|
||||
import eu.eudat.queryable.types.FieldSelectionType;
|
||||
import eu.eudat.queryable.types.SelectionField;
|
||||
|
@ -12,25 +11,21 @@ import org.springframework.beans.factory.annotation.Autowired;
|
|||
import org.springframework.scheduling.annotation.Async;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import javax.persistence.criteria.Join;
|
||||
import javax.persistence.criteria.JoinType;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
@Component("datasetProfileDao")
|
||||
public class DatasetProfileDaoImpl extends DatabaseAccess<DescriptionTemplate> implements DatasetProfileDao {
|
||||
public class DatasetProfileDaoImpl extends DatabaseAccess<DatasetProfile> implements DatasetProfileDao {
|
||||
|
||||
@Autowired
|
||||
public DatasetProfileDaoImpl(DatabaseService<DescriptionTemplate> databaseService) {
|
||||
public DatasetProfileDaoImpl(DatabaseService<DatasetProfile> databaseService) {
|
||||
super(databaseService);
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<DescriptionTemplate> getWithCriteria(DatasetProfileCriteria criteria) {
|
||||
QueryableList<DescriptionTemplate> query = getDatabaseService().getQueryable(DescriptionTemplate.class);
|
||||
public QueryableList<DatasetProfile> getWithCriteria(DatasetProfileCriteria criteria) {
|
||||
QueryableList<DatasetProfile> query = getDatabaseService().getQueryable(DatasetProfile.class);
|
||||
if (criteria.getLike() != null && !criteria.getLike().isEmpty())
|
||||
query.where((builder, root) -> builder.like(builder.upper(root.get("label")), "%" + criteria.getLike().toUpperCase() + "%"));
|
||||
if (!criteria.getAllVersions())
|
||||
|
@ -39,96 +34,43 @@ public class DatasetProfileDaoImpl extends DatabaseAccess<DescriptionTemplate> i
|
|||
nestedRoot.get("groupId")), Arrays.asList(new SelectionField(FieldSelectionType.FIELD, "version")), String.class)));
|
||||
if (criteria.getGroupIds() != null && !criteria.getGroupIds().isEmpty())
|
||||
query.where((builder, root) -> root.get("groupId").in(criteria.getGroupIds()));
|
||||
if (criteria.getFilter() != null && criteria.getUserId() != null) {
|
||||
if (criteria.getFilter().equals(DatasetProfileCriteria.DatasetProfileFilter.DMPs.getValue())) {
|
||||
query.initSubQuery(UUID.class).where((builder, root) ->
|
||||
builder.and(root.get("id").in(
|
||||
query.subQuery((builder1, root1) -> builder1.equal(root1.join("dmps", JoinType.LEFT).join("users", JoinType.LEFT).join("user", JoinType.LEFT).get("id"), criteria.getUserId()),
|
||||
Arrays.asList(new SelectionField(FieldSelectionType.FIELD, "id")))),
|
||||
builder.notEqual(root.get("id"), criteria.getUserId())));
|
||||
//query.where(((builder, root) -> builder.equal(root.join("dmps", JoinType.LEFT).join("users", JoinType.LEFT).join("user", JoinType.LEFT).get("id"), criteria.getUserId())));
|
||||
}
|
||||
if (criteria.getFilter().equals(DatasetProfileCriteria.DatasetProfileFilter.Datasets.getValue())) {
|
||||
query.initSubQuery(UUID.class).where((builder, root) ->
|
||||
builder.and(root.get("id").in(
|
||||
query.subQuery((builder1, root1) -> builder1.equal(root1.join("dataset", JoinType.LEFT).join("dmp", JoinType.LEFT).join("users", JoinType.LEFT).join("user", JoinType.LEFT).get("id"), criteria.getUserId()),
|
||||
Arrays.asList(new SelectionField(FieldSelectionType.FIELD, "id")))),
|
||||
builder.notEqual(root.get("id"), criteria.getUserId())));
|
||||
}
|
||||
}
|
||||
if (criteria.getStatus() != null) {
|
||||
query.where(((builder, root) -> builder.equal(root.get("status"), criteria.getStatus())));
|
||||
}
|
||||
if (criteria.getIds() != null) {
|
||||
query.where(((builder, root) -> root.get("id").in(criteria.getIds())));
|
||||
}
|
||||
if (criteria.getFinalized()) {
|
||||
query.where(((builder, root) -> builder.equal(root.get("status"), DescriptionTemplate.Status.FINALIZED.getValue())));
|
||||
} else {
|
||||
query.where(((builder, root) -> builder.notEqual(root.get("status"), DescriptionTemplate.Status.DELETED.getValue())));
|
||||
}
|
||||
if (criteria.getPeriodStart() != null)
|
||||
query.where((builder, root) -> builder.greaterThanOrEqualTo(root.get("created"), criteria.getPeriodStart()));
|
||||
query.where(((builder, root) -> builder.notEqual(root.get("status"), DatasetProfile.Status.DELETED.getValue())));
|
||||
return query;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DescriptionTemplate createOrUpdate(DescriptionTemplate item) {
|
||||
return this.getDatabaseService().createOrUpdate(item, DescriptionTemplate.class);
|
||||
public DatasetProfile createOrUpdate(DatasetProfile item) {
|
||||
return this.getDatabaseService().createOrUpdate(item, DatasetProfile.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public DescriptionTemplate find(UUID id) {
|
||||
return getDatabaseService().getQueryable(DescriptionTemplate.class).where((builder, root) -> builder.equal(root.get("id"), id)).getSingle();
|
||||
public DatasetProfile find(UUID id) {
|
||||
return getDatabaseService().getQueryable(DatasetProfile.class).where((builder, root) -> builder.equal(root.get("id"), id)).getSingle();
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<DescriptionTemplate> getAll() {
|
||||
return getDatabaseService().getQueryable(DescriptionTemplate.class);
|
||||
public QueryableList<DatasetProfile> getAll() {
|
||||
return getDatabaseService().getQueryable(DatasetProfile.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<DescriptionTemplate> getAllIds(){
|
||||
return getDatabaseService().getQueryable(DescriptionTemplate.class).withFields(Collections.singletonList("id")).toList();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void delete(DescriptionTemplate item) {
|
||||
public void delete(DatasetProfile item) {
|
||||
this.getDatabaseService().delete(item);
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<DescriptionTemplate> asQueryable() {
|
||||
return this.getDatabaseService().getQueryable(DescriptionTemplate.class);
|
||||
public QueryableList<DatasetProfile> asQueryable() {
|
||||
return this.getDatabaseService().getQueryable(DatasetProfile.class);
|
||||
}
|
||||
|
||||
@Async
|
||||
@Override
|
||||
public CompletableFuture<DescriptionTemplate> createOrUpdateAsync(DescriptionTemplate item) {
|
||||
public CompletableFuture<DatasetProfile> createOrUpdateAsync(DatasetProfile item) {
|
||||
return CompletableFuture.supplyAsync(() -> this.createOrUpdate(item));
|
||||
}
|
||||
|
||||
@Override
|
||||
public DescriptionTemplate find(UUID id, String hint) {
|
||||
public DatasetProfile find(UUID id, String hint) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<DescriptionTemplate> getAuthenticated(QueryableList<DescriptionTemplate> query, UUID principal, List<Integer> roles) {
|
||||
if (roles != null && !roles.isEmpty()) {
|
||||
query.where((builder, root) -> {
|
||||
Join userJoin = root.join("users", JoinType.LEFT);
|
||||
return builder.and(builder.equal(userJoin.join("user", JoinType.LEFT).get("id"), principal), userJoin.get("role").in(roles));
|
||||
});
|
||||
} else {
|
||||
query.where((builder, root) -> builder.equal(root.join("users", JoinType.LEFT).join("user", JoinType.LEFT).get("id"), principal));
|
||||
}
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Long countWithType(DescriptionTemplateType type) {
|
||||
return this.getDatabaseService().getQueryable(DescriptionTemplate.class).where((builder, root) -> builder.equal(root.get("type"), type)).count();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
package eu.eudat.data.dao.entities;
|
||||
|
||||
import eu.eudat.data.dao.DatabaseAccessLayer;
|
||||
import eu.eudat.data.entities.DatasetProfile;
|
||||
import eu.eudat.data.entities.DatasetService;
|
||||
|
||||
import java.util.UUID;
|
||||
|
|
|
@ -2,6 +2,8 @@ package eu.eudat.data.dao.entities;
|
|||
|
||||
import eu.eudat.data.dao.DatabaseAccess;
|
||||
import eu.eudat.data.dao.databaselayer.service.DatabaseService;
|
||||
import eu.eudat.data.entities.DatasetExternalDataset;
|
||||
import eu.eudat.data.entities.DatasetProfile;
|
||||
import eu.eudat.data.entities.DatasetService;
|
||||
import eu.eudat.queryable.QueryableList;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
|
|
@@ -1,10 +0,0 @@
package eu.eudat.data.dao.entities;

import eu.eudat.data.dao.DatabaseAccessLayer;
import eu.eudat.data.entities.DescriptionTemplateType;

import java.util.UUID;

public interface DescriptionTemplateTypeDao extends DatabaseAccessLayer<DescriptionTemplateType, UUID> {
    DescriptionTemplateType findFromName(String name);
}
@ -1,65 +0,0 @@
|
|||
package eu.eudat.data.dao.entities;
|
||||
|
||||
import eu.eudat.data.dao.DatabaseAccess;
|
||||
import eu.eudat.data.dao.databaselayer.service.DatabaseService;
|
||||
import eu.eudat.data.entities.DescriptionTemplateType;
|
||||
import eu.eudat.queryable.QueryableList;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.scheduling.annotation.Async;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import javax.transaction.Transactional;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
@Component("descriptionTemplateTypeDao")
|
||||
public class DescriptionTemplateTypeDaoImpl extends DatabaseAccess<DescriptionTemplateType> implements DescriptionTemplateTypeDao {
|
||||
|
||||
@Autowired
|
||||
public DescriptionTemplateTypeDaoImpl(DatabaseService<DescriptionTemplateType> databaseService) {
|
||||
super(databaseService);
|
||||
}
|
||||
|
||||
@Override
|
||||
public DescriptionTemplateType findFromName(String name){
|
||||
try {
|
||||
return this.getDatabaseService().getQueryable(DescriptionTemplateType.class).where((builder, root) -> builder.and(builder.equal(root.get("name"), name), builder.notEqual(root.get("status"), DescriptionTemplateType.Status.DELETED.getValue()))).getSingle();
|
||||
}
|
||||
catch(Exception e){
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional
|
||||
public DescriptionTemplateType createOrUpdate(DescriptionTemplateType item) {
|
||||
return this.getDatabaseService().createOrUpdate(item, DescriptionTemplateType.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public DescriptionTemplateType find(UUID id) {
|
||||
return getDatabaseService().getQueryable(DescriptionTemplateType.class).where((builder, root) -> builder.equal((root.get("id")), id)).getSingle();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void delete(DescriptionTemplateType item) {
|
||||
this.getDatabaseService().delete(item);
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<DescriptionTemplateType> asQueryable() {
|
||||
return this.getDatabaseService().getQueryable(DescriptionTemplateType.class).where((builder, root) -> builder.notEqual((root.get("status")), DescriptionTemplateType.Status.DELETED.getValue()));
|
||||
}
|
||||
|
||||
@Async
|
||||
@Override
|
||||
public CompletableFuture<DescriptionTemplateType> createOrUpdateAsync(DescriptionTemplateType item) {
|
||||
return CompletableFuture.supplyAsync(() -> this.createOrUpdate(item));
|
||||
}
|
||||
|
||||
@Override
|
||||
public DescriptionTemplateType find(UUID id, String hint) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
}
@@ -1,9 +0,0 @@
package eu.eudat.data.dao.entities;

import eu.eudat.data.dao.DatabaseAccessLayer;
import eu.eudat.data.entities.DMPDatasetProfile;

import java.util.UUID;

public interface DmpDatasetProfileDao extends DatabaseAccessLayer<DMPDatasetProfile, UUID> {
}
@@ -1,52 +0,0 @@
package eu.eudat.data.dao.entities;

import eu.eudat.data.dao.DatabaseAccess;
import eu.eudat.data.dao.databaselayer.service.DatabaseService;
import eu.eudat.data.entities.Content;
import eu.eudat.data.entities.DMPDatasetProfile;
import eu.eudat.queryable.QueryableList;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import java.util.UUID;
import java.util.concurrent.CompletableFuture;

@Service("dmpDatasetProfileDao")
public class DmpDatasetProfileDaoImpl extends DatabaseAccess<DMPDatasetProfile> implements DmpDatasetProfileDao {
    @Autowired
    public DmpDatasetProfileDaoImpl(DatabaseService<DMPDatasetProfile> databaseService) {
        super(databaseService);
    }

    @Override
    public DMPDatasetProfile createOrUpdate(DMPDatasetProfile item) {
        return this.getDatabaseService().createOrUpdate(item, DMPDatasetProfile.class);
    }

    @Override
    @Async
    public CompletableFuture<DMPDatasetProfile> createOrUpdateAsync(DMPDatasetProfile item) {
        return CompletableFuture.supplyAsync(() -> this.createOrUpdate(item));
    }

    @Override
    public DMPDatasetProfile find(UUID id) {
        return this.getDatabaseService().getQueryable(DMPDatasetProfile.class).where((builder, root) -> builder.equal(root.get("id"), id)).getSingle();
    }

    @Override
    public DMPDatasetProfile find(UUID id, String hint) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void delete(DMPDatasetProfile item) {
        this.getDatabaseService().delete(item);
    }

    @Override
    public QueryableList<DMPDatasetProfile> asQueryable() {
        return this.getDatabaseService().getQueryable(DMPDatasetProfile.class);
    }
}
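The @Async createOrUpdateAsync variant above (repeated across most DAOs in this change) hands the save to Spring's task executor and returns a CompletableFuture instead of blocking the caller. A minimal caller sketch, assuming @EnableAsync is configured and the DAO is injected as a Spring bean; the service name is illustrative:

import eu.eudat.data.dao.entities.DmpDatasetProfileDao;
import eu.eudat.data.entities.DMPDatasetProfile;
import org.springframework.stereotype.Service;

import java.util.concurrent.CompletableFuture;

@Service
public class DmpDatasetProfileLinker {
    private final DmpDatasetProfileDao dmpDatasetProfileDao;

    public DmpDatasetProfileLinker(DmpDatasetProfileDao dmpDatasetProfileDao) {
        this.dmpDatasetProfileDao = dmpDatasetProfileDao;
    }

    public DMPDatasetProfile link(DMPDatasetProfile link) {
        // The save is scheduled on the async executor; the caller decides when (or whether) to block.
        CompletableFuture<DMPDatasetProfile> pending = dmpDatasetProfileDao.createOrUpdateAsync(link);
        return pending.join(); // or compose with thenApply/thenCombine instead of joining
    }
}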
@@ -1,13 +0,0 @@
package eu.eudat.data.dao.entities;

import eu.eudat.data.dao.DatabaseAccessLayer;
import eu.eudat.data.dao.criteria.EmailConfirmationCriteria;
import eu.eudat.data.entities.EmailConfirmation;
import eu.eudat.queryable.QueryableList;

import java.util.UUID;

public interface EmailConfirmationDao extends DatabaseAccessLayer<EmailConfirmation, UUID> {

    QueryableList<EmailConfirmation> getWithCriteria(EmailConfirmationCriteria criteria);
}
@ -1,56 +0,0 @@
|
|||
package eu.eudat.data.dao.entities;
|
||||
|
||||
import eu.eudat.data.dao.DatabaseAccess;
|
||||
import eu.eudat.data.dao.criteria.EmailConfirmationCriteria;
|
||||
import eu.eudat.data.dao.databaselayer.service.DatabaseService;
|
||||
import eu.eudat.data.entities.EmailConfirmation;
|
||||
import eu.eudat.queryable.QueryableList;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
@Service("LoginConfirmationEmailDao")
|
||||
public class EmailConfirmationDaoImpl extends DatabaseAccess<EmailConfirmation> implements EmailConfirmationDao {
|
||||
|
||||
@Autowired
|
||||
public EmailConfirmationDaoImpl(DatabaseService<EmailConfirmation> databaseService) {
|
||||
super(databaseService);
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<EmailConfirmation> getWithCriteria(EmailConfirmationCriteria criteria) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public EmailConfirmation createOrUpdate(EmailConfirmation item) {
|
||||
return this.getDatabaseService().createOrUpdate(item, EmailConfirmation.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public CompletableFuture<EmailConfirmation> createOrUpdateAsync(EmailConfirmation item) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public EmailConfirmation find(UUID id) {
|
||||
return this.getDatabaseService().getQueryable(EmailConfirmation.class).where((builder, root) -> builder.equal(root.get("id"), id)).getSingle();
|
||||
}
|
||||
|
||||
@Override
|
||||
public EmailConfirmation find(UUID id, String hint) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void delete(EmailConfirmation item) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<EmailConfirmation> asQueryable() {
|
||||
return this.getDatabaseService().getQueryable(EmailConfirmation.class);
|
||||
}
|
||||
}
|
|
@@ -1,10 +0,0 @@
package eu.eudat.data.dao.entities;

import eu.eudat.data.dao.DatabaseAccessLayer;
import eu.eudat.data.entities.EntityDoi;

import java.util.UUID;

public interface EntityDoiDao extends DatabaseAccessLayer<EntityDoi, UUID> {
    EntityDoi findFromDoi(String doi);
}
@ -1,56 +0,0 @@
|
|||
package eu.eudat.data.dao.entities;
|
||||
|
||||
import eu.eudat.data.dao.DatabaseAccess;
|
||||
import eu.eudat.data.dao.databaselayer.service.DatabaseService;
|
||||
import eu.eudat.data.entities.EntityDoi;
|
||||
import eu.eudat.queryable.QueryableList;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
@Component("EntityDoiDao")
|
||||
public class EntityDoiDaoImpl extends DatabaseAccess<EntityDoi> implements EntityDoiDao {
|
||||
|
||||
@Autowired
|
||||
public EntityDoiDaoImpl(DatabaseService<EntityDoi> databaseService){
|
||||
super(databaseService);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public EntityDoi createOrUpdate(EntityDoi item) {
|
||||
return this.getDatabaseService().createOrUpdate(item, EntityDoi.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public CompletableFuture<EntityDoi> createOrUpdateAsync(EntityDoi item) {
|
||||
return CompletableFuture.supplyAsync(() -> this.createOrUpdate(item));
|
||||
}
|
||||
|
||||
@Override
|
||||
public EntityDoi find(UUID id) {
|
||||
return this.getDatabaseService().getQueryable(EntityDoi.class).where((builder, root) -> builder.equal(root.get("id"), id)).getSingle();
|
||||
}
|
||||
|
||||
@Override
|
||||
public EntityDoi findFromDoi(String doi) {
|
||||
return this.getDatabaseService().getQueryable(EntityDoi.class).where((builder, root) -> builder.equal(root.get("doi"), doi)).getSingle();
|
||||
}
|
||||
|
||||
@Override
|
||||
public EntityDoi find(UUID id, String hint) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void delete(EntityDoi item) {
|
||||
this.getDatabaseService().delete(item);
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<EntityDoi> asQueryable() {
|
||||
return this.getDatabaseService().getQueryable(EntityDoi.class);
|
||||
}
|
||||
}
|
|
@@ -25,11 +25,7 @@ public class ExternalDatasetDaoImpl extends DatabaseAccess<ExternalDataset> impl
    public QueryableList<ExternalDataset> getWithCriteria(ExternalDatasetCriteria criteria) {
        QueryableList<ExternalDataset> query = this.getDatabaseService().getQueryable(ExternalDataset.class);
        if (criteria.getLike() != null && !criteria.getLike().isEmpty())
            query.where((builder, root) -> builder.or(
                    builder.like(builder.upper(root.get("reference")), "%" + criteria.getLike().toUpperCase() + "%"),
                    builder.equal(builder.upper(root.get("label")), criteria.getLike().toUpperCase())));
        if (criteria.getCreationUserId() != null)
            query.where((builder, root) -> builder.equal(root.join("creationUser").get("id"), criteria.getCreationUserId()));
        query.where((builder, root) -> builder.like(builder.upper(root.get("label")), "%" + criteria.getLike().toUpperCase() + "%"));
        return query;
    }
@@ -1,11 +0,0 @@
package eu.eudat.data.dao.entities;

import eu.eudat.data.dao.DatabaseAccessLayer;
import eu.eudat.data.entities.FileUpload;

import java.util.List;
import java.util.UUID;

public interface FileUploadDao extends DatabaseAccessLayer<FileUpload, UUID> {
    List<FileUpload> getFileUploads(UUID entityId);
}
@ -1,56 +0,0 @@
|
|||
package eu.eudat.data.dao.entities;
|
||||
|
||||
import eu.eudat.data.dao.DatabaseAccess;
|
||||
import eu.eudat.data.dao.databaselayer.service.DatabaseService;
|
||||
import eu.eudat.data.entities.FileUpload;
|
||||
import eu.eudat.queryable.QueryableList;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
@Component("FileUploadDao")
|
||||
public class FileUploadDaoImpl extends DatabaseAccess<FileUpload> implements FileUploadDao {
|
||||
|
||||
@Autowired
|
||||
public FileUploadDaoImpl(DatabaseService<FileUpload> databaseService) {
|
||||
super(databaseService);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FileUpload createOrUpdate(FileUpload item) {
|
||||
return getDatabaseService().createOrUpdate(item, FileUpload.class);
|
||||
}
|
||||
//
|
||||
@Override
|
||||
public CompletableFuture<FileUpload> createOrUpdateAsync(FileUpload item) {
|
||||
return CompletableFuture.supplyAsync(() -> this.createOrUpdate(item));
|
||||
}
|
||||
|
||||
@Override
|
||||
public FileUpload find(UUID id) {
|
||||
return getDatabaseService().getQueryable(FileUpload.class).where((builder, root) -> builder.equal(root.get("id"), id)).getSingle();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<FileUpload> getFileUploads(UUID entityId) {
|
||||
return this.getDatabaseService().getQueryable(FileUpload.class).where((builder, root) -> builder.equal(root.get("entityId"), entityId)).toList();
|
||||
}
|
||||
|
||||
@Override
|
||||
public FileUpload find(UUID id, String hint) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void delete(FileUpload item) {
|
||||
this.getDatabaseService().delete(item);
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<FileUpload> asQueryable() {
|
||||
return this.getDatabaseService().getQueryable(FileUpload.class);
|
||||
}
|
||||
}
|
|
@@ -1,16 +0,0 @@
package eu.eudat.data.dao.entities;

import eu.eudat.data.dao.DatabaseAccessLayer;
import eu.eudat.data.dao.criteria.FunderCriteria;
import eu.eudat.data.entities.Funder;
import eu.eudat.data.entities.UserInfo;
import eu.eudat.queryable.QueryableList;

import java.util.UUID;

public interface FunderDao extends DatabaseAccessLayer<Funder, UUID> {

    QueryableList<Funder> getWithCritetia(FunderCriteria criteria);

    QueryableList<Funder> getAuthenticated(QueryableList<Funder> query, UserInfo principal);
}
@ -1,75 +0,0 @@
|
|||
package eu.eudat.data.dao.entities;
|
||||
|
||||
import eu.eudat.data.dao.DatabaseAccess;
|
||||
import eu.eudat.data.dao.criteria.FunderCriteria;
|
||||
import eu.eudat.data.dao.databaselayer.service.DatabaseService;
|
||||
import eu.eudat.data.entities.Funder;
|
||||
import eu.eudat.data.entities.UserInfo;
|
||||
import eu.eudat.queryable.QueryableList;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
@Component("funderDao")
|
||||
public class FunderDaoImpl extends DatabaseAccess<Funder> implements FunderDao {
|
||||
|
||||
@Autowired
|
||||
public FunderDaoImpl(DatabaseService<Funder> databaseService) {
|
||||
super(databaseService);
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<Funder> getWithCritetia(FunderCriteria criteria) {
|
||||
QueryableList<Funder> query = getDatabaseService().getQueryable(Funder.class);
|
||||
if (criteria.getLike() != null && !criteria.getLike().isEmpty())
|
||||
query.where((builder, root) ->
|
||||
builder.or(builder.like(builder.upper(root.get("label")), "%" + criteria.getLike().toUpperCase() + "%"),
|
||||
builder.or(builder.like(builder.upper(root.get("definition")), "%" + criteria.getLike().toUpperCase() + "%"))));
|
||||
if (criteria.getReference() != null)
|
||||
query.where((builder, root) -> builder.like(builder.upper(root.get("reference")), "%" + criteria.getReference().toUpperCase() + "%"));
|
||||
if (criteria.getExactReference() != null)
|
||||
query.where((builder, root) -> builder.like(builder.upper(root.get("reference")), criteria.getExactReference().toUpperCase()));
|
||||
if (criteria.getPeriodStart() != null)
|
||||
query.where((builder, root) -> builder.greaterThanOrEqualTo(root.get("created"), criteria.getPeriodStart()));
|
||||
query.where((builder, root) -> builder.notEqual(root.get("status"), Funder.Status.DELETED.getValue()));
|
||||
return query;
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<Funder> getAuthenticated(QueryableList<Funder> query, UserInfo principal) {
|
||||
query.where((builder, root) -> builder.equal(root.get("creationUser"), principal));
|
||||
return query;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Funder createOrUpdate(Funder item) {
|
||||
return this.getDatabaseService().createOrUpdate(item, Funder.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public CompletableFuture<Funder> createOrUpdateAsync(Funder item) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Funder find(UUID id) {
|
||||
return this.getDatabaseService().getQueryable(Funder.class).where((builder, root) -> builder.equal(root.get("id"), id)).getSingle();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Funder find(UUID id, String hint) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void delete(Funder item) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<Funder> asQueryable() {
|
||||
return this.getDatabaseService().getQueryable(Funder.class);
|
||||
}
|
||||
}
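Putting the two FunderDao methods above together: getWithCritetia applies the free-text/reference filters and getAuthenticated then restricts the result to funders created by the requesting user. A usage sketch, assuming FunderCriteria exposes setters matching the getters used above; the helper name is illustrative:

import eu.eudat.data.dao.criteria.FunderCriteria;
import eu.eudat.data.dao.entities.FunderDao;
import eu.eudat.data.entities.Funder;
import eu.eudat.data.entities.UserInfo;
import eu.eudat.queryable.QueryableList;

import java.util.List;

public class FunderSearchSketch {
    // Hypothetical helper mirroring how getWithCritetia/getAuthenticated are chained.
    public static List<Funder> searchOwn(FunderDao funderDao, UserInfo principal, String term) {
        FunderCriteria criteria = new FunderCriteria();
        criteria.setLike(term); // matched case-insensitively against label and definition (setter assumed)
        QueryableList<Funder> query = funderDao.getWithCritetia(criteria); // method name as declared in the interface
        return funderDao.getAuthenticated(query, principal).toList();
    }
}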
@@ -1,17 +0,0 @@
package eu.eudat.data.dao.entities;

import eu.eudat.data.dao.DatabaseAccessLayer;
import eu.eudat.data.dao.criteria.GrantCriteria;
import eu.eudat.data.entities.Grant;
import eu.eudat.data.entities.UserInfo;
import eu.eudat.queryable.QueryableList;

import java.util.UUID;

public interface GrantDao extends DatabaseAccessLayer<Grant, UUID> {

    QueryableList<Grant> getWithCriteria(GrantCriteria criteria);

    QueryableList<Grant> getAuthenticated(QueryableList<Grant> query, UserInfo principal);

}
@ -1,102 +0,0 @@
|
|||
package eu.eudat.data.dao.entities;
|
||||
|
||||
import eu.eudat.data.dao.DatabaseAccess;
|
||||
import eu.eudat.data.dao.criteria.GrantCriteria;
|
||||
import eu.eudat.data.dao.databaselayer.service.DatabaseService;
|
||||
import eu.eudat.data.entities.DMP;
|
||||
import eu.eudat.data.entities.Grant;
|
||||
import eu.eudat.data.entities.UserInfo;
|
||||
import eu.eudat.queryable.QueryableList;
|
||||
import eu.eudat.types.grant.GrantStateType;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.scheduling.annotation.Async;
|
||||
import org.springframework.stereotype.Component;
|
||||
import schemasMicrosoftComOfficeOffice.LeftDocument;
|
||||
|
||||
import javax.persistence.criteria.JoinType;
|
||||
import java.util.Date;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
@Component("grantDao")
|
||||
public class GrantDaoImpl extends DatabaseAccess<Grant> implements GrantDao {
|
||||
|
||||
@Autowired
|
||||
public GrantDaoImpl(DatabaseService<Grant> databaseService) {
|
||||
super(databaseService);
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<Grant> getWithCriteria(GrantCriteria criteria) {
|
||||
QueryableList<Grant> query = getDatabaseService().getQueryable(Grant.class);
|
||||
if (criteria.getLike() != null && !criteria.getLike().isEmpty())
|
||||
query.where((builder, root) ->
|
||||
builder.or(builder.like(builder.upper(root.get("label")), "%" + criteria.getLike().toUpperCase() + "%"),
|
||||
builder.or(builder.like(builder.upper(root.get("description")), "%" + criteria.getLike().toUpperCase() + "%"))));
|
||||
if (criteria.getPeriodEnd() != null)
|
||||
query.where((builder, root) -> builder.lessThan(root.get("enddate"), criteria.getPeriodEnd()));
|
||||
if (criteria.getPeriodStart() != null)
|
||||
query.where((builder, root) -> builder.greaterThan(root.get("startdate"), criteria.getPeriodStart()));
|
||||
if (criteria.getReference() != null)
|
||||
query.where((builder, root) -> builder.like(root.get("reference"), "%" + criteria.getReference() + "%"));
|
||||
if (criteria.getExactReference() != null)
|
||||
query.where((builder, root) -> builder.like(root.get("reference"), criteria.getExactReference()));
|
||||
if (criteria.getGrantStateType() != null) {
|
||||
if (criteria.getGrantStateType().equals(GrantStateType.FINISHED.getValue()))
|
||||
query.where((builder, root) -> builder.lessThan(root.get("enddate"), new Date()));
|
||||
if (criteria.getGrantStateType().equals(GrantStateType.ONGOING.getValue()))
|
||||
query.where((builder, root) ->
|
||||
builder.or(builder.greaterThan(root.get("enddate"), new Date())
|
||||
, builder.isNull(root.get("enddate"))));
|
||||
}
|
||||
if (criteria.isPublic()) {
|
||||
query.where((builder, root) -> builder.equal(root.join("dmps").get("status"), DMP.DMPStatus.FINALISED.getValue())).distinct();
|
||||
}
|
||||
|
||||
if (criteria.isActive()) {
|
||||
query.where((builder, root) -> builder.notEqual(root.join("dmps").get("status"), DMP.DMPStatus.DELETED.getValue())).distinct();
|
||||
}
|
||||
if (criteria.getFunderId() != null && !criteria.getFunderId().trim().isEmpty())
|
||||
query.where((builder, root) -> builder.equal(root.get("funder").get("id"), UUID.fromString(criteria.getFunderId())));
|
||||
if (criteria.getFunderReference() != null && !criteria.getFunderReference().isEmpty())
|
||||
query.where((builder, root) -> builder.or(builder.like(root.join("funder", JoinType.LEFT).get("reference"), "%" + criteria.getFunderReference())));
|
||||
query.where((builder, root) -> builder.notEqual(root.get("status"), Grant.Status.DELETED.getValue()));
|
||||
return query;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Grant createOrUpdate(Grant item) {
|
||||
return getDatabaseService().createOrUpdate(item, Grant.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Grant find(UUID id) {
|
||||
return getDatabaseService().getQueryable(Grant.class).where((builder, root) -> builder.equal((root.get("id")), id)).getSingle();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void delete(Grant item) {
|
||||
this.getDatabaseService().delete(item);
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<Grant> asQueryable() {
|
||||
return this.getDatabaseService().getQueryable(Grant.class);
|
||||
}
|
||||
|
||||
public QueryableList<Grant> getAuthenticated(QueryableList<Grant> query, UserInfo principal) {
|
||||
query.where((builder, root) -> builder.equal(root.get("creationUser").get("id"), principal.getId())).distinct();
|
||||
return query;
|
||||
}
|
||||
|
||||
@Async
|
||||
@Override
|
||||
public CompletableFuture<Grant> createOrUpdateAsync(Grant item) {
|
||||
return CompletableFuture.supplyAsync(() -> this.createOrUpdate(item));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Grant find(UUID id, String hint) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
}
|
|
@@ -1,13 +0,0 @@
package eu.eudat.data.dao.entities;

import eu.eudat.data.dao.DatabaseAccessLayer;
import eu.eudat.data.dao.criteria.LockCriteria;
import eu.eudat.data.entities.Lock;
import eu.eudat.queryable.QueryableList;

import java.util.UUID;

public interface LockDao extends DatabaseAccessLayer<Lock, UUID> {

    QueryableList<Lock> getWithCriteria(LockCriteria criteria);
}
@ -1,65 +0,0 @@
|
|||
package eu.eudat.data.dao.entities;
|
||||
|
||||
import eu.eudat.data.dao.DatabaseAccess;
|
||||
import eu.eudat.data.dao.criteria.LockCriteria;
|
||||
import eu.eudat.data.dao.databaselayer.service.DatabaseService;
|
||||
import eu.eudat.data.entities.Lock;
|
||||
import eu.eudat.queryable.QueryableList;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.scheduling.annotation.Async;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
@Service("LockDao")
|
||||
public class LockDaoImpl extends DatabaseAccess<Lock> implements LockDao {
|
||||
|
||||
@Autowired
|
||||
public LockDaoImpl(DatabaseService<Lock> databaseService) {
|
||||
super(databaseService);
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<Lock> getWithCriteria(LockCriteria criteria) {
|
||||
QueryableList<Lock> query = this.getDatabaseService().getQueryable(Lock.class);
|
||||
if (criteria.getTouchedAt() != null)
|
||||
query.where((builder, root) -> builder.equal(root.get("touchedAt"), criteria.getTouchedAt()));
|
||||
if (criteria.getLockedBy() != null)
|
||||
query.where(((builder, root) -> builder.equal(root.get("lockedBy"), criteria.getLockedBy())));
|
||||
if (criteria.getTarget() != null)
|
||||
query.where(((builder, root) -> builder.equal(root.get("target"), criteria.getTarget())));
|
||||
return query;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Lock createOrUpdate(Lock item) {
|
||||
return this.getDatabaseService().createOrUpdate(item, Lock.class);
|
||||
}
|
||||
|
||||
@Async
|
||||
@Override
|
||||
public CompletableFuture<Lock> createOrUpdateAsync(Lock item) {
|
||||
return CompletableFuture.supplyAsync(() -> this.createOrUpdate(item));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Lock find(UUID id) {
|
||||
return this.getDatabaseService().getQueryable(Lock.class).where(((builder, root) -> builder.equal(root.get("id"), id))).getSingle();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Lock find(UUID id, String hint) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void delete(Lock item) {
|
||||
this.getDatabaseService().delete(item);
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<Lock> asQueryable() {
|
||||
return this.getDatabaseService().getQueryable(Lock.class);
|
||||
}
|
||||
}
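As a rough illustration of how this DAO's criteria query could be used: checking whether a target entity already holds a lock. This is a sketch only; the LockCriteria setter and the count() terminal operation are assumptions, since the diff shows only the getters plus where()/getSingle().

    // Hypothetical sketch, assuming LockCriteria.setTarget(...) and QueryableList.count()
    // exist; neither appears in this diff.
    public boolean isLocked(LockDao lockDao, UUID targetId) {
        LockCriteria criteria = new LockCriteria();
        criteria.setTarget(targetId);                          // assumed setter
        return lockDao.getWithCriteria(criteria).count() > 0;  // count() assumed
    }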
@@ -1,13 +0,0 @@
package eu.eudat.data.dao.entities;

import eu.eudat.data.dao.DatabaseAccessLayer;
import eu.eudat.data.dao.criteria.NotificationCriteria;
import eu.eudat.data.entities.Notification;
import eu.eudat.queryable.QueryableList;

import java.util.UUID;

public interface NotificationDao extends DatabaseAccessLayer<Notification, UUID> {

    QueryableList<Notification> getWithCriteria(NotificationCriteria criteria);
}
@@ -1,61 +0,0 @@
package eu.eudat.data.dao.entities;

import eu.eudat.data.dao.DatabaseAccess;
import eu.eudat.data.dao.criteria.NotificationCriteria;
import eu.eudat.data.dao.databaselayer.service.DatabaseService;
import eu.eudat.data.entities.Lock;
import eu.eudat.data.entities.Notification;
import eu.eudat.queryable.QueryableList;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.UUID;
import java.util.concurrent.CompletableFuture;

@Service("NotificationDao")
public class NotificationDaoImpl extends DatabaseAccess<Notification> implements NotificationDao {

    @Autowired
    public NotificationDaoImpl(DatabaseService<Notification> databaseService) {
        super(databaseService);
    }

    @Override
    public QueryableList<Notification> getWithCriteria(NotificationCriteria criteria) {
        QueryableList<Notification> query = this.getDatabaseService().getQueryable(Notification.class);
        if (criteria.getIsActive() != null)
            query.where((builder, root) -> builder.equal(root.get("isActive"), criteria.getIsActive()));
        if (criteria.getNotifyState() != null)
            query.where(((builder, root) -> builder.equal(root.get("notifyState"), criteria.getNotifyState())));
        return query;
    }

    @Override
    public Notification createOrUpdate(Notification item) {
        return this.getDatabaseService().createOrUpdate(item, Notification.class);
    }

    @Override
    public CompletableFuture<Notification> createOrUpdateAsync(Notification item) {
        return CompletableFuture.supplyAsync(() -> this.getDatabaseService().createOrUpdate(item, Notification.class));
    }

    @Override
    public Notification find(UUID id) {
        return this.getDatabaseService().getQueryable(Notification.class).where(((builder, root) -> builder.equal(root.get("id"), id))).getSingle();
    }

    @Override
    public Notification find(UUID id, String hint) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void delete(Notification item) {
        this.getDatabaseService().delete(item);
    }

    @Override
    public QueryableList<Notification> asQueryable() {
        return this.getDatabaseService().getQueryable(Notification.class);
    }
}
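A sketch of how a background sender might pull pending notifications through the criteria above. Not part of the diff: the NotificationCriteria setters and the toList() terminal operation are assumptions mirroring the getters shown in getWithCriteria.

    // Hypothetical sketch; setIsActive/setNotifyState and toList() are assumed.
    public List<Notification> findPending(NotificationDao notificationDao, Boolean isActive, Integer pendingState) {
        NotificationCriteria criteria = new NotificationCriteria();
        criteria.setIsActive(isActive);         // assumed setter, mirrors getIsActive()
        criteria.setNotifyState(pendingState);  // assumed setter, mirrors getNotifyState()
        return notificationDao.getWithCriteria(criteria).toList(); // toList() assumed
    }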
@@ -26,23 +26,14 @@ public class OrganisationDaoImpl extends DatabaseAccess<Organisation> implements
@Override
public QueryableList<Organisation> getWithCriteria(OrganisationCriteria criteria) {
    QueryableList<Organisation> query = this.getDatabaseService().getQueryable(Organisation.class);
    if (criteria.getLabelLike() != null && criteria.getLike() != null) {
        query.where((builder, root) -> builder.or(builder.equal(root.get("reference"), criteria.getLike()), builder.like(builder.upper(root.get("label")), "%" + criteria.getLabelLike().toUpperCase() + "%")));
    } else {
        if (criteria.getLike() != null)
            query.where((builder, root) -> builder.equal(root.get("reference"), criteria.getLike()));
        if (criteria.getLabelLike() != null) {
            query.where((builder, root) -> builder.like(builder.upper(root.get("label")), "%" + criteria.getLabelLike().toUpperCase() + "%"));
        }
        if (criteria.getPublic() != null && criteria.getPublic()) {
            query.where((builder, root) -> builder.equal(root.join("dmps", JoinType.LEFT).get("status"), DMP.DMPStatus.FINALISED.getValue()));
        }
    if (criteria.getLike() != null)
        query.where((builder, root) -> builder.equal(root.get("reference"), criteria.getLike()));
    if (criteria.getLabelLike() != null) {
        query.where((builder, root) -> builder.like(builder.upper(root.get("label")), "%" + criteria.getLabelLike().toUpperCase() + "%"));
    }

    if (criteria.isActive()) {
        query.where((builder, root) -> builder.notEqual(root.join("dmps").get("status"), DMP.DMPStatus.DELETED.getValue())).distinct();
    if (criteria.getPublic() != null && criteria.getPublic()) {
        query.where((builder, root) -> builder.equal(root.join("dmps", JoinType.LEFT).get("status"), DMP.DMPStatus.FINALISED.getValue()));
    }

    return query;
}

@@ -73,7 +64,7 @@ public class OrganisationDaoImpl extends DatabaseAccess<Organisation> implements
}

public QueryableList<Organisation> getAuthenticated(QueryableList<Organisation> query, UserInfo principal) {
    query.where((builder, root) -> builder.equal(root.join("dmps").join("users").get("user"), principal));
    query.where((builder, root) -> builder.equal(root.join("dmps").get("creator"), principal));
    return query;
}
@@ -10,7 +10,8 @@ import java.util.UUID;

public interface ProjectDao extends DatabaseAccessLayer<Project, UUID> {

    QueryableList<Project> getWithCritetia(ProjectCriteria criteria);
    QueryableList<Project> getWithCriteria(ProjectCriteria criteria);

    QueryableList<Project> getAuthenticated(QueryableList<Project> query, UserInfo principal);
}
    QueryableList<Project> getAuthenticated(QueryableList<Project> query, UserInfo principal);

}
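The interface rename above (the misspelled getWithCritetia on master becomes getWithCriteria on ui-refactoring) is source-incompatible, so any caller compiled against master needs the corresponding one-line change. A hypothetical call site, purely for illustration:

    // QueryableList<Project> projects = projectDao.getWithCritetia(criteria);  // master
    QueryableList<Project> projects = projectDao.getWithCriteria(criteria);     // ui-refactoring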
@ -3,71 +3,90 @@ package eu.eudat.data.dao.entities;
|
|||
import eu.eudat.data.dao.DatabaseAccess;
|
||||
import eu.eudat.data.dao.criteria.ProjectCriteria;
|
||||
import eu.eudat.data.dao.databaselayer.service.DatabaseService;
|
||||
import eu.eudat.data.entities.DMP;
|
||||
import eu.eudat.data.entities.Project;
|
||||
import eu.eudat.data.entities.UserInfo;
|
||||
import eu.eudat.queryable.QueryableList;
|
||||
import org.springframework.stereotype.Service;
|
||||
import eu.eudat.types.project.ProjectStateType;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.scheduling.annotation.Async;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import javax.persistence.criteria.JoinType;
|
||||
import java.util.Date;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
@Service("projectDao")
|
||||
@Component("projectDao")
|
||||
public class ProjectDaoImpl extends DatabaseAccess<Project> implements ProjectDao {
|
||||
|
||||
public ProjectDaoImpl(DatabaseService<Project> databaseService) {
|
||||
super(databaseService);
|
||||
}
|
||||
@Autowired
|
||||
public ProjectDaoImpl(DatabaseService<Project> databaseService) {
|
||||
super(databaseService);
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<Project> getWithCritetia(ProjectCriteria criteria) {
|
||||
QueryableList<Project> query = getDatabaseService().getQueryable(Project.class);
|
||||
if (criteria.getLike() != null && !criteria.getLike().isEmpty())
|
||||
query.where((builder, root) ->
|
||||
builder.or(builder.like(builder.upper(root.get("label")), "%" + criteria.getLike().toUpperCase() + "%"),
|
||||
builder.or(builder.like(builder.upper(root.get("description")), "%" + criteria.getLike().toUpperCase() + "%"))));
|
||||
if (criteria.getReference() != null)
|
||||
query.where((builder, root) -> builder.like(root.get("reference"), "%" + criteria.getReference() + "%"));
|
||||
if (criteria.getExactReference() != null)
|
||||
query.where((builder, root) -> builder.like(root.get("reference"), criteria.getExactReference()));
|
||||
if (criteria.getPeriodStart() != null)
|
||||
query.where((builder, root) -> builder.greaterThanOrEqualTo(root.get("startdate"), criteria.getPeriodStart()));
|
||||
query.where((builder, root) -> builder.notEqual(root.get("status"), Project.Status.DELETED.getValue()));
|
||||
return query;
|
||||
}
|
||||
@Override
|
||||
public QueryableList<Project> getWithCriteria(ProjectCriteria criteria) {
|
||||
QueryableList<Project> query = getDatabaseService().getQueryable(Project.class);
|
||||
if (criteria.getLike() != null && !criteria.getLike().isEmpty())
|
||||
query.where((builder, root) ->
|
||||
builder.or(builder.like(builder.upper(root.get("label")), "%" + criteria.getLike().toUpperCase() + "%"),
|
||||
builder.or(builder.like(builder.upper(root.get("description")), "%" + criteria.getLike().toUpperCase() + "%"))));
|
||||
if (criteria.getPeriodEnd() != null)
|
||||
query.where((builder, root) -> builder.lessThan(root.get("enddate"), criteria.getPeriodEnd()));
|
||||
if (criteria.getPeriodStart() != null)
|
||||
query.where((builder, root) -> builder.greaterThan(root.get("startdate"), criteria.getPeriodStart()));
|
||||
if (criteria.getReference() != null)
|
||||
query.where((builder, root) -> builder.equal(root.get("reference"), criteria.getReference()));
|
||||
if (criteria.getProjectStateType() != null) {
|
||||
if (criteria.getProjectStateType().equals(ProjectStateType.FINISHED.getValue()))
|
||||
query.where((builder, root) -> builder.lessThan(root.get("enddate"), new Date()));
|
||||
if (criteria.getProjectStateType().equals(ProjectStateType.ONGOING.getValue()))
|
||||
query.where((builder, root) ->
|
||||
builder.or(builder.greaterThan(root.get("enddate"), new Date())
|
||||
, builder.isNull(root.get("enddate"))));
|
||||
}
|
||||
if (criteria.isPublic()) {
|
||||
query.where((builder, root) -> builder.equal(root.join("dmps").get("status"), DMP.DMPStatus.FINALISED.getValue())).distinct();
|
||||
}
|
||||
query.where((builder, root) -> builder.notEqual(root.get("status"), Project.Status.DELETED.getValue()));
|
||||
return query;
|
||||
}
|
||||
|
||||
public QueryableList<Project> getAuthenticated(QueryableList<Project> query, UserInfo principal) {
|
||||
query.where((builder, root) -> builder.or(builder.equal(root.get("creationUser"), principal), builder.equal(root.join("dmps", JoinType.LEFT).join("users", JoinType.LEFT).join("user", JoinType.LEFT).get("id"), principal.getId()))).distinct();
|
||||
return query;
|
||||
}
|
||||
@Override
|
||||
public Project createOrUpdate(Project item) {
|
||||
return getDatabaseService().createOrUpdate(item, Project.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Project createOrUpdate(Project item) {
|
||||
return this.getDatabaseService().createOrUpdate(item, Project.class);
|
||||
}
|
||||
@Override
|
||||
public Project find(UUID id) {
|
||||
return getDatabaseService().getQueryable(Project.class).where((builder, root) -> builder.equal((root.get("id")), id)).getSingle();
|
||||
}
|
||||
|
||||
@Override
|
||||
public CompletableFuture<Project> createOrUpdateAsync(Project item) {
|
||||
return null;
|
||||
}
|
||||
@Override
|
||||
public void delete(Project item) {
|
||||
this.getDatabaseService().delete(item);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Project find(UUID id) {
|
||||
return this.getDatabaseService().getQueryable(Project.class).where((builder, root) -> builder.equal(root.get("id"), id)).getSingle();
|
||||
}
|
||||
@Override
|
||||
public QueryableList<Project> asQueryable() {
|
||||
return this.getDatabaseService().getQueryable(Project.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Project find(UUID id, String hint) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
public QueryableList<Project> getAuthenticated(QueryableList<Project> query, UserInfo principal) {
|
||||
//query.where((builder, root) -> builder.or(builder.equal(root.get("creationUser"), principal), builder.isMember(principal, root.join("dmps", JoinType.LEFT).get("users")))).distinct();
|
||||
query.where((builder, root) -> builder.or(builder.equal(root.get("creationUser"), principal), builder.equal(root.join("dmps").join("users", JoinType.LEFT).get("id"), principal.getId()))).distinct();
|
||||
return query;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void delete(Project item) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
@Async
|
||||
@Override
|
||||
public CompletableFuture<Project> createOrUpdateAsync(Project item) {
|
||||
return CompletableFuture.supplyAsync(() -> this.createOrUpdate(item));
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryableList<Project> asQueryable() {
|
||||
return this.getDatabaseService().getQueryable(Project.class);
|
||||
}
|
||||
@Override
|
||||
public Project find(UUID id, String hint) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
}
|
||||
|
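One behavioural difference worth noting in the hunk above: on master, createOrUpdateAsync returned null, while the ui-refactoring branch wraps createOrUpdate in CompletableFuture.supplyAsync under @Async, so callers can actually chain on the returned future. A hedged caller sketch, with the SLF4J-style logger field and the Project.getId() accessor taken as assumptions:

    // Hypothetical caller sketch; logger and Project.getId() are assumed.
    projectDao.createOrUpdateAsync(project)
            .thenAccept(saved -> logger.info("Saved project {}", saved.getId()));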
Some files were not shown because too many files have changed in this diff.