dockerization
This commit is contained in:
parent
6a4d67de0c
commit
5087a37e15
11
.env
11
.env
|
@ -1,6 +1,5 @@
|
|||
TAG=6.3.0
|
||||
ENV=prod
|
||||
PROFILE=production
|
||||
AOT=aot
|
||||
ELASTIC_VERSION=6.3.0
|
||||
ELASTIC_PASSWORD=changeme
|
||||
PROFILE=docker
|
||||
|
||||
# Version of Elastic products
|
||||
ELK_VERSION=7.17.4
|
||||
STACK_VERSION=7.17.4
|
||||
|
|
|
@ -46,3 +46,5 @@ ELK.Docker/shared/data-elk/
|
|||
.settings/
|
||||
bin/
|
||||
*.classpath
|
||||
openDMP/dmp-backend/uploads/
|
||||
openDMP/dmp-backend/tmp/
|
||||
|
|
|
@ -0,0 +1,63 @@
|
|||
# Using Docker Compose with Argos
|
||||
|
||||
ARGOS is an open extensible service that simplifies the management, validation, monitoring and maintenance of Data Management Plans. It allows actors (researchers, managers, supervisors etc.) to create actionable DMPs that may be freely exchanged among infrastructures for carrying out specific aspects of the Data management process in accordance with the intentions and commitment of Data owners.
|
||||
|
||||
## Before running the docker compose commands, configurations must be set
|
||||
|
||||
### Database
|
||||
|
||||
First of all, the database must be configured
|
||||
|
||||
The only file that has to be changed is **/dmp-db-scema/Docker/dmp-db.env**
|
||||
|
||||
```bash
|
||||
ADMIN_USER: Admin username (app)
|
||||
ADMIN_PASSWORD: Admin password (app)
|
||||
|
||||
POSTGRES_DB: database name
|
||||
POSTGRES_USER: Admin username (database)
|
||||
POSTGRES_PASSWORD: Admin password (database)
|
||||
```
|
||||
|
||||
### Backend
|
||||
|
||||
Secondly, a few more options should be assigned
|
||||
|
||||
The file **/dmp-backend/web/src/main/resources/config/application-docker.properties** contains all the necessary properties
|
||||
|
||||
Values to be modified:
|
||||
```bash
|
||||
database.url: the url that is used to connect to database (JDBC based)
|
||||
database.username: database admin username
|
||||
database.password: database admin password
|
||||
|
||||
elasticsearch.*(optional): setup elastic, check Elasticsearch(optional) section below
|
||||
|
||||
google.login.clientId(optional): google as login provider
|
||||
```
|
||||
**NOTE:** if you want to configure and integrate other providers, check this reference [Setup configurable login](https://code-repo.d4science.org/MaDgiK-CITE/argos/wiki/Page-2A:-Setup-configurable-login)
|
||||
|
||||
If you provide google.login.clientId, then the same value should be set in the field named **loginProviders.googleConfiguration.clientId** which belongs to **/dmp-frontend/src/assets/config/config.json**
|
||||
|
||||
## You are ready to build and run the entire application using Docker-compose
|
||||
|
||||
1. Go to the project's root directory
|
||||
2. Type in the **Terminal** `docker volume create --name=dmpdata`
|
||||
3. Type in the **Terminal** `docker-compose up -d --build`
|
||||
4. After it's complete, your application is running on [http://localhost:8080](http://localhost:8080)
|
||||
|
||||
### Elasticsearch(optional)
|
||||
If you want to set up elasticsearch, you will need the password for the **elastic** user
|
||||
|
||||
After your application is running, type in the **Terminal** `docker exec -it elasticsearch /bin/sh`
|
||||
|
||||
Run the command `cat data/passwords.txt` in the shell and save its output
|
||||
|
||||
Finally, run `exit` to get back to your terminal
|
||||
|
||||
The elastic's password that you get has to be set in the **elasticsearch.password** property in the backend configuration
|
||||
|
||||
Rerun the application
|
||||
|
||||
1. Type in the **Terminal** `docker-compose down`
|
||||
2. Type in the **Terminal** `docker-compose up -d --build`
|
|
@ -1 +1 @@
|
|||
PROFILE=staging
|
||||
PROFILE=docker
|
|
@ -1,22 +1,22 @@
|
|||
FROM maven:3-jdk-8-alpine AS MAVEN_BUILD
|
||||
FROM maven:3-jdk-11 AS MAVEN_BUILD
|
||||
|
||||
|
||||
|
||||
COPY pom.xml /build/
|
||||
COPY data /build/data/
|
||||
COPY elastic /build/elastic/
|
||||
COPY logging /build/logging/
|
||||
#COPY logging /build/logging/
|
||||
COPY queryable /build/queryable/
|
||||
COPY web /build/web/
|
||||
|
||||
|
||||
|
||||
WORKDIR /build/
|
||||
RUN mvn package
|
||||
RUN mvn package -q
|
||||
|
||||
|
||||
|
||||
FROM openjdk:8-jre-alpine
|
||||
FROM amazoncorretto:11
|
||||
WORKDIR /app
|
||||
|
||||
COPY --from=MAVEN_BUILD /build/web/target/web-1.0-SNAPSHOT.jar /app.jar
|
||||
|
|
|
@ -24,7 +24,7 @@ import java.util.Properties;
|
|||
*/
|
||||
@Configuration
|
||||
@EnableTransactionManagement
|
||||
@Profile({ "production", "staging" })
|
||||
@Profile({ "production", "staging", "docker" })
|
||||
@ComponentScan(basePackages = {"eu.eudat.data.entities"})
|
||||
public class ProductionDatabaseConfiguration {
|
||||
|
||||
|
|
|
@ -20,7 +20,7 @@ import java.util.LinkedList;
|
|||
import java.util.List;
|
||||
|
||||
@Service("dynamicFunderConfiguration")
|
||||
@Profile({ "production", "staging" })
|
||||
@Profile({ "production", "staging", "docker" })
|
||||
public class DynamicFunderConfigurationProdImpl implements DynamicFunderConfiguration {
|
||||
private static final Logger logger = LoggerFactory.getLogger(DynamicFunderConfigurationProdImpl.class);
|
||||
|
||||
|
|
|
@ -24,7 +24,7 @@ import java.util.List;
|
|||
* Created by ikalyvas on 3/23/2018.
|
||||
*/
|
||||
@Service("dynamicGrantConfiguration")
|
||||
@Profile({ "production", "staging" })
|
||||
@Profile({ "production", "staging", "docker" })
|
||||
public class DynamicGrantConfigurationProdImpl implements DynamicGrantConfiguration {
|
||||
private static final Logger logger = LoggerFactory.getLogger(DynamicGrantConfigurationProdImpl.class);
|
||||
|
||||
|
|
|
@ -21,7 +21,7 @@ import java.util.LinkedList;
|
|||
import java.util.List;
|
||||
|
||||
@Service("dynamicProjectConfiguration")
|
||||
@Profile({ "production", "staging" })
|
||||
@Profile({ "production", "staging", "docker" })
|
||||
public class DynamicProjectConfigurationProdImpl implements DynamicProjectConfiguration{
|
||||
private static final Logger logger = LoggerFactory.getLogger(DynamicProjectConfigurationProdImpl.class);
|
||||
|
||||
|
|
|
@ -85,6 +85,7 @@ public class UploadData extends FieldData<UploadData> {
|
|||
this.setLabel(((Map<String, String>) data).get("label"));
|
||||
Object maxFileSizeInMB = ((Map<String, Object>) data).get("maxFileSizeInMB");
|
||||
if(maxFileSizeInMB instanceof String){ // template export
|
||||
if(!((String)maxFileSizeInMB).isEmpty())
|
||||
this.setMaxFileSizeInMB(Integer.valueOf((String)maxFileSizeInMB));
|
||||
}
|
||||
else if(maxFileSizeInMB instanceof Integer){ // template preview
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
dmp.domain = https://devel.opendmp.eu
|
||||
dmp.domain = http://localhost:8080
|
||||
|
||||
####################PERSISTENCE OVERRIDES CONFIGURATIONS##########
|
||||
database.url=jdbc:postgresql://dmp-db:5432/dmptool
|
||||
|
@ -6,40 +6,58 @@ database.username=dmptool
|
|||
database.password=CHANGEME
|
||||
|
||||
####################ELASTIIC SEARCH TAGS OVERRIDES CONFIGURATIONS##########
|
||||
elasticsearch.host = tags-elastic-search
|
||||
elasticsearch.host = opendmp-elastic
|
||||
elasticsearch.port = 9200
|
||||
elasticsearch.username=elastic
|
||||
elasticsearch.password=
|
||||
elasticsearch.index=dmps
|
||||
elasticsearch.usingssl=false
|
||||
elasticsearch.certPath=
|
||||
elasticsearch.certKey=
|
||||
|
||||
####################ELK OVERRIDES CONFIGURATIONS##########
|
||||
#http-logger.server-address = http://logstash:31311
|
||||
|
||||
####################PDF OVERRIDES CONFIGURATIONS##########
|
||||
pdf.converter.url=http://docsbox-web/
|
||||
pdf.converter.url=http://opendmp-pdf:3000/
|
||||
|
||||
####################CONFIGURATION FILES OVERRIDES CONFIGURATIONS##########
|
||||
configuration.externalUrls=externalUrls/ExternalUrls.xml
|
||||
configuration.rda=RDACommonStandards.txt
|
||||
configuration.h2020template=documents/h2020.docx
|
||||
configuration.h2020datasettemplate=documents/h2020_dataset.docx
|
||||
configuration.configurable_login_providers=ConfigurableLoginProviders.json
|
||||
configuration.configurable_login_providers=configurableLoginProviders.json
|
||||
configuration.doi_funder=DOI_Funder.json
|
||||
|
||||
####################EMAIL FILE TEMPLATES OVERRIDES CONFIGURATIONS##########
|
||||
email.invite=classpath:templates/email/email.html
|
||||
email.confirmation=classpath:templates/email/emailConfirmation.html
|
||||
email.merge=classpath:templates/email/emailMergeConfirmation.html
|
||||
email.dataset.template=classpath:templates/email/emailAdmin.html
|
||||
|
||||
####################INVITATION MAIL CONFIGURATIONS##############
|
||||
####################GENERIC MAIL CONFIGURATIONS#################
|
||||
mail.subject=Invitation to DMP Plan {dmpname}
|
||||
mail.from=opendmp-dev@cite.gr
|
||||
|
||||
####################DATASET TEMPLATE MAIL CONFIGURATIONS#################
|
||||
admin.mail.subject=You have been invited to the Dataset Template {templateName}
|
||||
|
||||
####################SPRING MAIL CONFIGURATIONS#################
|
||||
spring.mail.default-encoding=UTF-8
|
||||
spring.mail.host=hermes.local.cite.gr
|
||||
spring.mail.host=
|
||||
spring.mail.username=
|
||||
spring.mail.password=
|
||||
spring.mail.port=25
|
||||
spring.mail.protocol=smtp
|
||||
spring.mail.test-connection=false
|
||||
spring.mail.properties.mail.smtp.auth=false
|
||||
spring.mail.properties.mail.smtp.starttls.enable=true
|
||||
|
||||
#############FACEBOOK LOGIN CONFIGURATIONS#########
|
||||
facebook.login.clientId=
|
||||
facebook.login.clientSecret=
|
||||
facebook.login.namespace=opendmp
|
||||
facebook.login.namespace=
|
||||
|
||||
#############GOOGLE LOGIN CONFIGURATIONS#########
|
||||
google.login.clientId=
|
||||
|
@ -47,7 +65,7 @@ google.login.clientId=
|
|||
#############LINKEDIN LOGIN CONFIGURATIONS#########
|
||||
linkedin.login.clientId=
|
||||
linkedin.login.clientSecret=
|
||||
linkedin.login.redirect_uri=https://devel.opendmp.eu/login/linkedin
|
||||
linkedin.login.redirect_uri=http://localhost:8080/login/linkedin
|
||||
linkedin.login.user_info_url=https://api.linkedin.com/v2/me
|
||||
linkedin.login.user_email=https://api.linkedin.com/v2/emailAddress?q=members&projection=(elements*(handle~))
|
||||
linkedin.login.access_token_url=https://www.linkedin.com/uas/oauth2/accessToken
|
||||
|
@ -55,16 +73,12 @@ linkedin.login.access_token_url=https://www.linkedin.com/uas/oauth2/accessToken
|
|||
#############TWITTER LOGIN CONFIGURATIONS#########
|
||||
twitter.login.clientId=
|
||||
twitter.login.clientSecret=
|
||||
twitter.login.redirect_uri=https://devel.opendmp.eu/login/twitter
|
||||
|
||||
#############CONFIRMATION EMAIL CONFIGURATIONS#########
|
||||
conf_email.expiration_time_seconds=14400
|
||||
conf_email.subject=OpenDMP email confirmation
|
||||
twitter.login.redirect_uri=http://localhost:8080/login/twitter
|
||||
|
||||
#############B2 ACCESS CONFIGURATIONS#########
|
||||
b2access.externallogin.user_info_url=https://b2access-integration.fz-juelich.de:443/oauth2/userinfo
|
||||
b2access.externallogin.access_token_url=https://b2access-integration.fz-juelich.de:443/oauth2/token
|
||||
b2access.externallogin.redirect_uri=https://devel.opendmp.eu/api/oauth/authorized/b2access
|
||||
b2access.externallogin.redirect_uri=http://localhost:8080/api/oauth/authorized/b2access
|
||||
b2access.externallogin.clientid=
|
||||
b2access.externallogin.clientSecret=
|
||||
|
||||
|
@ -72,20 +86,50 @@ b2access.externallogin.clientSecret=
|
|||
orcid.login.client_id=
|
||||
orcid.login.client_secret=
|
||||
orcid.login.access_token_url=https://orcid.org/oauth/token
|
||||
orcid.login.redirect_uri=https://opendmp.eu/login/external/orcid
|
||||
orcid.login.redirect_uri=http://localhost:8080/login/external/orcid
|
||||
|
||||
#############OPENAIRE CONFIGURATIONS#########
|
||||
openaire.login.client_id=
|
||||
openaire.login.client_secret=
|
||||
openaire.login.access_token_url=
|
||||
openaire.login.redirect_uri=
|
||||
openaire.login.user_info_url=
|
||||
openaire.login.access_token_url=https://aai.openaire.eu/oidc/token
|
||||
openaire.login.redirect_uri=http://localhost:8080/login/openaire
|
||||
openaire.login.user_info_url=https://aai.openaire.eu/oidc/userinfo
|
||||
|
||||
#############CONFIRMATION EMAIL CONFIGURATIONS#########
|
||||
conf_email.expiration_time_seconds=14400
|
||||
conf_email.subject=OpenDMP email confirmation
|
||||
|
||||
#############ZENODO CONFIGURATIONS#########
|
||||
zenodo.url=https://sandbox.zenodo.org/api/
|
||||
zenodo.access_token=
|
||||
zenodo.login.access_token_url=https://sandbox.zenodo.org/oauth/token
|
||||
zenodo.login.client_id=
|
||||
zenodo.login.client_secret=
|
||||
zenodo.login.redirect_uri=http://localhost:8080/login/external/zenodo
|
||||
|
||||
|
||||
#############CONTACT EMAIL CONFIGURATIONS#########
|
||||
contact_email.mail=
|
||||
|
||||
logging.config=classpath:logging/logback-${spring.profiles.active}.xml
|
||||
language.path=i18n/
|
||||
|
||||
##########################MISC##########################################
|
||||
#############USER GUIDE#########
|
||||
userguide.path=user-guide/
|
||||
|
||||
#############NOTIFICATION#########
|
||||
notification.rateInterval=30000
|
||||
notification.maxRetries=10
|
||||
notification.modified.subject=[OpenDMP] The {name} has been modified
|
||||
notification.publish.subject=[OpenDMP] The {name} has been published
|
||||
notification.finalised.subject=[OpenDMP] The {name} has been finalised
|
||||
notification.modifiedFinalised.subject=[OpenDMP] The {name} has been modified and finalised
|
||||
|
||||
#############TEMP#########
|
||||
temp.temp=tmp/
|
||||
file.storage=storage/
|
||||
spring.servlet.multipart.max-file-size=10MB
|
||||
spring.servlet.multipart.max-request-size=10MB
|
||||
|
||||
#############PROMETHEUS#########
|
||||
endpoints.prometheus.sensitive: false
|
|
@ -1,101 +0,0 @@
|
|||
dmp.domain = https://opendmp.eu
|
||||
|
||||
####################PERSISTENCE OVERRIDES CONFIGURATIONS##########
|
||||
database.url=jdbc:postgresql://dmp-db:5432/dmptool
|
||||
database.username=dmptool
|
||||
database.password=CHANGEME
|
||||
|
||||
####################ELASTIIC SEARCH TAGS OVERRIDES CONFIGURATIONS##########
|
||||
elasticsearch.host = tags-elastic-search
|
||||
elasticsearch.port = 9200
|
||||
elasticsearch.username=elastic
|
||||
elasticsearch.password=
|
||||
elasticsearch.index=dmps
|
||||
|
||||
####################PDF OVERRIDES CONFIGURATIONS##########
|
||||
pdf.converter.url=http://docsbox-web/
|
||||
|
||||
####################CONFIGURATION FILES OVERRIDES CONFIGURATIONS##########
|
||||
configuration.externalUrls=externalUrls/ExternalUrls.xml
|
||||
configuration.rda=RDACommonStandards.txt
|
||||
configuration.h2020template=documents/h2020.docx
|
||||
configuration.h2020datasettemplate=documents/h2020_dataset.docx
|
||||
configuration.configurable_login_providers=ConfigurableLoginProviders.json
|
||||
configuration.doi_funder=DOI_Funder.json
|
||||
|
||||
####################SPRING MAIL CONFIGURATIONS#################
|
||||
spring.mail.default-encoding=UTF-8
|
||||
spring.mail.host=
|
||||
spring.mail.username=
|
||||
spring.mail.password=
|
||||
spring.mail.port=25
|
||||
spring.mail.protocol=smtp
|
||||
spring.mail.test-connection=false
|
||||
spring.mail.properties.mail.smtp.auth=false
|
||||
spring.mail.properties.mail.smtp.starttls.enable=true
|
||||
|
||||
#############FACEBOOK LOGIN CONFIGURATIONS#########
|
||||
facebook.login.clientId=
|
||||
facebook.login.clientSecret=
|
||||
facebook.login.namespace=opendmp
|
||||
|
||||
#############GOOGLE LOGIN CONFIGURATIONS#########
|
||||
google.login.clientId=
|
||||
|
||||
#############LINKEDIN LOGIN CONFIGURATIONS#########
|
||||
linkedin.login.clientId=
|
||||
linkedin.login.clientSecret=
|
||||
linkedin.login.redirect_uri=https://opendmp.eu/login/linkedin
|
||||
linkedin.login.user_info_url=https://api.linkedin.com/v2/me
|
||||
linkedin.login.user_email=https://api.linkedin.com/v2/emailAddress?q=members&projection=(elements*(handle~))
|
||||
linkedin.login.access_token_url=https://www.linkedin.com/uas/oauth2/accessToken
|
||||
|
||||
#############TWITTER LOGIN CONFIGURATIONS#########
|
||||
twitter.login.clientId=
|
||||
twitter.login.clientSecret=
|
||||
twitter.login.redirect_uri=https://opendmp.eu/login/twitter
|
||||
|
||||
#############B2 ACCESS CONFIGURATIONS#########
|
||||
b2access.externallogin.user_info_url=https://b2access-integration.fz-juelich.de:443/oauth2/userinfo
|
||||
b2access.externallogin.access_token_url=https://b2access-integration.fz-juelich.de:443/oauth2/token
|
||||
b2access.externallogin.redirect_uri=https://opendmp.eu/api/oauth/authorized/b2access
|
||||
b2access.externallogin.clientid=
|
||||
b2access.externallogin.clientSecret=
|
||||
|
||||
#############ORCID CONFIGURATIONS#########
|
||||
orcid.login.client_id=
|
||||
orcid.login.client_secret=
|
||||
orcid.login.access_token_url=https://orcid.org/oauth/token
|
||||
orcid.login.redirect_uri=https://opendmp.eu/login/external/orcid
|
||||
|
||||
#############OPENAIRE CONFIGURATIONS#########
|
||||
openaire.login.client_id=
|
||||
openaire.login.client_secret=
|
||||
openaire.login.access_token_url=
|
||||
openaire.login.redirect_uri=
|
||||
openaire.login.user_info_url=
|
||||
|
||||
#############SPRING DATASOURCE CONFIGURATIONS#########
|
||||
spring.datasource.maxIdle: 10
|
||||
spring.datasource.max-active: 70
|
||||
spring.datasource.max-wait: 10000
|
||||
spring.datasource.validationQuery: select 1
|
||||
spring.datasource.removeAbandoned: true
|
||||
spring.datasource.removeAbandonedTimeout: 1
|
||||
spring.datasource.logAbandoned: true
|
||||
spring.datasource.testOnBorrow: true
|
||||
spring.datasource.testOnConnect: false
|
||||
spring.datasource.testWhileIdle: false
|
||||
|
||||
#############CONFIRMATION EMAIL CONFIGURATIONS#########
|
||||
conf_email.expiration_time_seconds=14400
|
||||
conf_email.subject=OpenDMP email confirmation
|
||||
|
||||
#############ZENODO CONFIGURATIONS#########
|
||||
zenodo.url=https://zenodo.org/api/
|
||||
zenodo.access_token=
|
||||
|
||||
#############CONTACT EMAIL CONFIGURATIONS#########
|
||||
contact_email.mail=
|
||||
|
||||
language.path=i18n/
|
|
@ -51,7 +51,7 @@ configuration.externalUrls=externalUrls/ExternalUrls.xml
|
|||
configuration.rda=RDACommonStandards.txt
|
||||
configuration.h2020template=documents/h2020.docx
|
||||
configuration.h2020datasettemplate=documents/h2020_dataset.docx
|
||||
configuration.configurable_login_providers=ConfigurableLoginProviders.json
|
||||
configuration.configurable_login_providers=configurableLoginProviders.json
|
||||
configuration.doi_funder=DOI_Funder.json
|
||||
|
||||
####################EMAIL FILE TEMPLATES OVERRIDES CONFIGURATIONS##########
|
||||
|
|
|
@ -1,33 +0,0 @@
|
|||
<configuration debug="false">
|
||||
<include resource="org/springframework/boot/logging/logback/defaults.xml"/>
|
||||
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
|
||||
<encoder>
|
||||
<pattern>${CONSOLE_LOG_PATTERN}</pattern>
|
||||
</encoder>
|
||||
</appender>
|
||||
|
||||
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
|
||||
<file>logs/openDMP.log</file>
|
||||
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
|
||||
<fileNamePattern>logs/openDMP-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
|
||||
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
|
||||
<maxFileSize>100MB</maxFileSize>
|
||||
</timeBasedFileNamingAndTriggeringPolicy>
|
||||
<maxHistory>30</maxHistory>
|
||||
<totalSizeCap>3GB</totalSizeCap>
|
||||
</rollingPolicy>
|
||||
<encoder>
|
||||
<pattern>${FILE_LOG_PATTERN}</pattern>
|
||||
</encoder>
|
||||
</appender>
|
||||
|
||||
<logger name="eu.eudat" level="INFO" additivity="false">
|
||||
<appender-ref ref="FILE"/>
|
||||
<appender-ref ref="STDOUT"/>
|
||||
</logger>
|
||||
|
||||
<root level="info" additivity="false">
|
||||
<appender-ref ref="FILE"/>
|
||||
<appender-ref ref="STDOUT"/>
|
||||
</root>
|
||||
</configuration>
|
|
@ -1,32 +1,29 @@
|
|||
# stage1 as builder
|
||||
FROM node:12-alpine AS BUILDER
|
||||
|
||||
WORKDIR /page
|
||||
WORKDIR /app
|
||||
|
||||
# copy the package.json to install dependencies
|
||||
COPY package.json /page
|
||||
# copy to install dependencies
|
||||
COPY . .
|
||||
|
||||
# Install the dependencies and make the folder
|
||||
RUN npm install
|
||||
|
||||
COPY . /page
|
||||
|
||||
# Build the project and copy the files
|
||||
RUN npm run ng build -- --prod
|
||||
RUN npm install && npm run ng build -- --prod
|
||||
|
||||
FROM nginx:alpine
|
||||
|
||||
#!/bin/sh
|
||||
|
||||
WORKDIR /usr/share/nginx/html
|
||||
|
||||
COPY nginx.conf /etc/nginx
|
||||
COPY mime.types /etc/nginx
|
||||
|
||||
## Remove default nginx index page
|
||||
RUN rm -rf /usr/share/nginx/html/*
|
||||
RUN rm -rf ./*
|
||||
|
||||
# Copy the built files from stage 1
|
||||
COPY --from=BUILDER /page/dist /usr/share/nginx/html
|
||||
COPY --from=BUILDER /app/dist .
|
||||
|
||||
EXPOSE 4200
|
||||
|
||||
ENTRYPOINT ["nginx", "-g", "daemon off;", "-p", "/usr/share/nginx"]
|
||||
ENTRYPOINT ["nginx", "-g", "daemon off;"]
|
||||
|
|
|
@ -9,6 +9,7 @@ types {
|
|||
text/x-component htc;
|
||||
text/mathml mml;
|
||||
image/png png;
|
||||
image/svg+xml svg svgz;
|
||||
image/x-icon ico;
|
||||
image/x-jng jng;
|
||||
image/vnd.wap.wbmp wbmp;
|
||||
|
|
|
@ -1,168 +0,0 @@
|
|||
---
|
||||
version: '3'
|
||||
services:
|
||||
##########################ELASTIC######################################################################
|
||||
elasticsearch-dmp:
|
||||
image: docker.elastic.co/elasticsearch/elasticsearch:${TAG}
|
||||
container_name: elasticsearch-dmp
|
||||
volumes:
|
||||
- ./elastic-config/elasticsearch-custom.yml:/usr/share/elasticsearch/config/elasticsearch.yml
|
||||
environment: ['http.host=0.0.0.0','transport.host=0.0.0.0','discovery.type=single-node']
|
||||
ports: ['0.0.0.0:9201:9200','0.0.0.0:9301:9300']
|
||||
networks: ['elasticsearch-dmp']
|
||||
volumes:
|
||||
- esdata-dmp:/usr/share/elasticsearch/data
|
||||
|
||||
##########################ELK-STACK######################################################################
|
||||
|
||||
elasticsearch:
|
||||
image: docker.elastic.co/elasticsearch/elasticsearch:${TAG}
|
||||
container_name: elasticsearch
|
||||
environment: ['http.host=0.0.0.0', 'transport.host=127.0.0.1', 'ELASTIC_PASSWORD=${ELASTIC_PASSWORD}','discovery.type=single-node']
|
||||
ports: ['0.0.0.0:9200:9200']
|
||||
networks: ['stack']
|
||||
volumes:
|
||||
- esdata:/usr/share/elasticsearch/data
|
||||
|
||||
kibana:
|
||||
image: docker.elastic.co/kibana/kibana:${TAG}
|
||||
container_name: kibana
|
||||
ports: ['0.0.0.0:5601:5601']
|
||||
networks: ['stack']
|
||||
depends_on: ['elasticsearch']
|
||||
|
||||
logstash:
|
||||
image: docker.elastic.co/logstash/logstash:${TAG}
|
||||
container_name: logstash
|
||||
volumes:
|
||||
- ./ELK.Docker/config/logstash.conf:/usr/share/logstash/pipeline/logstash.conf
|
||||
ports: ['0.0.0.0:31311:31311']
|
||||
|
||||
networks: ['stack']
|
||||
depends_on: ['elasticsearch', 'setup_logstash']
|
||||
|
||||
#filebeat:
|
||||
# image: docker.elastic.co/beats/filebeat:${TAG}
|
||||
# container_name: filebeat
|
||||
# command: -e -E 'output.elasticsearch.password=${ELASTIC_PASSWORD}'
|
||||
# networks: ['stack']
|
||||
# depends_on: ['elasticsearch', 'setup_filebeat']
|
||||
|
||||
#heartbeat:
|
||||
# image: docker.elastic.co/beats/heartbeat:${TAG}
|
||||
# container_name: heartbeat
|
||||
# command: -e -E 'output.elasticsearch.password=${ELASTIC_PASSWORD}'
|
||||
# networks: ['stack']
|
||||
# depends_on: ['elasticsearch', 'setup_heartbeat']
|
||||
|
||||
# Run a short-lived container to set up Logstash.
|
||||
setup_logstash:
|
||||
image: centos:7
|
||||
container_name: setup_logstash
|
||||
volumes: ['./ELK.Docker/scripts/setup-logstash.sh:/usr/local/bin/setup-logstash.sh:ro']
|
||||
command: ['/bin/bash', '-c', 'cat /usr/local/bin/setup-logstash.sh | tr -d "\r" | bash']
|
||||
environment: ['ELASTIC_PASSWORD=${ELASTIC_PASSWORD}']
|
||||
networks: ['stack']
|
||||
depends_on: ['elasticsearch']
|
||||
|
||||
setup_kibana:
|
||||
image: centos:7
|
||||
container_name: setup_kibana
|
||||
volumes: ['./ELK.Docker/scripts/setup-kibana.sh:/usr/local/bin/setup-kibana.sh:ro']
|
||||
command: ['/bin/bash', '-c', 'cat /usr/local/bin/setup-kibana.sh | tr -d "\r" | bash']
|
||||
environment: ['ELASTIC_PASSWORD=${ELASTIC_PASSWORD}']
|
||||
networks: ['stack']
|
||||
depends_on: ['elasticsearch']
|
||||
|
||||
#setup_filebeat:
|
||||
# image: docker.elastic.co/beats/filebeat:${TAG}
|
||||
# container_name: setup_filebeat
|
||||
# volumes: ['./ELK.Docker/scripts/setup-beat.sh:/usr/local/bin/setup-beat.sh:ro']
|
||||
# command: ['/bin/bash', '-c', 'cat /usr/local/bin/setup-beat.sh | tr -d "\r" | bash -s filebeat']
|
||||
# environment: ['ELASTIC_PASSWORD=${ELASTIC_PASSWORD}']
|
||||
# networks: ['stack']
|
||||
# depends_on: ['kibana']
|
||||
|
||||
#setup_heartbeat:
|
||||
# image: docker.elastic.co/beats/heartbeat:${TAG}
|
||||
# container_name: setup_heartbeat
|
||||
# volumes: ['./ELK.Docker/scripts/setup-beat.sh:/usr/local/bin/setup-beat.sh:ro']
|
||||
# command: ['/bin/bash', '-c', 'cat /usr/local/bin/setup-beat.sh | tr -d "\r" | bash -s heartbeat']
|
||||
# environment: ['ELASTIC_PASSWORD=${ELASTIC_PASSWORD}']
|
||||
# networks: ['stack']
|
||||
# depends_on: ['kibana']
|
||||
|
||||
##########################DOCSBOX######################################################################
|
||||
# web:
|
||||
# restart: always
|
||||
# build: ./docsbox-master/docsbox
|
||||
# expose:
|
||||
# - "8000"
|
||||
# links:
|
||||
# - redis:redis
|
||||
# volumes:
|
||||
# - docsbox:/home/docsbox
|
||||
# - media:/home/docsbox/media
|
||||
# command: gunicorn -b :8000 docsbox:app
|
||||
# networks: ['stack']
|
||||
|
||||
# rqworker:
|
||||
# restart: always
|
||||
# build: ./docsbox-master/docsbox
|
||||
# links:
|
||||
# - redis:redis
|
||||
# volumes:
|
||||
# - web
|
||||
# command: rq worker -c docsbox.settings
|
||||
# networks: ['stack']
|
||||
|
||||
# rqscheduler:
|
||||
# restart: always
|
||||
# build: ./docsbox-master/docsbox
|
||||
# links:
|
||||
# - redis:redis
|
||||
# volumes:
|
||||
# - web
|
||||
# command: rqscheduler -H redis -p 6379 -d 0
|
||||
# networks: ['stack']
|
||||
|
||||
# nginx:
|
||||
# restart: always
|
||||
# build: ./docsbox-master/nginx/
|
||||
# ports:
|
||||
# - "81:80"
|
||||
# volumes:
|
||||
# - web
|
||||
# links:
|
||||
# - web:web
|
||||
# networks: ['stack']
|
||||
|
||||
# redis:
|
||||
# restart: always
|
||||
# image: redis:latest
|
||||
# expose:
|
||||
# - "6379"
|
||||
# volumes:
|
||||
# - redisdata:/data
|
||||
# networks: ['stack']
|
||||
|
||||
|
||||
##########################SETTINGS######################################################################
|
||||
|
||||
volumes:
|
||||
esdata:
|
||||
driver: local
|
||||
esdata-dmp:
|
||||
driver: local
|
||||
#redisdata:
|
||||
# driver: local
|
||||
# docsbox:
|
||||
# driver: local
|
||||
# media:
|
||||
# driver: local
|
||||
networks:
|
||||
stack: {}
|
||||
elasticsearch-dmp: {}
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,90 @@
|
|||
version: "3.8"
|
||||
|
||||
services:
|
||||
elasticsearch:
|
||||
user: 1002:1002 #develuser
|
||||
restart: unless-stopped
|
||||
mem_limit: 2048m
|
||||
environment:
|
||||
- cluster.name=open-dmp-cluster
|
||||
- bootstrap.memory_lock=true
|
||||
- "ES_JAVA_OPTS=-Xmx1024m -Xms1024m"
|
||||
- xpack.license.self_generated.type=basic
|
||||
- xpack.monitoring.collection.enabled=true
|
||||
- xpack.security.enabled=true
|
||||
ulimits:
|
||||
nproc: 65535
|
||||
memlock:
|
||||
soft: -1
|
||||
hard: -1
|
||||
volumes:
|
||||
- ./ELK.Docker/shared/config-elk/elasticsearch/config/log4j2.properties:/usr/share/elasticsearch/config/log4j2.properties:ro
|
||||
- ./ELK.Docker/shared/config-elk/elasticsearch/config/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml:ro
|
||||
- ./ELK.Docker/shared/data-elk/elasticsearch-01-data:/usr/share/elasticsearch/data
|
||||
- ./ELK.Docker/shared/data-elk/elasticsearch-01-log:/usr/share/elasticsearch/logs
|
||||
#ports:
|
||||
# - 51056:9200
|
||||
# - 51057:9300
|
||||
ports:
|
||||
- "9201:9200"
|
||||
expose:
|
||||
- "9200"
|
||||
hostname: opendmp-elastic
|
||||
networks:
|
||||
open-dmp-elk-network:
|
||||
aliases:
|
||||
- opendmp-elastic
|
||||
|
||||
logstash:
|
||||
# user: 1002:1002 #develuser
|
||||
volumes:
|
||||
- ./ELK.Docker/shared/config-elk/logstash/config/logstash.yml:/usr/share/logstash/config/logstash.yml:ro
|
||||
- ./ELK.Docker/shared/config-elk/logstash/config/pipelines.yml:/usr/share/logstash/config/pipelines.yml:ro
|
||||
- ./ELK.Docker/shared/config-elk/logstash/config/log4j2.properties:/usr/share/logstash/config/log4j2.properties:ro
|
||||
- ./ELK.Docker/shared/config-elk/logstash/pipeline:/usr/share/logstash/pipeline:ro
|
||||
- ./ELK.Docker/shared/config-elk/logstash/logstash/templates:/usr/share/logstash/templates
|
||||
- ./ELK.Docker/shared/data-elk/logstash-log:/usr/share/logstash/logs
|
||||
- ./ELK.Docker/shared/data-elk/logstash-queue:/usr/share/logstash/queue
|
||||
- ./ELK.Docker/shared/data-elk/logstash-dead_letter_queue:/usr/share/logstash/dead_letter_queue
|
||||
expose:
|
||||
- "31311"
|
||||
- "31312"
|
||||
restart: on-failure
|
||||
mem_limit: 2048m
|
||||
environment:
|
||||
- LS_JAVA_OPTS=-Xmx1024m -Xms1024m
|
||||
- xpack.license.self_generated.type=basic
|
||||
- xpack.security.enabled=true
|
||||
networks:
|
||||
open-dmp-elk-network:
|
||||
|
||||
kibana:
|
||||
# user: 1002:1002 #develuser
|
||||
mem_limit: 512m
|
||||
environment:
|
||||
- xpack.license.self_generated.type=basic
|
||||
- xpack.security.enabled=true
|
||||
|
||||
volumes:
|
||||
- ./ELK.Docker/shared/config-elk/kibana/config:/usr/share/kibana/config:ro
|
||||
#- ./ELK.Docker/shared/config-elk/kibana/certificates:/usr/share/kibana/certificates
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "51058:5601"
|
||||
networks:
|
||||
- open-dmp-elk-network
|
||||
|
||||
filebeat:
|
||||
restart: unless-stopped
|
||||
mem_limit: 256m
|
||||
#command: [ "-e=false" ] # to overwrite the -e that disables logging to file!
|
||||
volumes:
|
||||
- ./ELK.Docker/shared/config-elk/filebeat/config/filebeat.yml:/usr/share/filebeat/filebeat.yml:ro
|
||||
- ./openDMP/logs:/usr/share/filebeat/log_data/dmp/
|
||||
- ./ELK.Docker/shared/data-elk/filebeat-log:/usr/share/filebeat/logs
|
||||
- ./ELK.Docker/shared/data-elk/filebeat-data:/usr/share/filebeat/data #For windows if we mount the data directory we get "Writing of registry returned error: sync /usr/share/filebeat/data/registry/filebeat: invalid argument."
|
||||
networks:
|
||||
- open-dmp-elk-network
|
||||
|
||||
networks:
|
||||
open-dmp-elk-network:
|
|
@ -1,4 +1,4 @@
|
|||
version: "2.4"
|
||||
version: "3.8"
|
||||
|
||||
services:
|
||||
dmp-db:
|
||||
|
@ -18,6 +18,20 @@ services:
|
|||
networks:
|
||||
- opendmp-backend-network
|
||||
|
||||
dmp-pdf-converter:
|
||||
image: gotenberg/gotenberg:7.4.0
|
||||
container_name: opendmp-pdf-converter
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "3001:3000"
|
||||
expose:
|
||||
- "3000"
|
||||
hostname: opendmp-pdf
|
||||
networks:
|
||||
opendmp-pdf-network:
|
||||
aliases:
|
||||
- opendmp-pdf
|
||||
|
||||
dmp-backend:
|
||||
build:
|
||||
context: ./dmp-backend
|
||||
|
@ -27,18 +41,22 @@ services:
|
|||
mem_limit: 2048m
|
||||
ports:
|
||||
- "8081:8081"
|
||||
expose:
|
||||
- "8080"
|
||||
networks:
|
||||
- opendmp-backend-network
|
||||
- opendmp-pdf-network
|
||||
- open-dmp-elk-network
|
||||
volumes:
|
||||
- ./openDMP/dmp-backend/config:/app/config
|
||||
- ./openDMP/dmp-backend/user-guide:/app/user-guide
|
||||
- ./openDMP/dmp-backend/i18n:/app/i18n
|
||||
- ./openDMP/dmp-backend/externalUrls:/app/externalUrls
|
||||
- ./openDMP/dmp-backend/templates:/app/templates
|
||||
- ./dmp-backend/web/main/resources/config:/app/config
|
||||
- ./user-guide:/app/user-guide
|
||||
- ./dmp-frontend/src/assets/i18n:/app/i18n
|
||||
- ./dmp-backend/web/main/resources/externalUrls:/app/externalUrls
|
||||
- ./dmp-backend/web/main/resources/templates:/app/templates
|
||||
- ./openDMP/dmp-backend/opendmp-logs:/app/logs
|
||||
- ./openDMP/dmp-backend/tmp:/app/tmp
|
||||
- ./openDMP/dmp-backend/logging:/app/logging
|
||||
- ./openDMP/dmp-backend/documents:/app/documents
|
||||
- ./dmp-backend/web/main/resources/logging:/app/logging
|
||||
- ./dmp-backend/web/main/resources/documents:/app/documents
|
||||
|
||||
dmp-frontend:
|
||||
build:
|
||||
|
@ -50,13 +68,55 @@ services:
|
|||
- "8080:4200"
|
||||
volumes:
|
||||
- ./openDMP/dmp-frontend/static-files:/usr/share/nginx/static
|
||||
- ./openDMP/dmp-frontend/webapp/config:/usr/share/nginx/html/assets/config
|
||||
- ./dmp-frontend/src/assets/config:/usr/share/nginx/html/assets/config
|
||||
networks:
|
||||
- opendmp-frontend-network
|
||||
|
||||
elasticsearch:
|
||||
image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION}
|
||||
container_name: elasticsearch
|
||||
build:
|
||||
context: ./ELK.Docker/elasticsearch/
|
||||
args:
|
||||
ELK_VERSION: $ELK_VERSION
|
||||
healthcheck:
|
||||
# test: curl --cacert /usr/share/elasticsearch/config/certificates/ca/ca.crt -s https://localhost:9200 >/dev/null; if [[ $$? == 52 ]]; then echo 0; else echo 1; fi
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 5
|
||||
|
||||
logstash:
|
||||
image: docker.elastic.co/logstash/logstash:${STACK_VERSION}
|
||||
container_name: logstash
|
||||
build:
|
||||
context: ./ELK.Docker/logstash/
|
||||
args:
|
||||
ELK_VERSION: $ELK_VERSION
|
||||
depends_on:
|
||||
- elasticsearch
|
||||
|
||||
kibana:
|
||||
image: docker.elastic.co/kibana/kibana:${STACK_VERSION}
|
||||
build:
|
||||
context: ./ELK.Docker/kibana/
|
||||
args:
|
||||
ELK_VERSION: $ELK_VERSION
|
||||
depends_on:
|
||||
- elasticsearch
|
||||
filebeat:
|
||||
image: docker.elastic.co/filebeat/filebeat:${STACK_VERSION}
|
||||
build:
|
||||
context: ./ELK.Docker/filebeat/
|
||||
args:
|
||||
ELK_VERSION: $ELK_VERSION
|
||||
depends_on:
|
||||
- logstash
|
||||
|
||||
networks:
|
||||
opendmp-frontend-network:
|
||||
opendmp-backend-network:
|
||||
opendmp-pdf-network:
|
||||
open-dmp-elk-network:
|
||||
volumes:
|
||||
dmpdata:
|
||||
external: true
|
||||
|
|
Loading…
Reference in New Issue