wf exec client + fix nginx conf

This commit is contained in:
Michele Artini 2023-09-26 11:53:18 +02:00
parent 7185d038ee
commit 3c5e1682ee
14 changed files with 456 additions and 106 deletions

View File

@ -102,22 +102,6 @@ CREATE INDEX ON context_cat_concepts_lvl_0 (parent);
CREATE INDEX ON context_cat_concepts_lvl_1 (parent);
CREATE INDEX ON context_cat_concepts_lvl_2 (parent);
-- WF History
CREATE TABLE wf_history (
process_id text PRIMARY KEY,
wf_conf_id text NOT NULL,
name text NOT NULL,
family text NOT NULL,
status text NOT NULL,
start_date timestamp NOT NULL,
end_date timestamp NOT NULL,
ds_id text,
ds_name text,
ds_api text,
details jsonb
);
-- Other Resources
CREATE TABLE resource_types(
@ -260,48 +244,3 @@ CREATE TABLE emails (
message text NOT NULL
);
-- Workflows
CREATE TABLE wf_sections (
id text PRIMARY KEY,
name text NOT NULL
);
INSERT INTO wf_sections(id, name) VALUES
('GC', 'Garbage Collection'),
('CONSISTENCY', 'InfoSpace Consistency'),
('DEDUP', 'InfoSpace Deduplication'),
('INFERENCE', 'InfoSpace Inference'),
('MONITOR', 'InfoSpace Monitoring'),
('PROVISION', 'InfoSpace Provision'),
('IS', 'Information Service'),
('BROKER', 'Notification Broker');
CREATE TABLE wf_configurations (
id text PRIMARY KEY,
name text NOT NULL,
section text REFERENCES wf_sections(id),
details jsonb NOT NULL DEFAULT '{}',
priority int,
dsid text,
dsname text,
apiid text,
enabled boolean NOT NULL DEFAULT false,
configured boolean NOT NULL DEFAULT false,
scheduling_enabled boolean NOT NULL DEFAULT false,
scheduling_cron text,
scheduling_min_interval int,
workflow text REFERENCES resources(id),
destroy_wf text REFERENCES resources(id),
system_params jsonb NOT NULL DEFAULT '{}',
user_params jsonb NOT NULL DEFAULT '{}'
);
CREATE TABLE wf_subscriptions (
wf_conf_id text NOT NULL REFERENCES wf_configurations(id),
condition text NOT NULL,
email text NOT NULL,
message_id text NOT NULL REFERENCES emails(id),
PRIMARY KEY (wf_conf_id, condition, email)
);

61
data/sql/wfs.sql Normal file
View File

@ -0,0 +1,61 @@
-- WF History
-- One row per workflow execution; written by the wf executor when a run ends
-- and read back for auditing/monitoring.
CREATE TABLE wf_history (
process_id text PRIMARY KEY,
wf_conf_id text NOT NULL, -- id of the launching wf_configurations row (no FK by design: history must survive conf deletion -- TODO confirm)
name text NOT NULL,
family text NOT NULL,
status text NOT NULL,
start_date timestamp NOT NULL,
end_date timestamp NOT NULL, -- NOT NULL: rows are inserted only after the run terminates -- TODO confirm
ds_id text, -- optional datasource triple: only set for datasource-bound workflows
ds_name text,
ds_api text,
details jsonb -- free-form per-run details (outputs, counters, errors)
);
-- Workflows
-- Lookup table of sections used to group workflow configurations in the UI.
CREATE TABLE wf_sections (
id text PRIMARY KEY,
name text NOT NULL
);
-- Seed data: the fixed set of sections known to the application.
INSERT INTO wf_sections(id, name) VALUES
('GC', 'Garbage Collection'),
('CONSISTENCY', 'InfoSpace Consistency'),
('DEDUP', 'InfoSpace Deduplication'),
('INFERENCE', 'InfoSpace Inference'),
('MONITOR', 'InfoSpace Monitoring'),
('PROVISION', 'InfoSpace Provision'),
('IS', 'Information Service'),
('BROKER', 'Notification Broker');
-- A configured (optionally scheduled) workflow instance.
-- NOTE: wfs.sql is the ONLY init script of the standalone dnet_wfs database
-- (docker-compose mounts ./data/sql/wfs.sql into db-wfs). The "resources"
-- table lives in the main database (schema.sql), and PostgreSQL foreign keys
-- cannot cross databases, so "REFERENCES resources(id)" here would abort the
-- whole init with: relation "resources" does not exist. The workflow ids are
-- therefore plain text columns; cross-DB integrity is the application's job.
CREATE TABLE wf_configurations (
id text PRIMARY KEY,
name text NOT NULL,
section text REFERENCES wf_sections(id), -- safe FK: wf_sections is created above in this file
details jsonb NOT NULL DEFAULT '{}',
priority int,
dsid text, -- optional datasource triple for datasource-bound workflows
dsname text,
apiid text,
enabled boolean NOT NULL DEFAULT false,
configured boolean NOT NULL DEFAULT false,
scheduling_enabled boolean NOT NULL DEFAULT false,
scheduling_cron text, -- cron expression, used only when scheduling_enabled
scheduling_min_interval int, -- minimum gap between scheduled runs (units not stated here -- TODO confirm: minutes?)
workflow text, -- id of a resources row in the MAIN db (cross-database, no FK possible)
destroy_wf text, -- id of a resources row in the MAIN db (cross-database, no FK possible)
system_params jsonb NOT NULL DEFAULT '{}',
user_params jsonb NOT NULL DEFAULT '{}'
);
-- Email notification subscriptions on workflow events.
-- NOTE: the "emails" template table lives in the main database, not in
-- dnet_wfs (this file is db-wfs's only init script), so message_id cannot
-- carry a foreign key here — "REFERENCES emails(id)" would abort the init
-- with: relation "emails" does not exist.
CREATE TABLE wf_subscriptions (
wf_conf_id text NOT NULL REFERENCES wf_configurations(id), -- safe FK: created above in this file
condition text NOT NULL, -- event condition the subscriber is notified on (e.g. a status value -- TODO confirm)
email text NOT NULL, -- recipient address
message_id text NOT NULL, -- id of an emails row in the MAIN db (cross-database, no FK possible)
PRIMARY KEY (wf_conf_id, condition, email)
);

307
data/sql/wfs.sql~ Normal file
View File

@ -0,0 +1,307 @@
-- NOTE(review): this file is an editor backup ("wfs.sql~") of the old
-- monolithic schema, apparently committed by accident; it duplicates
-- schema.sql/wfs.sql and should be removed from the repository.
-- Vocabularies
-- Controlled vocabularies and their terms.
CREATE TABLE vocabularies (
id text PRIMARY KEY,
name text NOT NULL,
description text
);
CREATE TABLE vocabulary_terms (
vocabulary text NOT NULL REFERENCES vocabularies(id) ON UPDATE CASCADE ON DELETE CASCADE,
code text NOT NULL,
name text NOT NULL,
encoding text DEFAULT 'OPENAIRE',
synonyms jsonb, -- alternative spellings/labels resolving to this term
PRIMARY KEY (vocabulary, code)
);
-- speeds up "all terms of one vocabulary" lookups
CREATE INDEX ON vocabulary_terms (vocabulary);
-- Harvesting protocols and their per-protocol parameter descriptors.
CREATE TABLE protocols (
id text PRIMARY KEY
);
-- Seed data: the fixed set of supported collection protocols.
INSERT INTO protocols(id) VALUES ('oai'),('oai_sets'),('http'),('file'),('classpath'),('fileCSV'),('httpCSV'),('ftp'),('sftp'),('filesystem'),('files_from_metadata'),('files_from_mdstore'),('mongoDump'),('targz'),('zip'),('fileGzip'),('httpList'),('remoteMdstore');
-- Describes which parameters each protocol accepts (drives the admin UI forms).
CREATE TABLE protocol_params (
protocol text NOT NULL REFERENCES protocols(id) ON UPDATE CASCADE ON DELETE CASCADE,
param_name text NOT NULL,
param_label text NOT NULL,
param_type text NOT NULL DEFAULT 'TEXT', -- widget/value type: TEXT, LIST, BOOLEAN (seen below)
optional boolean NOT NULL default false,
has_sel_function boolean NOT NULL default false, -- whether the UI offers a value-selection helper -- TODO confirm
PRIMARY KEY (protocol, param_name)
);
-- Seed data: parameter descriptors for the protocols above.
INSERT INTO protocol_params(protocol, param_name, param_label, param_type, optional, has_sel_function) VALUES
('oai', 'set', 'OAI set', 'LIST', true, true),
('oai', 'format', 'OAI Metadata Format', 'TEXT', false, false),
('http', 'splitOnElement', 'splitOnElement', 'TEXT', false, false),
('file', 'splitOnElement', 'splitOnElement', 'TEXT', false, false),
('classpath', 'splitOnElement', 'splitOnElement', 'TEXT', false, false),
('fileCSV', 'header', 'header', 'TEXT', false, false),
('fileCSV', 'separator', 'separator', 'TEXT', false, false),
('fileCSV', 'identifier', 'identifier', 'TEXT', false, false),
('fileCSV', 'quote', 'quote', 'TEXT', false, false),
('httpCSV', 'separator', 'separator', 'TEXT', false, false),
('httpCSV', 'identifier', 'identifier', 'TEXT', false, false),
('httpCSV', 'quote', 'quote', 'TEXT', false, false),
('ftp', 'username', 'username', 'TEXT', false, false),
('ftp', 'password', 'password', 'TEXT', false, false),
('ftp', 'recursive', 'recursive', 'BOOLEAN', false, false),
('ftp', 'extensions', 'extensions', 'LIST', false, false),
('sftp', 'username', 'username', 'TEXT', false, false),
('sftp', 'password', 'password', 'TEXT', true, false),
('sftp', 'authMethod', 'authMethod', 'TEXT', true, false),
('sftp', 'privateKeyPath', 'privateKeyPath', 'TEXT', true, false),
('sftp', 'port', 'port', 'TEXT', true, false),
('sftp', 'recursive', 'recursive', 'BOOLEAN', false, false),
('sftp', 'extensions', 'extensions', 'LIST', false, false);
-- Contexts
-- A fixed-depth tree: contexts -> categories -> three concept levels.
-- Depth is modeled as separate tables (lvl_0/1/2) instead of a recursive
-- parent column, so each level can be indexed and FK-checked separately.
CREATE TABLE contexts (
id text PRIMARY KEY,
label text NOT NULL,
type text NOT NULL,
params jsonb
);
CREATE TABLE context_categories (
id text NOT NULL PRIMARY KEY,
parent text NOT NULL REFERENCES contexts(id) ON UPDATE CASCADE ON DELETE CASCADE,
label text NOT NULL,
claim boolean NOT NULL, -- whether entities can be claimed into this node
params jsonb
);
CREATE TABLE context_cat_concepts_lvl_0 (
id text NOT NULL PRIMARY KEY,
parent text NOT NULL REFERENCES context_categories(id) ON UPDATE CASCADE ON DELETE CASCADE,
label text NOT NULL,
claim boolean NOT NULL,
params jsonb
);
CREATE TABLE context_cat_concepts_lvl_1 (
id text NOT NULL PRIMARY KEY,
parent text NOT NULL REFERENCES context_cat_concepts_lvl_0(id) ON UPDATE CASCADE ON DELETE CASCADE,
label text NOT NULL,
claim boolean NOT NULL,
params jsonb
);
CREATE TABLE context_cat_concepts_lvl_2 (
id text NOT NULL PRIMARY KEY,
parent text NOT NULL REFERENCES context_cat_concepts_lvl_1(id) ON UPDATE CASCADE ON DELETE CASCADE,
label text NOT NULL,
claim boolean NOT NULL,
params jsonb
);
-- children-of-a-node lookups at every level
CREATE INDEX ON context_categories (parent);
CREATE INDEX ON context_cat_concepts_lvl_0 (parent);
CREATE INDEX ON context_cat_concepts_lvl_1 (parent);
CREATE INDEX ON context_cat_concepts_lvl_2 (parent);
-- WF History
-- One row per terminated workflow execution (audit/monitoring).
-- Duplicate of the definition split out into wfs.sql by this commit.
CREATE TABLE wf_history (
process_id text PRIMARY KEY,
wf_conf_id text NOT NULL, -- id of the launching wf_configurations row (no FK)
name text NOT NULL,
family text NOT NULL,
status text NOT NULL,
start_date timestamp NOT NULL,
end_date timestamp NOT NULL,
ds_id text, -- optional datasource triple for datasource-bound workflows
ds_name text,
ds_api text,
details jsonb
);
-- Other Resources
-- Generic typed text/XML/JSON resources (rules, configurations, templates).
CREATE TABLE resource_types(
id text PRIMARY KEY,
name text NOT NULL,
content_type text NOT NULL DEFAULT 'text/plain' -- MIME type used when serving the resource content
);
-- Seed data: the simple (table-backed) resource types.
INSERT INTO resource_types(id, name, content_type) VALUES
('transformation_rule_xslt', 'Transformation Rules (xslt)', 'application/xml'),
('transformation_rule_legacy', 'Transformation Rules (legacy)', 'text/plain'),
('cleaning_rule', 'Cleaning Rules', 'application/xml'),
('hadoop_job_configuration', 'Hadoop Job Configurations', 'application/xml'),
('dedup_configuration', 'Dedup Configurations', 'application/json'),
('wf_template', 'Workflow Templates', 'application/json');
CREATE TABLE resources (
id text PRIMARY KEY,
name text NOT NULL,
description text,
content text NOT NULL DEFAULT '',
type text NOT NULL REFERENCES resource_types(id),
subtype text,
creation_date timestamp NOT NULL DEFAULT now(),
modification_date timestamp NOT NULL DEFAULT now() -- kept current by the application -- TODO confirm (no trigger here)
);
-- Union of all resource kinds with their counts, for the resources dashboard.
-- "simple" = true for table-backed types above, false for the synthetic
-- entries (vocabularies, contexts, protocols, emails) managed elsewhere.
-- Grouping by t.id (the PK) lets Postgres accept t.content_type in the
-- select list via functional dependency.
CREATE VIEW resource_types_view AS (
SELECT
t.id AS id,
t.name AS name,
t.content_type AS content_type,
count(r.id) AS count, -- count(r.id) ignores the NULLs produced by the outer join, so empty types report 0
true AS simple
FROM resource_types t
LEFT OUTER JOIN resources r ON (r.type = t.id)
GROUP BY t.id, t.name
ORDER BY t.name
) UNION ALL (
SELECT
'vocabulary' AS id,
'Vocabularies' AS name,
'text/plain' AS content_type,
count(*) AS count,
false AS simple
FROM vocabularies
) UNION ALL (
SELECT
'context' AS id,
'Contexts' AS name,
'text/plain' AS content_type,
count(*) AS count,
false AS simple
FROM contexts
) UNION ALL (
SELECT
'protocol' AS id,
'Protocols' AS name,
'text/plain' AS content_type,
count(*) AS count,
false AS simple
FROM protocols
) UNION ALL (
SELECT
'email' AS id,
'Email templates' AS name,
'text/plain' AS content_type,
count(*) AS count,
false AS simple
FROM emails
);
-- Metadata stores and their versions.
CREATE TABLE mdstores (
id text PRIMARY KEY,
format text NOT NULL,
layout text NOT NULL,
interpretation text NOT NULL,
type text NOT NULL,
datasource_name text, -- optional owning datasource (denormalized, no FK)
datasource_id text,
api_id text,
creation_date timestamp NOT NULL DEFAULT now(),
params jsonb
);
CREATE TABLE mdstore_versions (
id text PRIMARY KEY,
mdstore text NOT NULL REFERENCES mdstores(id),
writing boolean NOT NULL, -- true while a writer holds this version open
readcount int NOT NULL DEFAULT 0, -- number of active readers
lastupdate timestamp NOT NULL DEFAULT now(),
size bigint NOT NULL DEFAULT 0, -- record count or bytes -- TODO confirm unit
params jsonb
);
-- Pointer to the version currently published for reading (one per mdstore).
CREATE TABLE mdstore_current_versions (
mdstore text PRIMARY KEY REFERENCES mdstores(id),
current_version text NOT NULL REFERENCES mdstore_versions(id)
);
-- Each mdstore enriched with its current-version stats and total version
-- count; outer joins keep mdstores that have no current version yet.
CREATE VIEW mdstores_with_info AS SELECT
md.id AS id,
md.format AS format,
md.layout AS layout,
md.type AS type,
md.interpretation AS interpretation,
md.datasource_name AS datasource_name,
md.datasource_id AS datasource_id,
md.api_id AS api_id,
md.params AS params,
md.creation_date as creation_date,
cv.current_version AS current_version,
v1.lastupdate AS lastupdate,
v1.size AS size,
count(v2.id) AS n_versions
FROM
mdstores md
LEFT OUTER JOIN mdstore_current_versions cv ON (md.id = cv.mdstore)
LEFT OUTER JOIN mdstore_versions v1 ON (cv.current_version = v1.id)
LEFT OUTER JOIN mdstore_versions v2 ON (md.id = v2.mdstore)
GROUP BY md.id,
md.format,
md.layout,
md.interpretation,
md.type,
md.datasource_name,
md.datasource_id,
md.params,
md.creation_date,
md.api_id,
cv.current_version,
v1.lastupdate,
v1.size;
-- Email Templates
-- Notification message templates referenced by wf_subscriptions.message_id.
CREATE TABLE emails (
id text PRIMARY KEY,
description text NOT NULL,
subject text NOT NULL,
message text NOT NULL -- message body template
);
-- Workflows
-- Stale duplicate of the definitions split out into wfs.sql by this commit.
-- Unlike wfs.sql, the REFERENCES resources(id)/emails(id) clauses below are
-- valid here because both tables are created earlier in this same file.
CREATE TABLE wf_sections (
id text PRIMARY KEY,
name text NOT NULL
);
-- Seed data: the fixed set of UI sections.
INSERT INTO wf_sections(id, name) VALUES
('GC', 'Garbage Collection'),
('CONSISTENCY', 'InfoSpace Consistency'),
('DEDUP', 'InfoSpace Deduplication'),
('INFERENCE', 'InfoSpace Inference'),
('MONITOR', 'InfoSpace Monitoring'),
('PROVISION', 'InfoSpace Provision'),
('IS', 'Information Service'),
('BROKER', 'Notification Broker');
CREATE TABLE wf_configurations (
id text PRIMARY KEY,
name text NOT NULL,
section text REFERENCES wf_sections(id),
details jsonb NOT NULL DEFAULT '{}',
priority int,
dsid text,
dsname text,
apiid text,
enabled boolean NOT NULL DEFAULT false,
configured boolean NOT NULL DEFAULT false,
scheduling_enabled boolean NOT NULL DEFAULT false,
scheduling_cron text,
scheduling_min_interval int,
workflow text REFERENCES resources(id),
destroy_wf text REFERENCES resources(id),
system_params jsonb NOT NULL DEFAULT '{}',
user_params jsonb NOT NULL DEFAULT '{}'
);
CREATE TABLE wf_subscriptions (
wf_conf_id text NOT NULL REFERENCES wf_configurations(id),
condition text NOT NULL,
email text NOT NULL,
message_id text NOT NULL REFERENCES emails(id),
PRIMARY KEY (wf_conf_id, condition, email)
);

View File

@ -1,10 +1,8 @@
package eu.dnetlib.storage;
import org.springframework.stereotype.Service;
import org.springframework.stereotype.Component;
import eu.dnetlib.wfs.clients.MDStoreClient;
@Service
public class DbMdStoreClient extends MDStoreClient {
@Component
public class DbMdStoreClient {
}

View File

@ -13,11 +13,20 @@ import eu.dnetlib.common.app.AbstractDnetApp;
import eu.dnetlib.common.mapping.cleaner.CleanerFactory;
import eu.dnetlib.common.mapping.xslt.DnetXsltFunction;
import eu.dnetlib.common.mapping.xslt.XsltTransformFactory;
import eu.dnetlib.domain.wfs.WfHistoryEntry;
import eu.dnetlib.domain.wfs.WorkflowConfiguration;
import eu.dnetlib.domain.wfs.WorkflowSection;
import eu.dnetlib.domain.wfs.WorkflowSubscription;
import eu.dnetlib.wfs.clients.SimpleResourceClient;
import eu.dnetlib.wfs.clients.VocabularyClient;
@SpringBootApplication
@EntityScan(basePackageClasses = {})
@EntityScan(basePackageClasses = {
WfHistoryEntry.class,
WorkflowConfiguration.class,
WorkflowSection.class,
WorkflowSubscription.class
})
public class WfExecutorApplication extends AbstractDnetApp {
@Autowired

View File

@ -17,7 +17,7 @@ import eu.dnetlib.domain.common.KeyValue;
import eu.dnetlib.domain.wfs.WorkflowConfiguration;
import eu.dnetlib.domain.wfs.WorkflowSection;
import eu.dnetlib.domain.wfs.WorkflowSubscription;
import eu.dnetlib.wfs.manager.client.WfExecutorClient;
import eu.dnetlib.wfs.clients.WfExecutorClient;
import eu.dnetlib.wfs.manager.service.WorkflowManagerService;
import eu.dnetlib.wfs.procs.ExecutionStatus;

View File

@ -13,7 +13,7 @@ import org.springframework.stereotype.Service;
import eu.dnetlib.domain.wfs.WfHistoryEntry;
import eu.dnetlib.domain.wfs.WorkflowConfiguration;
import eu.dnetlib.wfs.manager.client.WfExecutorClient;
import eu.dnetlib.wfs.clients.WfExecutorClient;
import eu.dnetlib.wfs.procs.ProcessRegistry;
import eu.dnetlib.wfs.procs.WorkflowProcess;
import eu.dnetlib.wfs.repository.WorkflowConfigurationRepository;

View File

@ -10,12 +10,12 @@ import org.springframework.beans.factory.annotation.Autowired;
import eu.dnetlib.domain.wfs.WorkflowConfiguration;
import eu.dnetlib.wfs.annotations.WfInputParam;
import eu.dnetlib.wfs.annotations.WfNode;
import eu.dnetlib.wfs.clients.WfExecutorClient;
import eu.dnetlib.wfs.nodes.ProcessNode;
import eu.dnetlib.wfs.procs.ExecutionStatus;
import eu.dnetlib.wfs.procs.ProcessAware;
import eu.dnetlib.wfs.procs.Token;
import eu.dnetlib.wfs.procs.WorkflowProcess;
import eu.dnetlib.wfs.service.WfExecutorService;
import eu.dnetlib.wfs.utils.NodeCallback;
import eu.dnetlib.wfs.utils.ProcessCallback;
@ -28,7 +28,7 @@ public class LaunchWorkflowJobNode extends ProcessNode implements ProcessAware {
private String wfId;
@Autowired
private WfExecutorService wfExecutorService;
private WfExecutorClient wfExecutorClient;
private WorkflowProcess process;
@ -53,7 +53,7 @@ public class LaunchWorkflowJobNode extends ProcessNode implements ProcessAware {
conf.setSystemParams(process.getGlobalParams());
conf.setUserParams(new HashMap<>());
final ExecutionStatus info = wfExecutorService.startWorkflow(wfId, conf, new ProcessCallback() {
final ExecutionStatus info = wfExecutorClient.startWorkflow(wfId, conf, new ProcessCallback() {
@Override
public void onSuccess(final WorkflowProcess t) {

View File

@ -8,7 +8,7 @@ import eu.dnetlib.domain.mdstore.MDStoreType;
import eu.dnetlib.wfs.annotations.WfInputParam;
import eu.dnetlib.wfs.annotations.WfNode;
import eu.dnetlib.wfs.annotations.WfOutputParam;
import eu.dnetlib.wfs.clients.MDStoreClient;
import eu.dnetlib.wfs.clients.BaseMDStoreClient;
import eu.dnetlib.wfs.nodes.AbstractJobNode;
@WfNode("createMdStore")
@ -36,7 +36,7 @@ public class CreateMdStoreNode extends AbstractJobNode {
private String mdId;
@Autowired
private MDStoreClient mdStoreClient;
private BaseMDStoreClient mdStoreClient;
@Override
protected void execute() throws Exception {

View File

@ -4,7 +4,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import eu.dnetlib.wfs.annotations.WfInputParam;
import eu.dnetlib.wfs.annotations.WfNode;
import eu.dnetlib.wfs.clients.MDStoreClient;
import eu.dnetlib.wfs.clients.BaseMDStoreClient;
import eu.dnetlib.wfs.nodes.AbstractJobNode;
@WfNode("deleteMdStore")
@ -14,7 +14,7 @@ public class DeleteMdStoreNode extends AbstractJobNode {
private String mdId;
@Autowired
private MDStoreClient mdStoreClient;
private BaseMDStoreClient mdStoreClient;
@Override
protected void execute() throws Exception {

View File

@ -1,8 +1,11 @@
package eu.dnetlib.wfs.clients;
import org.springframework.stereotype.Component;
import eu.dnetlib.domain.mdstore.MDStoreType;
public abstract class MDStoreClient {
@Component
public class BaseMDStoreClient {
public String createMDStore(final String format,
final String layout,

View File

@ -1,13 +1,20 @@
package eu.dnetlib.wfs.manager.client;
package eu.dnetlib.wfs.clients;
import org.springframework.stereotype.Service;
import org.springframework.stereotype.Component;
import eu.dnetlib.domain.wfs.WorkflowConfiguration;
import eu.dnetlib.wfs.procs.ExecutionStatus;
import eu.dnetlib.wfs.utils.ProcessCallback;
@Service
@Component
public class WfExecutorClient {
// Use a QUEUE or a DB table ?
public ExecutionStatus startWorkflow(final String wfId, final WorkflowConfiguration conf, final ProcessCallback processCallback) {
// TODO Auto-generated method stub
return null;
}
public void startWorkflowConfiguration(final WorkflowConfiguration conf) {
// TODO Auto-generated method stub
@ -17,5 +24,4 @@ public class WfExecutorClient {
// TODO Auto-generated method stub
return null;
}
}

View File

@ -7,9 +7,9 @@ services:
networks:
- backend
depends_on:
- maindb
- db-main
environment:
- SPRING_DATASOURCE_URL=jdbc:postgresql://maindb:${PG_PORT}/${PG_MAIN_DB}
- SPRING_DATASOURCE_URL=jdbc:postgresql://db-main:${PG_PORT}/${PG_MAIN_DB}
- SPRING_DATASOURCE_USERNAME=${PG_USER}
- SPRING_DATASOURCE_PASSWORD=${PG_PASSWORD}
@ -20,9 +20,9 @@ services:
networks:
- backend
depends_on:
- maindb
- db-main
environment:
- SPRING_DATASOURCE_URL=jdbc:postgresql://maindb:${PG_PORT}/${PG_MAIN_DB}
- SPRING_DATASOURCE_URL=jdbc:postgresql://db-main:${PG_PORT}/${PG_MAIN_DB}
- SPRING_DATASOURCE_USERNAME=${PG_USER}
- SPRING_DATASOURCE_PASSWORD=${PG_PASSWORD}
@ -33,9 +33,9 @@ services:
networks:
- backend
depends_on:
- dsmdb
- db-dsm
environment:
- SPRING_DATASOURCE_URL=jdbc:postgresql://dsmdb:${PG_PORT}/${PG_DSM_DB}
- SPRING_DATASOURCE_URL=jdbc:postgresql://db-dsm:${PG_PORT}/${PG_DSM_DB}
- SPRING_DATASOURCE_USERNAME=${PG_USER}
- SPRING_DATASOURCE_PASSWORD=${PG_PASSWORD}
@ -46,9 +46,9 @@ services:
networks:
- backend
depends_on:
- maindb
- db-main
environment:
- SPRING_DATASOURCE_URL=jdbc:postgresql://maindb:${PG_PORT}/${PG_MAIN_DB}
- SPRING_DATASOURCE_URL=jdbc:postgresql://db-main:${PG_PORT}/${PG_MAIN_DB}
- SPRING_DATASOURCE_USERNAME=${PG_USER}
- SPRING_DATASOURCE_PASSWORD=${PG_PASSWORD}
@ -59,9 +59,9 @@ services:
networks:
- backend
depends_on:
- maindb
- db-main
environment:
- SPRING_DATASOURCE_URL=jdbc:postgresql://maindb:${PG_PORT}/${PG_MAIN_DB}
- SPRING_DATASOURCE_URL=jdbc:postgresql://db-main:${PG_PORT}/${PG_MAIN_DB}
- SPRING_DATASOURCE_USERNAME=${PG_USER}
- SPRING_DATASOURCE_PASSWORD=${PG_PASSWORD}
@ -72,9 +72,9 @@ services:
networks:
- backend
depends_on:
- maindb
- db-main
environment:
- SPRING_DATASOURCE_URL=jdbc:postgresql://maindb:${PG_PORT}/${PG_MAIN_DB}
- SPRING_DATASOURCE_URL=jdbc:postgresql://db-main:${PG_PORT}/${PG_MAIN_DB}
- SPRING_DATASOURCE_USERNAME=${PG_USER}
- SPRING_DATASOURCE_PASSWORD=${PG_PASSWORD}
@ -85,9 +85,9 @@ services:
networks:
- backend
depends_on:
- maindb
- db-main
environment:
- SPRING_DATASOURCE_URL=jdbc:postgresql://maindb:${PG_PORT}/${PG_MAIN_DB}
- SPRING_DATASOURCE_URL=jdbc:postgresql://db-main:${PG_PORT}/${PG_MAIN_DB}
- SPRING_DATASOURCE_USERNAME=${PG_USER}
- SPRING_DATASOURCE_PASSWORD=${PG_PASSWORD}
@ -98,9 +98,9 @@ services:
networks:
- backend
depends_on:
- maindb
- db-wfs
environment:
- SPRING_DATASOURCE_URL=jdbc:postgresql://maindb:${PG_PORT}/${PG_MAIN_DB}
- SPRING_DATASOURCE_URL=jdbc:postgresql://db-wfs:${PG_PORT}/${PG_WFS_DB}
- SPRING_DATASOURCE_USERNAME=${PG_USER}
- SPRING_DATASOURCE_PASSWORD=${PG_PASSWORD}
@ -111,9 +111,9 @@ services:
networks:
- backend
depends_on:
- maindb
- db-wfs
environment:
- SPRING_DATASOURCE_URL=jdbc:postgresql://maindb:${PG_PORT}/${PG_MAIN_DB}
- SPRING_DATASOURCE_URL=jdbc:postgresql://db-wfs:${PG_PORT}/${PG_WFS_DB}
- SPRING_DATASOURCE_USERNAME=${PG_USER}
- SPRING_DATASOURCE_PASSWORD=${PG_PASSWORD}
@ -124,13 +124,13 @@ services:
networks:
- backend
depends_on:
- maindb
- db-main
environment:
- SPRING_DATASOURCE_URL=jdbc:postgresql://maindb:${PG_PORT}/${PG_MAIN_DB}
- SPRING_DATASOURCE_URL=jdbc:postgresql://db-main:${PG_PORT}/${PG_MAIN_DB}
- SPRING_DATASOURCE_USERNAME=${PG_USER}
- SPRING_DATASOURCE_PASSWORD=${PG_PASSWORD}
maindb:
db-main:
image: postgres:15.4
restart: always
expose:
@ -145,7 +145,7 @@ services:
- ./data/sql/schema.sql:/docker-entrypoint-initdb.d/init.sql
- pg_main_data:/var/lib/postgresql/data
dsmdb:
db-dsm:
image: postgres:15.4
restart: always
expose:
@ -160,7 +160,7 @@ services:
- ./data/sql/dsm.sql:/docker-entrypoint-initdb.d/init.sql
- pg_dsm_data:/var/lib/postgresql/data
mdstoredb:
db-mdstores:
image: postgres:15.4
restart: always
expose:
@ -175,7 +175,21 @@ services:
- ./data/sql/mdstore_schema.sql:/docker-entrypoint-initdb.d/init.sql
- pg_mdstore_data:/var/lib/postgresql/data
db-wfs:
image: postgres:15.4
restart: always
expose:
- ${PG_PORT}
networks:
- backend
environment:
POSTGRES_USER: ${PG_USER}
POSTGRES_PASSWORD: ${PG_PASSWORD}
POSTGRES_DB: ${PG_WFS_DB}
volumes:
- ./data/sql/wfs.sql:/docker-entrypoint-initdb.d/init.sql
- pg_wfs_data:/var/lib/postgresql/data
solr:
image: solr:9.3.0
expose:
@ -188,7 +202,6 @@ services:
- solr-precreate
- gettingstarted
reverse-proxy:
image: nginx
volumes:
@ -200,6 +213,17 @@ services:
- backend
environment:
- NGINX_PORT=${PROXY_PORT}
depends_on:
- context-manager
- dsm
- mail-sender
- indexer
- mdsm
- oai-exporter
- resource-manager
- vocabulary-manager
- wf-exec-postgres
- wf-manager
networks:
backend:
@ -210,3 +234,5 @@ volumes:
pg_main_data:
pg_dsm_data:
pg_mdstore_data:
pg_wfs_data:

View File

@ -12,6 +12,7 @@ export PG_PASSWORD=ax45vs#1A
export PG_MAIN_DB=dnet_is
export PG_DSM_DB=dnet_dsm
export PG_MDSTORE_DB=dnet_mdstores
export PG_WFS_DB=dnet_wfs
docker-compose up --force-recreate --build