Compare commits


16 Commits

Author SHA1 Message Date
104fd05ce3 refactor: explicitly set logging - to squash 2024-11-08 10:44:56 +01:00
f3a14adf62 refactor: explicitly set logging 2024-11-08 10:27:08 +01:00
967e45624e Add exporter configuration to focus (#249) 2024-11-05 15:46:00 +01:00
8cd5b93b95 Merge pull request #237 from samply/feat/ccp-ppi
Feat/ccp ppi
2024-11-05 13:59:01 +01:00
Jan 52c24ee6fa fix(fhir2sql): add the postgres connection string to focus (#245) 2024-11-05 13:34:36 +01:00
26712d3567 feat: add and set FOCUS_ENDPOINT_TYPE to support fhir2sql (#244)
* add and set FOCUS_ENDPOINT_TYPE

Co-authored-by: Jan <59206115+Threated@users.noreply.github.com>

---------

Co-authored-by: davidmscholz <david.scholz@dkfz-heidelberg.de>
Co-authored-by: Jan <59206115+Threated@users.noreply.github.com>
2024-10-30 15:21:15 +01:00
eea17c3478 fix: remove invalid beam-proxy tag and add it to focus instead (#242) 2024-10-22 16:48:48 +02:00
cf5230963c feat: add focus tags in ccp and bbmri (#240)
Co-authored-by: p.delpy@dkfz-heidelberg.de <p.delpy@dkfz-heidelberg.de>
2024-10-21 11:00:01 +02:00
7aaee5e7d5 feat: add auto archiving action (#238)
* feat: add auto archiving action

---------

Co-authored-by: p.delpy@dkfz-heidelberg.de <p.delpy@dkfz-heidelberg.de>
Co-authored-by: Martin Lablans <6804500+lablans@users.noreply.github.com>
2024-10-15 13:03:42 +02:00
3312ca8a64 feat: added blaze cql cache (#236) 2024-10-10 14:34:28 +02:00
23981062bb Move ppi to id-management 2024-10-10 13:27:24 +02:00
8e7fe6851e fix: use correct mainzelliste api key 2024-10-10 13:11:43 +02:00
760d599b7c Add CCP-PPI 2024-10-10 12:56:36 +02:00
072ee348fc fix: deactivate landingpage for KR project (#234)
fix: deactivate landingpage for KR project
2024-10-09 09:24:27 +02:00
f328e40963 Merge pull request #233 from samply/fix/id-management-redirection
Allow Usage of Centraxx Interface without login
2024-10-08 13:30:44 +02:00
eb2955872f fix: allow usage of centraxx interface without login
Before this change, CentraXX was redirected to the central login servers when interacting with the id-management.
2024-10-01 13:30:23 +02:00
23 changed files with 132 additions and 139 deletions

View File

@@ -0,0 +1,39 @@
import os
import requests
from datetime import datetime, timedelta

# Configuration
GITHUB_TOKEN = os.getenv('GITHUB_TOKEN')
REPO = 'samply/bridgehead'
HEADERS = {'Authorization': f'token {GITHUB_TOKEN}', 'Accept': 'application/vnd.github.v3+json'}
API_URL = f'https://api.github.com/repos/{REPO}/branches'
INACTIVE_DAYS = 365
CUTOFF_DATE = datetime.now() - timedelta(days=INACTIVE_DAYS)

# Fetch all branches
def get_branches():
    response = requests.get(API_URL, headers=HEADERS)
    response.raise_for_status()
    return response.json() if response.status_code == 200 else []

# Rename a branch via the GitHub API
def rename_branch(old_name, new_name):
    rename_url = f'https://api.github.com/repos/{REPO}/branches/{old_name}/rename'
    response = requests.post(rename_url, json={'new_name': new_name}, headers=HEADERS)
    response.raise_for_status()
    print(f"Renamed branch {old_name} to {new_name}" if response.status_code == 201 else f"Failed to rename {old_name}: {response.status_code}")

# Check if the branch is inactive
def is_inactive(commit_url):
    last_commit_date = requests.get(commit_url, headers=HEADERS).json()['commit']['committer']['date']
    return datetime.strptime(last_commit_date, '%Y-%m-%dT%H:%M:%SZ') < CUTOFF_DATE

# Rename inactive branches
def main():
    for branch in get_branches():
        if is_inactive(branch['commit']['url']):
            # rename_branch(branch['name'], f"archived/{branch['name']}")
            print(f"[LOG] Branch '{branch['name']}' is inactive and would be renamed to 'archived/{branch['name']}'")

if __name__ == "__main__":
    main()
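
For a quick local dry run, the script can be invoked the same way the workflow below does. A minimal sketch, assuming a personal access token with repo scope (the token value is a placeholder; the run only logs candidates, since the rename_branch() call is commented out):

export GITHUB_TOKEN=ghp_xxxxxxxxxxxx  # placeholder token, never commit a real one
python .github/scripts/rename_inactive_branches.py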

View File

@@ -0,0 +1,27 @@
name: Cleanup - Rename Inactive Branches

on:
  schedule:
    - cron: '0 0 * * 0' # Runs every Sunday at midnight

jobs:
  archive-stale-branches:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Repository
        uses: actions/checkout@v2

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: '3.x'

      - name: Install Libraries
        run: pip install requests

      - name: Run Script to Rename Inactive Branches
        run: |
          python .github/scripts/rename_inactive_branches.py
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -10,7 +10,8 @@ services:
       BASE_URL: "http://bridgehead-bbmri-blaze:8080"
       JAVA_TOOL_OPTIONS: "-Xmx${BLAZE_MEMORY_CAP:-4096}m"
       DB_RESOURCE_CACHE_SIZE: ${BLAZE_RESOURCE_CACHE_CAP:-2500000}
-      DB_BLOCK_CACHE_SIZE: $BLAZE_MEMORY_CAP
+      DB_BLOCK_CACHE_SIZE: ${BLAZE_MEMORY_CAP}
+      CQL_EXPR_CACHE_SIZE: ${BLAZE_CQL_CACHE_CAP:-32}
       ENFORCE_REFERENTIAL_INTEGRITY: "false"
     volumes:
       - "blaze-data:/app/data"

View File

@@ -2,7 +2,7 @@ version: "3.7"
 services:
   focus-eric:
-    image: docker.verbis.dkfz.de/cache/samply/focus:${FOCUS_TAG}
+    image: docker.verbis.dkfz.de/cache/samply/focus:${FOCUS_TAG}-bbmri
     container_name: bridgehead-focus-eric
     environment:
       API_KEY: ${ERIC_FOCUS_BEAM_SECRET_SHORT}

View File

@@ -2,7 +2,7 @@ version: "3.7"
 services:
   focus-gbn:
-    image: docker.verbis.dkfz.de/cache/samply/focus:${FOCUS_TAG}
+    image: docker.verbis.dkfz.de/cache/samply/focus:${FOCUS_TAG}-bbmri
     container_name: bridgehead-focus-gbn
     environment:
       API_KEY: ${GBN_FOCUS_BEAM_SECRET_SHORT}

View File

@@ -8,8 +8,10 @@ services:
       BASE_URL: "http://bridgehead-ccp-blaze:8080"
       JAVA_TOOL_OPTIONS: "-Xmx${BLAZE_MEMORY_CAP:-4096}m"
       DB_RESOURCE_CACHE_SIZE: ${BLAZE_RESOURCE_CACHE_CAP:-2500000}
-      DB_BLOCK_CACHE_SIZE: $BLAZE_MEMORY_CAP
+      DB_BLOCK_CACHE_SIZE: ${BLAZE_MEMORY_CAP}
+      CQL_EXPR_CACHE_SIZE: ${BLAZE_CQL_CACHE_CAP:-32}
       ENFORCE_REFERENTIAL_INTEGRITY: "false"
+      LOG_LEVEL: ${LOG_LEVEL_BLAZE:-WARN}
     volumes:
       - "blaze-data:/app/data"
     labels:
@@ -21,7 +23,7 @@ services:
       - "traefik.http.routers.blaze_ccp.tls=true"

   focus:
-    image: docker.verbis.dkfz.de/cache/samply/focus:${FOCUS_TAG}
+    image: docker.verbis.dkfz.de/cache/samply/focus:${FOCUS_TAG}-dktk
     container_name: bridgehead-focus
     environment:
       API_KEY: ${FOCUS_BEAM_SECRET_SHORT}
@@ -32,6 +34,8 @@ services:
       RETRY_COUNT: ${FOCUS_RETRY_COUNT}
       EPSILON: 0.28
       QUERIES_TO_CACHE: '/queries_to_cache.conf'
+      ENDPOINT_TYPE: ${FOCUS_ENDPOINT_TYPE:-blaze}
+      RUST_LOG: ${LOG_LEVEL_FOCUS:-WARN}
     volumes:
       - /srv/docker/bridgehead/ccp/queries_to_cache.conf:/queries_to_cache.conf
     depends_on:
@@ -49,6 +53,7 @@ services:
       ALL_PROXY: http://forward_proxy:3128
       TLS_CA_CERTIFICATES_DIR: /conf/trusted-ca-certs
       ROOTCERT_FILE: /conf/root.crt.pem
+      RUST_LOG: ${LOG_LEVEL_FOCUS:-WARN}
     secrets:
       - proxy.pem
     depends_on:
@@ -57,7 +62,6 @@ services:
       - /etc/bridgehead/trusted-ca-certs:/conf/trusted-ca-certs:ro
       - /srv/docker/bridgehead/ccp/root.crt.pem:/conf/root.crt.pem:ro

 volumes:
   blaze-data:

View File

@@ -10,6 +10,7 @@ services:
       DB_RESOURCE_CACHE_SIZE: ${BLAZE_RESOURCE_CACHE_CAP:-2500000}
       DB_BLOCK_CACHE_SIZE: $BLAZE_MEMORY_CAP
       ENFORCE_REFERENTIAL_INTEGRITY: "false"
+      LOG_LEVEL: ${LOG_LEVEL_BLAZE:-WARN}
     volumes:
       - "blaze-secondary-data:/app/data"
     labels:

View File

@@ -10,6 +10,7 @@ services:
       DISABLE_AUTH: "true" # https://rocker-project.org/images/versioned/rstudio.html#how-to-use
       HTTP_RELATIVE_PATH: "/rstudio"
       ALL_PROXY: "http://forward_proxy:3128" # https://rocker-project.org/use/networking.html
+      LOG_LEVEL: ${LOG_LEVEL_RSTUDIO:-WARN}
     labels:
       - "traefik.enable=true"
       - "traefik.http.routers.rstudio_ccp.rule=PathPrefix(`/rstudio`)"
@@ -54,6 +55,7 @@ services:
       BEAM_APP_ID: token-manager.${PROXY_ID}
       BEAM_SECRET: ${TOKEN_MANAGER_SECRET}
       BEAM_DATASHIELD_PROXY: request-manager
+      LOG_LEVEL: ${LOG_LEVEL_OPAL:-WARN}
     volumes:
       - "/var/cache/bridgehead/ccp/opal-metadata-db:/srv" # Opal metadata
     secrets:
@@ -75,6 +77,8 @@ services:
     image: docker.verbis.dkfz.de/ccp/dktk-rserver # datashield/rock-base + dsCCPhos
     tmpfs:
       - /srv
+    environment:
+      LOG_LEVEL: ${LOG_LEVEL_OPAL:-WARN}

   beam-connect:
     image: docker.verbis.dkfz.de/cache/samply/beam-connect:develop
@@ -87,6 +91,7 @@ services:
       DISCOVERY_URL: "./map/central.json"
       LOCAL_TARGETS_FILE: "./map/local.json"
       NO_AUTH: "true"
+      RUST_LOG: ${LOG_LEVEL_BEAMCONNECT:-WARN}
     secrets:
       - opal-cert.pem
     depends_on:
@@ -151,7 +156,7 @@ services:
       --pass-access-token=false
     labels:
       - "traefik.enable=true"
-      - "traefik.http.routers.oauth2_proxy.rule=Host(`${HOST}`) && PathPrefix(`/oauth2`)"
+      - "traefik.http.routers.oauth2_proxy.rule=PathPrefix(`/oauth2`)"
       - "traefik.http.services.oauth2_proxy.loadbalancer.server.port=4180"
       - "traefik.http.routers.oauth2_proxy.tls=true"
     environment:

View File

@@ -17,7 +17,7 @@ services:
       HTTP_PROXY: "http://forward_proxy:3128"
       HTTPS_PROXY: "http://forward_proxy:3128"
       NO_PROXY: beam-proxy,dnpm-backend,host.docker.internal${DNPM_ADDITIONAL_NO_PROXY}
-      RUST_LOG: ${RUST_LOG:-info}
+      RUST_LOG: ${LOG_LEVEL_BEAMCONNECTDNPM:-WARN}
       NO_AUTH: "true"
       TLS_CA_CERTIFICATES_DIR: ./conf/trusted-ca-certs
     extra_hosts:

View File

@@ -6,7 +6,6 @@ services:
     container_name: bridgehead-ccp-exporter
     environment:
       JAVA_OPTS: "-Xms1G -Xmx8G -XX:+UseG1GC"
-      LOG_LEVEL: "INFO"
       EXPORTER_API_KEY: "${EXPORTER_API_KEY}" # Set in exporter-setup.sh
       CROSS_ORIGINS: "https://${HOST}"
       EXPORTER_DB_USER: "exporter"
@@ -16,6 +15,7 @@ services:
       SITE: "${SITE_ID}"
       HTTP_SERVLET_REQUEST_SCHEME: "https"
       OPAL_PASSWORD: "${EXPORTER_OPAL_PASSWORD}"
+      LOG_LEVEL: ${LOG_LEVEL_EXPORTER:-WARN}
     labels:
       - "traefik.enable=true"
       - "traefik.http.routers.exporter_ccp.rule=PathPrefix(`/ccp-exporter`)"
@@ -42,7 +42,6 @@ services:
     container_name: bridgehead-ccp-reporter
     environment:
       JAVA_OPTS: "-Xms1G -Xmx8G -XX:+UseG1GC"
-      LOG_LEVEL: "INFO"
       CROSS_ORIGINS: "https://${HOST}"
       HTTP_RELATIVE_PATH: "/ccp-reporter"
       SITE: "${SITE_ID}"
@@ -50,6 +49,7 @@ services:
       EXPORTER_URL: "http://exporter:8092"
       LOG_FHIR_VALIDATION: "false"
       HTTP_SERVLET_REQUEST_SCHEME: "https"
+      LOG_LEVEL: ${LOG_LEVEL_REPORTER:-WARN}

     # In this initial development state of the bridgehead, we are trying to have as many volumes as possible.
     # However, in the first executions in the CCP sites, this volume seems to be very important. A report is
@@ -65,3 +65,8 @@ services:
       - "traefik.http.routers.reporter_ccp.tls=true"
       - "traefik.http.middlewares.reporter_ccp_strip.stripprefix.prefixes=/ccp-reporter"
       - "traefik.http.routers.reporter_ccp.middlewares=reporter_ccp_strip"
+
+  focus:
+    environment:
+      EXPORTER_URL: "http://exporter:8092"
+      AUTH_HEADER: "${EXPORTER_API_KEY}"

View File

@@ -23,3 +23,7 @@ services:
       POSTGRES_DB: "dashboard"
     volumes:
       - "/var/cache/bridgehead/ccp/dashboard-db:/var/lib/postgresql/data"
+
+  focus:
+    environment:
+      POSTGRES_CONNECTION_STRING: "postgresql://dashboard:${DASHBOARD_DB_PASSWORD}@dashboard-db/dashboard"

View File

@@ -4,4 +4,5 @@ if [ "$ENABLE_FHIR2SQL" == true ]; then
   log INFO "Dashboard setup detected -- will start Dashboard backend and FHIR2SQL service."
   OVERRIDE+=" -f ./$PROJECT/modules/fhir2sql-compose.yml"
   DASHBOARD_DB_PASSWORD="$(generate_simple_password 'fhir2sql')"
+  FOCUS_ENDPOINT_TYPE="blaze-and-sql"
 fi
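
Read together with the ccp compose change above (ENDPOINT_TYPE: ${FOCUS_ENDPOINT_TYPE:-blaze}), this export flips focus from its Blaze-only default to querying both backends whenever the dashboard module is enabled. A purely illustrative sketch of the shell resolution:

# Dashboard module disabled: the variable stays unset, the compose default applies
echo "${FOCUS_ENDPOINT_TYPE:-blaze}"    # -> blaze
# Dashboard module enabled (ENABLE_FHIR2SQL=true): the setup script exports
FOCUS_ENDPOINT_TYPE="blaze-and-sql"
echo "${FOCUS_ENDPOINT_TYPE:-blaze}"    # -> blaze-and-sql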

View File

@@ -14,15 +14,24 @@ services:
       MAGICPL_CONNECTOR_APIKEY: ${IDMANAGER_READ_APIKEY}
       MAGICPL_CENTRAL_PATIENTLIST_APIKEY: ${IDMANAGER_CENTRAL_PATIENTLIST_APIKEY}
       MAGICPL_CONTROLNUMBERGENERATOR_APIKEY: ${IDMANAGER_CONTROLNUMBERGENERATOR_APIKEY}
+      ML_LOG_LEVEL: ${LOG_LEVEL_IDMANAGER:-WARN}
     depends_on:
       - patientlist
       - traefik-forward-auth
     labels:
       - "traefik.enable=true"
+      # Router with Authentication
       - "traefik.http.routers.id-manager.rule=PathPrefix(`/id-manager`)"
-      - "traefik.http.services.id-manager.loadbalancer.server.port=8080"
       - "traefik.http.routers.id-manager.tls=true"
       - "traefik.http.routers.id-manager.middlewares=traefik-forward-auth-idm"
+      - "traefik.http.routers.id-manager.service=id-manager-service"
+      # Router without Authentication
+      - "traefik.http.routers.id-manager-compatibility.rule=PathPrefix(`/id-manager/paths/translator/getIds`)"
+      - "traefik.http.routers.id-manager-compatibility.tls=true"
+      - "traefik.http.routers.id-manager-compatibility.service=id-manager-service"
+      # Definition of Service
+      - "traefik.http.services.id-manager-service.loadbalancer.server.port=8080"
+      - "traefik.http.services.id-manager-service.loadbalancer.server.scheme=http"

   patientlist:
     image: docker.verbis.dkfz.de/bridgehead/mainzelliste
@@ -36,6 +45,8 @@ services:
       - ML_UPLOAD_API_KEY=${IDMANAGER_UPLOAD_APIKEY}
       # Add Variables from /etc/patientlist-id-generators.env
       - PATIENTLIST_SEEDS_TRANSFORMED
+      - ML_LOG_LEVEL=${LOG_LEVEL_PATIENTLIST:-WARN}
+      # TODO confirm LOG_LEVEL
     labels:
       - "traefik.enable=true"
       - "traefik.http.routers.patientlist.rule=PathPrefix(`/patientlist`)"
@@ -57,7 +68,7 @@ services:
       - "/tmp/bridgehead/patientlist/:/docker-entrypoint-initdb.d/"

   traefik-forward-auth:
-    image: docker.verbis.dkfz.de/cache/oauth2-proxy/oauth2-proxy:v7.6.0
+    image: docker.verbis.dkfz.de/cache/oauth2-proxy/oauth2-proxy:latest
     environment:
       - http_proxy=http://forward_proxy:3128
       - https_proxy=http://forward_proxy:3128
@@ -67,6 +78,7 @@ services:
       - OAUTH2_PROXY_CLIENT_ID=bridgehead-${SITE_ID}
       - OAUTH2_PROXY_CLIENT_SECRET=${IDMANAGER_AUTH_CLIENT_SECRET}
       - OAUTH2_PROXY_COOKIE_SECRET=${IDMANAGER_AUTH_COOKIE_SECRET}
+      - OAUTH2_PROXY_COOKIE_NAME=_BRIDGEHEAD_oauth2_idm
       - OAUTH2_PROXY_COOKIE_DOMAINS=.${HOST}
       - OAUTH2_PROXY_HTTP_ADDRESS=:4180
       - OAUTH2_PROXY_REVERSE_PROXY=true
@@ -92,5 +104,12 @@ services:
       forward_proxy:
         condition: service_healthy

+  ccp-patient-project-identificator:
+    image: samply/ccp-patient-project-identificator
+    container_name: bridgehead-ccp-patient-project-identificator
+    environment:
+      MAINZELLISTE_APIKEY: ${IDMANAGER_LOCAL_PATIENTLIST_APIKEY}
+      SITE_NAME: ${SITE_NAME}
+
 volumes:
   patientlist-db-data:
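
A hedged reading of the router split: Traefik prioritizes routers by rule length, so the longer compatibility rule wins for the getIds path and skips the traefik-forward-auth-idm middleware, while every other /id-manager path still goes through the login flow. An illustrative check (the second path is hypothetical):

# Served without login (matches the compatibility router):
curl -k "https://$HOST/id-manager/paths/translator/getIds"
# Still behind traefik-forward-auth (matches the authenticated router):
curl -k "https://$HOST/id-manager/some/other/path"  # hypothetical path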

View File

@@ -25,6 +25,7 @@ services:
       OIDC_CLIENT_SECRET: "${OIDC_CLIENT_SECRET}"
       OIDC_REALM: "${OIDC_REALM}"
       OIDC_URL: "${OIDC_URL}"
+      LOG_LEVEL: ${LOG_LEVEL_MTBA:-WARN}
     labels:
       - "traefik.enable=true"

View File

@@ -12,6 +12,7 @@ services:
       CTS_API_KEY: ${NNGM_CTS_APIKEY}
       CRYPT_KEY: ${NNGM_CRYPTKEY}
       #CTS_MAGICPL_SITE: ${SITE_ID}TODO
+      LOG_LEVEL: ${LOG_LEVEL_NNGM:-WARN}
     restart: always
     labels:
       - "traefik.enable=true"

View File

@@ -10,6 +10,7 @@ services:
       SALT: ${LOCAL_SALT}
       KEEP_INTERNAL_ID: ${KEEP_INTERNAL_ID:-false}
       MAINZELLISTE_URL: ${PATIENTLIST_URL:-http://patientlist:8080/patientlist}
+      LOG_LEVEL: ${LOG_LEVEL_REPORTER:-WARN}
     restart: always
     labels:
       - "traefik.enable=true"

View File

@@ -17,6 +17,7 @@ services:
       TEILER_DASHBOARD_URL: "https://${HOST}/ccp-teiler-dashboard"
       DEFAULT_LANGUAGE: "${TEILER_DEFAULT_LANGUAGE_LOWER_CASE}"
       HTTP_RELATIVE_PATH: "/ccp-teiler"
+      LOG_LEVEL: ${LOG_LEVEL_TEILER:-WARN}

   teiler-dashboard:
     image: docker.verbis.dkfz.de/cache/samply/teiler-dashboard:develop
@@ -47,6 +48,7 @@ services:
       TEILER_ADMIN: "${OIDC_ADMIN_GROUP}"
       REPORTER_DEFAULT_TEMPLATE_ID: "ccp-qb"
       EXPORTER_DEFAULT_TEMPLATE_ID: "ccp"
+      LOG_LEVEL: ${LOG_LEVEL_TEILER:-WARN}

   teiler-backend:
@@ -60,7 +62,6 @@ services:
       - "traefik.http.middlewares.teiler_backend_ccp_strip.stripprefix.prefixes=/ccp-teiler-backend"
       - "traefik.http.routers.teiler_backend_ccp.middlewares=teiler_backend_ccp_strip"
     environment:
-      LOG_LEVEL: "INFO"
       APPLICATION_PORT: "8085"
       APPLICATION_ADDRESS: "${HOST}"
       DEFAULT_LANGUAGE: "${TEILER_DEFAULT_LANGUAGE}"
@@ -73,6 +74,7 @@ services:
       HTTP_PROXY: "http://forward_proxy:3128"
       ENABLE_MTBA: "${ENABLE_MTBA}"
       ENABLE_DATASHIELD: "${ENABLE_DATASHIELD}"
+      LOG_LEVEL: ${LOG_LEVEL_TEILER:-WARN}
     secrets:
       - ccp.conf

View File

@@ -1,6 +1,10 @@
 version: "3.7"
 services:
+  landing:
+    deploy:
+      replicas: 0 # deactivate landing page
+
   blaze:
     image: docker.verbis.dkfz.de/cache/samply/blaze:0.28
     container_name: bridgehead-kr-blaze

View File

@@ -1,6 +1,8 @@
 version: "3.7"
 services:
   landing:
+    deploy:
+      replicas: 1 # reactivate if lens is in use
     container_name: lens_federated-search
     image: docker.verbis.dkfz.de/ccp/lens:${SITE_ID}
     labels:

View File

@@ -171,8 +171,10 @@ optimizeBlazeMemoryUsage() {
   if [ $available_system_memory_chunks -eq 0 ]; then
     log WARN "Only ${BLAZE_MEMORY_CAP} system memory available for Blaze. If your Blaze stores more than 128000 FHIR resources it will run significantly slower."
     export BLAZE_RESOURCE_CACHE_CAP=128000;
+    export BLAZE_CQL_CACHE_CAP=32;
   else
     export BLAZE_RESOURCE_CACHE_CAP=$((available_system_memory_chunks * 312500))
+    export BLAZE_CQL_CACHE_CAP=$((($system_memory_in_mb/4)/16));
   fi
 fi
}
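
A worked example of the new CQL cache sizing, assuming a host with 16384 MB of RAM (the memory value is illustrative; the formula is the one exported above):

system_memory_in_mb=16384
echo $((($system_memory_in_mb/4)/16))  # (16384 / 4) / 16 = 256 -> BLAZE_CQL_CACHE_CAP=256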

View File

@@ -1,20 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIDNTCCAh2gAwIBAgIUcGXxIZMxUOoI2kf8FArsOvQfvwwwDQYJKoZIhvcNAQEL
BQAwFjEUMBIGA1UEAxMLQnJva2VyLVJvb3QwHhcNMjQxMDMxMDkxOTUwWhcNMzQx
MDI5MDkyMDIwWjAWMRQwEgYDVQQDEwtCcm9rZXItUm9vdDCCASIwDQYJKoZIhvcN
AQEBBQADggEPADCCAQoCggEBALX+8X4r2mWki4HLs2E5dXR9oGL+8Zos1s9Rmeaz
FgxnpKf6wlop4ZlJd01Pgi3HNFo7XPFi76zalRsHS+rWN3tOy6r5KIjCYiqPb3AY
luZuy7jAQOBGHKODVfJH1QCRqsvEwRbOU6nNFAkMcjSxt5+PmwB1U7+Kvmly4sYI
i4t/gyVvcfEsiZ5LYQ7IpEf+or2Ugpb6j4KlTn+gKFzSfgl+yRhE0bnFEf0eBa+r
HLLpq4hL16+pb6/WZ4DfM9QDioX6Tj2Hje9Va4RJ2dROENuq5sJugdE28hH9qEwE
2bmKh6qvblgwkI3rJFkYH+scBtLEUH0KJY+SZ1iYHkoEaCkCAwEAAaN7MHkwDgYD
VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFOe/txl3B7Sd
NFE+615Z3rfzqBR4MB8GA1UdIwQYMBaAFOe/txl3B7SdNFE+615Z3rfzqBR4MBYG
A1UdEQQPMA2CC0Jyb2tlci1Sb290MA0GCSqGSIb3DQEBCwUAA4IBAQA3kVJlBOHn
Tscsk1FKLYNWE/fr3oUNPUYzXi4lln+UNdRHSdXUPzBp4B5oIi3uymdYg2Rzq9Su
/xjE7++thgQJ37l/DpCm/TUmUFfH5ZqcaMPA+L21mw9G129teCP1nVuXjtYhwnBk
fRiz1tzpO1rZCxC+vxIhcPeYSKbaAQTywtJu0MpduGFrfIwLtrxa4GLRQFD06KPx
Ijq6Pt6kC2abcYtKCMCWmpzttQAq4csWbmWINKkD6GMkuJVpzEx3csg8rCyPCaX0
HedLiKRqaSOzDRnIWfD2CQX6qMg8TNtnxFnZTlc9honxnwcGaeLZKNEg+1oPA40V
NOffBIMF4DAV
-----END CERTIFICATE-----

View File

@@ -1,91 +0,0 @@
version: "3.7"

services:
  beam-proxy:
    image: samply/beam-proxy:develop-sockets
    container_name: bridgehead-beam-proxy
    environment:
      BROKER_URL: ${BROKER_URL}
      PROXY_ID: ${PROXY_ID}
      PRIVKEY_FILE: /run/secrets/proxy.pem
      ALL_PROXY: http://forward_proxy:3128
      TLS_CA_CERTIFICATES_DIR: /conf/trusted-ca-certs
      ROOTCERT_FILE: /conf/root.crt.pem
      APP_beamsel_KEY: ${BEAMSEL_SECRET}
    secrets:
      - proxy.pem
    depends_on:
      - "forward_proxy"
    volumes:
      - /etc/bridgehead/trusted-ca-certs:/conf/trusted-ca-certs:ro
      - /srv/docker/bridgehead/minimal/modules/onko.root.pem:/conf/root.crt.pem:ro

  postgres:
    image: postgres:9.5-alpine
    container_name: bridgehead-onkofdz-postgres
    environment:
      POSTGRES_DB: mainzelliste-sel
      POSTGRES_USER: mainzelliste-sel
      POSTGRES_PASSWORD: ${MAINZELLISTE_DB_PASSWORD}
    volumes:
      # - ./postgres-logs:/var/log/postgresql
      - ml-data:/var/lib/postgresql/data
    depends_on:
      - secureepilinker

  mainzelliste:
    image: medicalinformatics/mainzelliste:secureepilinker-alpha
    container_name: bridgehead-onkofdz-mainzelliste
    environment:
      ML_API_KEY: ${LOCAL_SEL_API_KEY}
      ML_DB_HOST: postgres
      ML_DB_PORT: 5432
      ML_DB_USER: mainzelliste-sel
      ML_DB_NAME: mainzelliste-sel
      ML_DB_PASS: ${MAINZELLISTE_DB_PASSWORD}
      ML_LOCAL_ID: ${SITE_ID}
      ML_LOCAL_SEL_URL: http://secureepilinker:8161
      ML_LOCAL_CALLBACK_LINK_URL: http://mainzelliste:8080/Communicator/linkCallback
      ML_LOCAL_CALLBACK_MATCH_URL: http://mainzelliste:8080/Communicator/matchCallback/${REMOTE_SEL_SITE}
      ML_LOCAL_DATA_SERVICE_URL: http://mainzelliste:8080/Communicator/getAllRecords
      ML_LOCAL_AUTHENTICATION_TYPE: apiKey
      ML_LOCAL_API_KEY: ${LOCAL_SEL_API_KEY}
      ML_SERVER_0_REMOTEID: ${REMOTE_SEL_SITE}
      ML_SERVER_0_IDTYPE: link-${SITE_ID}-${REMOTE_SEL_SITE}
      ML_SERVER_0_REMOTE_SEL_URL: http://beamsel:8080
      ML_SERVER_0_APIKEY: ${REMOTE_SEL_API_KEY}
      ### Linkage Service not used for matching
      ML_SERVER_0_LINKAGE_SERVICE_BASE_URL: ${LS_SEL_URL}
      ML_SERVER_0_LINKAGE_SERVICE_AUTH_TYPE: apiKey
      ML_SERVER_0_LINKAGE_SERVICE_SHARED_KEY: ${LS_SEL_SHARED_KEY}
      ML_LOG_MODE: stdout # stdout = stdout; everything else = logging in mainzelliste.log
      ML_LOG_LEVEL: INFO
      no_proxy: "localhost,secureepilinker"
    volumes:
      # - ./logs:/usr/local/tomcat/logs/
      - /etc/bridgehead/onkofdz/config/mainzelliste.conf.docker:/run/secrets/mainzelliste.docker.conf
      - /etc/bridgehead/onkofdz/config/sel.conf.docker:/run/secrets/sel.docker.conf
    depends_on:
      - postgres
      - secureepilinker

  secureepilinker:
    image: docker.verbis.dkfz.de/onkofdz/secureepilinker:beamsel
    container_name: bridgehead-onkofdz-secureepilinker
    environment:
      no_proxy: "mainzelliste,beamsel"
    volumes:
      - "/etc/bridgehead/onkofdz/config/epilinker.serverconf.json:/data/serverconf.json"
    command: '-vvvv'

  beamsel:
    image: docker.verbis.dkfz.de/onkofdz/beam-sel
    container_name: bridgehead-onkofdz-beamsel
    environment:
      BEAM_URL: "http://beam-proxy:8081"
      BEAM_SECRET: ${BEAMSEL_SECRET}
      BEAM_ID: beamsel.${PROXY_ID}
      SEL_ADDR: "secureepilinker:8161"
    depends_on:
      - secureepilinker

volumes:
  ml-data:

secrets:
  proxy.pem:
    file: /etc/bridgehead/pki/${SITE_ID}.priv.pem

View File

@@ -1,15 +0,0 @@
#!/bin/bash

if [ -n "${ENABLE_ONKOFDZ}" ]; then
  BROKER_ID=test.broker.onkofdz.samply.de
  BROKER_URL=https://${BROKER_ID}
  PROXY_ID=${SITE_ID}.${BROKER_ID}
  BEAMSEL_SECRET="$(cat /proc/sys/kernel/random/uuid | sed 's/[-]//g' | head -c 20)"
  SUPPORT_EMAIL=tobias.kussel@dkfz-heidelberg.de
  PRIVATEKEYFILENAME=/etc/bridgehead/pki/${SITE_ID}.priv.pem
  BROKER_URL_FOR_PREREQ=$BROKER_URL
  log INFO "Loading OnkoFDZ module"
  OVERRIDE+=" -f ./$PROJECT/modules/onkofdz-compose.yml"
fi