Compare commits


7 Commits

17 changed files with 136 additions and 112 deletions

View File

@@ -1,39 +0,0 @@
import os
import requests
from datetime import datetime, timedelta

# Configuration
GITHUB_TOKEN = os.getenv('GITHUB_TOKEN')
REPO = 'samply/bridgehead'
HEADERS = {'Authorization': f'token {GITHUB_TOKEN}', 'Accept': 'application/vnd.github.v3+json'}
API_URL = f'https://api.github.com/repos/{REPO}/branches'
INACTIVE_DAYS = 365
CUTOFF_DATE = datetime.now() - timedelta(days=INACTIVE_DAYS)

# Fetch all branches
def get_branches():
    response = requests.get(API_URL, headers=HEADERS)
    response.raise_for_status()
    return response.json() if response.status_code == 200 else []

# Rename inactive branches
def rename_branch(old_name, new_name):
    rename_url = f'https://api.github.com/repos/{REPO}/branches/{old_name}/rename'
    response = requests.post(rename_url, json={'new_name': new_name}, headers=HEADERS)
    response.raise_for_status()
    print(f"Renamed branch {old_name} to {new_name}" if response.status_code == 201 else f"Failed to rename {old_name}: {response.status_code}")

# Check if the branch is inactive
def is_inactive(commit_url):
    last_commit_date = requests.get(commit_url, headers=HEADERS).json()['commit']['committer']['date']
    return datetime.strptime(last_commit_date, '%Y-%m-%dT%H:%M:%SZ') < CUTOFF_DATE

# Rename inactive branches
def main():
    for branch in get_branches():
        if is_inactive(branch['commit']['url']):
            #rename_branch(branch['name'], f"archived/{branch['name']}")
            print(f"[LOG] Branch '{branch['name']}' is inactive and would be renamed to 'archived/{branch['name']}'")

if __name__ == "__main__":
    main()

View File

@@ -1,27 +0,0 @@
name: Cleanup - Rename Inactive Branches

on:
  schedule:
    - cron: '0 0 * * 0' # Runs every Sunday at midnight

jobs:
  archive-stale-branches:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout Repository
        uses: actions/checkout@v2

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: '3.x'

      - name: Install Libraries
        run: pip install requests

      - name: Run Script to Rename Inactive Branches
        run: |
          python .github/scripts/rename_inactive_branches.py
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -10,8 +10,7 @@ services:
      BASE_URL: "http://bridgehead-bbmri-blaze:8080"
      JAVA_TOOL_OPTIONS: "-Xmx${BLAZE_MEMORY_CAP:-4096}m"
      DB_RESOURCE_CACHE_SIZE: ${BLAZE_RESOURCE_CACHE_CAP:-2500000}
-      DB_BLOCK_CACHE_SIZE: ${BLAZE_MEMORY_CAP}
-      CQL_EXPR_CACHE_SIZE: ${BLAZE_CQL_CACHE_CAP:-32}
+      DB_BLOCK_CACHE_SIZE: $BLAZE_MEMORY_CAP
      ENFORCE_REFERENTIAL_INTEGRITY: "false"
    volumes:
      - "blaze-data:/app/data"

View File

@@ -2,7 +2,7 @@ version: "3.7"
services:
  focus-eric:
-    image: docker.verbis.dkfz.de/cache/samply/focus:${FOCUS_TAG}-bbmri
+    image: docker.verbis.dkfz.de/cache/samply/focus:${FOCUS_TAG}
    container_name: bridgehead-focus-eric
    environment:
      API_KEY: ${ERIC_FOCUS_BEAM_SECRET_SHORT}

View File

@@ -2,7 +2,7 @@ version: "3.7"
services:
  focus-gbn:
-    image: docker.verbis.dkfz.de/cache/samply/focus:${FOCUS_TAG}-bbmri
+    image: docker.verbis.dkfz.de/cache/samply/focus:${FOCUS_TAG}
    container_name: bridgehead-focus-gbn
    environment:
      API_KEY: ${GBN_FOCUS_BEAM_SECRET_SHORT}

View File

@@ -8,8 +8,7 @@ services:
      BASE_URL: "http://bridgehead-ccp-blaze:8080"
      JAVA_TOOL_OPTIONS: "-Xmx${BLAZE_MEMORY_CAP:-4096}m"
      DB_RESOURCE_CACHE_SIZE: ${BLAZE_RESOURCE_CACHE_CAP:-2500000}
-      DB_BLOCK_CACHE_SIZE: ${BLAZE_MEMORY_CAP}
-      CQL_EXPR_CACHE_SIZE: ${BLAZE_CQL_CACHE_CAP:-32}
+      DB_BLOCK_CACHE_SIZE: $BLAZE_MEMORY_CAP
      ENFORCE_REFERENTIAL_INTEGRITY: "false"
    volumes:
      - "blaze-data:/app/data"
@@ -22,7 +21,7 @@ services:
      - "traefik.http.routers.blaze_ccp.tls=true"
  focus:
-    image: docker.verbis.dkfz.de/cache/samply/focus:${FOCUS_TAG}-dktk
+    image: docker.verbis.dkfz.de/cache/samply/focus:${FOCUS_TAG}
    container_name: bridgehead-focus
    environment:
      API_KEY: ${FOCUS_BEAM_SECRET_SHORT}
@@ -33,7 +32,6 @@ services:
      RETRY_COUNT: ${FOCUS_RETRY_COUNT}
      EPSILON: 0.28
      QUERIES_TO_CACHE: '/queries_to_cache.conf'
-      ENDPOINT_TYPE: ${FOCUS_ENDPOINT_TYPE:-blaze}
    volumes:
      - /srv/docker/bridgehead/ccp/queries_to_cache.conf:/queries_to_cache.conf
    depends_on:
@@ -59,6 +57,7 @@ services:
      - /etc/bridgehead/trusted-ca-certs:/conf/trusted-ca-certs:ro
      - /srv/docker/bridgehead/ccp/root.crt.pem:/conf/root.crt.pem:ro
volumes:
  blaze-data:

View File

@@ -22,7 +22,7 @@ services:
  opal:
    container_name: bridgehead-opal
-    image: docker.verbis.dkfz.de/ccp/dktk-opal:test
+    image: docker.verbis.dkfz.de/ccp/dktk-opal:latest
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.opal_ccp.rule=PathPrefix(`/opal`)"
@@ -151,7 +151,7 @@ services:
      --pass-access-token=false
    labels:
      - "traefik.enable=true"
-      - "traefik.http.routers.oauth2_proxy.rule=PathPrefix(`/oauth2`)"
+      - "traefik.http.routers.oauth2_proxy.rule=Host(`${HOST}`) && PathPrefix(`/oauth2`)"
      - "traefik.http.services.oauth2_proxy.loadbalancer.server.port=4180"
      - "traefik.http.routers.oauth2_proxy.tls=true"
    environment:

View File

@@ -65,8 +65,3 @@ services:
      - "traefik.http.routers.reporter_ccp.tls=true"
      - "traefik.http.middlewares.reporter_ccp_strip.stripprefix.prefixes=/ccp-reporter"
      - "traefik.http.routers.reporter_ccp.middlewares=reporter_ccp_strip"
-  focus:
-    environment:
-      EXPORTER_URL: "http://exporter:8092"
-      AUTH_HEADER: "${EXPORTER_API_KEY}"

View File

@@ -23,7 +23,3 @@ services:
      POSTGRES_DB: "dashboard"
    volumes:
      - "/var/cache/bridgehead/ccp/dashboard-db:/var/lib/postgresql/data"
-  focus:
-    environment:
-      POSTGRES_CONNECTION_STRING: "postgresql://dashboard:${DASHBOARD_DB_PASSWORD}@dashboard-db/dashboard"

View File

@@ -4,5 +4,4 @@ if [ "$ENABLE_FHIR2SQL" == true ]; then
  log INFO "Dashboard setup detected -- will start Dashboard backend and FHIR2SQL service."
  OVERRIDE+=" -f ./$PROJECT/modules/fhir2sql-compose.yml"
  DASHBOARD_DB_PASSWORD="$(generate_simple_password 'fhir2sql')"
-  FOCUS_ENDPOINT_TYPE="blaze-and-sql"
fi

View File

@@ -19,18 +19,10 @@ services:
      - traefik-forward-auth
    labels:
      - "traefik.enable=true"
-      # Router with Authentication
      - "traefik.http.routers.id-manager.rule=PathPrefix(`/id-manager`)"
+      - "traefik.http.services.id-manager.loadbalancer.server.port=8080"
      - "traefik.http.routers.id-manager.tls=true"
      - "traefik.http.routers.id-manager.middlewares=traefik-forward-auth-idm"
-      - "traefik.http.routers.id-manager.service=id-manager-service"
-      # Router without Authentication
-      - "traefik.http.routers.id-manager-compatibility.rule=PathPrefix(`/id-manager/paths/translator/getIds`)"
-      - "traefik.http.routers.id-manager-compatibility.tls=true"
-      - "traefik.http.routers.id-manager-compatibility.service=id-manager-service"
-      # Definition of Service
-      - "traefik.http.services.id-manager-service.loadbalancer.server.port=8080"
-      - "traefik.http.services.id-manager-service.loadbalancer.server.scheme=http"

  patientlist:
    image: docker.verbis.dkfz.de/bridgehead/mainzelliste
@@ -65,7 +57,7 @@ services:
      - "/tmp/bridgehead/patientlist/:/docker-entrypoint-initdb.d/"
  traefik-forward-auth:
-    image: docker.verbis.dkfz.de/cache/oauth2-proxy/oauth2-proxy:latest
+    image: docker.verbis.dkfz.de/cache/oauth2-proxy/oauth2-proxy:v7.6.0
    environment:
      - http_proxy=http://forward_proxy:3128
      - https_proxy=http://forward_proxy:3128
@@ -75,7 +67,6 @@ services:
      - OAUTH2_PROXY_CLIENT_ID=bridgehead-${SITE_ID}
      - OAUTH2_PROXY_CLIENT_SECRET=${IDMANAGER_AUTH_CLIENT_SECRET}
      - OAUTH2_PROXY_COOKIE_SECRET=${IDMANAGER_AUTH_COOKIE_SECRET}
-      - OAUTH2_PROXY_COOKIE_NAME=_BRIDGEHEAD_oauth2_idm
      - OAUTH2_PROXY_COOKIE_DOMAINS=.${HOST}
      - OAUTH2_PROXY_HTTP_ADDRESS=:4180
      - OAUTH2_PROXY_REVERSE_PROXY=true
@@ -101,12 +92,5 @@ services:
      forward_proxy:
        condition: service_healthy
-  ccp-patient-project-identificator:
-    image: samply/ccp-patient-project-identificator
-    container_name: bridgehead-ccp-patient-project-identificator
-    environment:
-      MAINZELLISTE_APIKEY: ${IDMANAGER_LOCAL_PATIENTLIST_APIKEY}
-      SITE_NAME: ${SITE_NAME}

volumes:
  patientlist-db-data:

View File

@@ -1,10 +1,6 @@
version: "3.7"
services:
-  landing:
-    deploy:
-      replicas: 0 #deactivate landing page
  blaze:
    image: docker.verbis.dkfz.de/cache/samply/blaze:0.28
    container_name: bridgehead-kr-blaze

View File

@@ -1,8 +1,6 @@
version: "3.7"
services:
  landing:
-    deploy:
-      replicas: 1 #reactivate if lens is in use
    container_name: lens_federated-search
    image: docker.verbis.dkfz.de/ccp/lens:${SITE_ID}
    labels:

View File

@@ -171,10 +171,8 @@ optimizeBlazeMemoryUsage() {
    if [ $available_system_memory_chunks -eq 0 ]; then
      log WARN "Only ${BLAZE_MEMORY_CAP} system memory available for Blaze. If your Blaze stores more than 128000 fhir ressources it will run significally slower."
      export BLAZE_RESOURCE_CACHE_CAP=128000;
-      export BLAZE_CQL_CACHE_CAP=32;
    else
      export BLAZE_RESOURCE_CACHE_CAP=$((available_system_memory_chunks * 312500))
-      export BLAZE_CQL_CACHE_CAP=$((($system_memory_in_mb/4)/16));
    fi
  fi
}

View File

@@ -0,0 +1,20 @@
-----BEGIN CERTIFICATE-----
MIIDNTCCAh2gAwIBAgIUcGXxIZMxUOoI2kf8FArsOvQfvwwwDQYJKoZIhvcNAQEL
BQAwFjEUMBIGA1UEAxMLQnJva2VyLVJvb3QwHhcNMjQxMDMxMDkxOTUwWhcNMzQx
MDI5MDkyMDIwWjAWMRQwEgYDVQQDEwtCcm9rZXItUm9vdDCCASIwDQYJKoZIhvcN
AQEBBQADggEPADCCAQoCggEBALX+8X4r2mWki4HLs2E5dXR9oGL+8Zos1s9Rmeaz
FgxnpKf6wlop4ZlJd01Pgi3HNFo7XPFi76zalRsHS+rWN3tOy6r5KIjCYiqPb3AY
luZuy7jAQOBGHKODVfJH1QCRqsvEwRbOU6nNFAkMcjSxt5+PmwB1U7+Kvmly4sYI
i4t/gyVvcfEsiZ5LYQ7IpEf+or2Ugpb6j4KlTn+gKFzSfgl+yRhE0bnFEf0eBa+r
HLLpq4hL16+pb6/WZ4DfM9QDioX6Tj2Hje9Va4RJ2dROENuq5sJugdE28hH9qEwE
2bmKh6qvblgwkI3rJFkYH+scBtLEUH0KJY+SZ1iYHkoEaCkCAwEAAaN7MHkwDgYD
VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFOe/txl3B7Sd
NFE+615Z3rfzqBR4MB8GA1UdIwQYMBaAFOe/txl3B7SdNFE+615Z3rfzqBR4MBYG
A1UdEQQPMA2CC0Jyb2tlci1Sb290MA0GCSqGSIb3DQEBCwUAA4IBAQA3kVJlBOHn
Tscsk1FKLYNWE/fr3oUNPUYzXi4lln+UNdRHSdXUPzBp4B5oIi3uymdYg2Rzq9Su
/xjE7++thgQJ37l/DpCm/TUmUFfH5ZqcaMPA+L21mw9G129teCP1nVuXjtYhwnBk
fRiz1tzpO1rZCxC+vxIhcPeYSKbaAQTywtJu0MpduGFrfIwLtrxa4GLRQFD06KPx
Ijq6Pt6kC2abcYtKCMCWmpzttQAq4csWbmWINKkD6GMkuJVpzEx3csg8rCyPCaX0
HedLiKRqaSOzDRnIWfD2CQX6qMg8TNtnxFnZTlc9honxnwcGaeLZKNEg+1oPA40V
NOffBIMF4DAV
-----END CERTIFICATE-----

View File

@@ -0,0 +1,91 @@
version: "3.7"
services:
  beam-proxy:
    image: samply/beam-proxy:develop-sockets
    container_name: bridgehead-beam-proxy
    environment:
      BROKER_URL: ${BROKER_URL}
      PROXY_ID: ${PROXY_ID}
      PRIVKEY_FILE: /run/secrets/proxy.pem
      ALL_PROXY: http://forward_proxy:3128
      TLS_CA_CERTIFICATES_DIR: /conf/trusted-ca-certs
      ROOTCERT_FILE: /conf/root.crt.pem
      APP_beamsel_KEY: ${BEAMSEL_SECRET}
    secrets:
      - proxy.pem
    depends_on:
      - "forward_proxy"
    volumes:
      - /etc/bridgehead/trusted-ca-certs:/conf/trusted-ca-certs:ro
      - /srv/docker/bridgehead/minimal/modules/onko.root.pem:/conf/root.crt.pem:ro
  postgres:
    image: postgres:9.5-alpine
    container_name: bridgehead-onkofdz-postgres
    environment:
      POSTGRES_DB: mainzelliste-sel
      POSTGRES_USER: mainzelliste-sel
      POSTGRES_PASSWORD: ${MAINZELLISTE_DB_PASSWORD}
    volumes:
      # - ./postgres-logs:/var/log/postgresql
      - ml-data:/var/lib/postgresql/data
    depends_on:
      - secureepilinker
  mainzelliste:
    image: medicalinformatics/mainzelliste:secureepilinker-alpha
    container_name: bridgehead-onkofdz-mainzelliste
    environment:
      ML_API_KEY: ${LOCAL_SEL_API_KEY}
      ML_DB_HOST: postgres
      ML_DB_PORT: 5432
      ML_DB_USER: mainzelliste-sel
      ML_DB_NAME: mainzelliste-sel
      ML_DB_PASS: ${MAINZELLISTE_DB_PASSWORD}
      ML_LOCAL_ID: ${SITE_ID}
      ML_LOCAL_SEL_URL: http://secureepilinker:8161
      ML_LOCAL_CALLBACK_LINK_URL: http://mainzelliste:8080/Communicator/linkCallback
      ML_LOCAL_CALLBACK_MATCH_URL: http://mainzelliste:8080/Communicator/matchCallback/${REMOTE_SEL_SITE}
      ML_LOCAL_DATA_SERVICE_URL: http://mainzelliste:8080/Communicator/getAllRecords
      ML_LOCAL_AUTHENTICATION_TYPE: apiKey
      ML_LOCAL_API_KEY: ${LOCAL_SEL_API_KEY}
      ML_SERVER_0_REMOTEID: ${REMOTE_SEL_SITE}
      ML_SERVER_0_IDTYPE: link-${SITE_ID}-${REMOTE_SEL_SITE}
      ML_SERVER_0_REMOTE_SEL_URL: http://beamsel:8080
      ML_SERVER_0_APIKEY: ${REMOTE_SEL_API_KEY}
      ### Linkage Service not used for matching
      ML_SERVER_0_LINKAGE_SERVICE_BASE_URL: ${LS_SEL_URL}
      ML_SERVER_0_LINKAGE_SERVICE_AUTH_TYPE: apiKey
      ML_SERVER_0_LINKAGE_SERVICE_SHARED_KEY: ${LS_SEL_SHARED_KEY}
      ML_LOG_MODE: stdout #stdout=stdout everything else =logging in mainzelliste.log
      ML_LOG_LEVEL: INFO
      no_proxy: "localhost,secureepilinker"
    volumes:
      # - ./logs:/usr/local/tomcat/logs/
      - /etc/bridgehead/onkofdz/config/mainzelliste.conf.docker:/run/secrets/mainzelliste.docker.conf
      - /etc/bridgehead/onkofdz/config/sel.conf.docker:/run/secrets/sel.docker.conf
    depends_on:
      - postgres
      - secureepilinker
  secureepilinker:
    image: docker.verbis.dkfz.de/onkofdz/secureepilinker:beamsel
    container_name: bridgehead-onkofdz-secureepilinker
    environment:
      no_proxy: "mainzelliste,beamsel"
    volumes:
      - "/etc/bridgehead/onkofdz/config/epilinker.serverconf.json:/data/serverconf.json"
    command: '-vvvv'
  beamsel:
    image: docker.verbis.dkfz.de/onkofdz/beam-sel
    container_name: bridgehead-onkofdz-beamsel
    environment:
      BEAM_URL: "http://beam-proxy:8081"
      BEAM_SECRET: ${BEAMSEL_SECRET}
      BEAM_ID: beamsel.${PROXY_ID}
      SEL_ADDR: "secureepilinker:8161"
    depends_on:
      - secureepilinker

volumes:
  ml-data:
secrets:
  proxy.pem:
    file: /etc/bridgehead/pki/${SITE_ID}.priv.pem

View File

@@ -0,0 +1,15 @@
#!/bin/bash

if [ -n "${ENABLE_ONKOFDZ}" ]; then
  BROKER_ID=test.broker.onkofdz.samply.de
  BROKER_URL=https://${BROKER_ID}
  PROXY_ID=${SITE_ID}.${BROKER_ID}
  BEAMSEL_SECRET="$(cat /proc/sys/kernel/random/uuid | sed 's/[-]//g' | head -c 20)"
  SUPPORT_EMAIL=tobias.kussel@dkfz-heidelberg.de
  PRIVATEKEYFILENAME=/etc/bridgehead/pki/${SITE_ID}.priv.pem
  BROKER_URL_FOR_PREREQ=$BROKER_URL

  log INFO "Loading OnkoFDZ module"
  OVERRIDE+=" -f ./$PROJECT/modules/onkofdz-compose.yml"
fi