Compare commits

...

175 Commits

Author SHA1 Message Date
5ca134c35d Changed: image used for cbioportal and teiler backend 2024-02-13 15:12:46 +01:00
c565d14ee4 Changed:
- used image for cbioportal in cbioportal-compose to the Harbor image

- Ran tests to make sure cbioportal and cbioportal API work in Teiler backend.
2024-02-13 15:12:46 +01:00
a9c09c0d05 Added: cBioPortal 2024-02-13 15:12:46 +01:00
2f663c45e8 Add dsCCPhos 2024-02-13 15:12:46 +01:00
ebbe64abee Only sync secrets on startup 2024-02-13 15:12:46 +01:00
26c9e1286d fix: Use strong pw for opal 2024-02-13 15:12:46 +01:00
92984d24f3 fix: Generate stable passwords 2024-02-13 15:12:46 +01:00
abedfdaf64 Added: volume for opal metadata db (III) 2024-02-13 15:12:46 +01:00
c751b72e0e Added: volume for opal metadata db (II) 2024-02-13 15:12:46 +01:00
65eee84a4f Added: volume for opal metadata db 2024-02-13 15:12:45 +01:00
c0f497255c Add test sites 2024-02-13 15:12:45 +01:00
56c2955b5d Add central token manager beam id 2024-02-13 15:12:45 +01:00
0ced9b0e4e Added: allowed-groups 2024-02-13 15:12:45 +01:00
bb23d6f25b Added: Enable MTBA and Enable DataSHIELD to Teiler Backend 2024-02-13 15:12:45 +01:00
8ac4e36ac1 Default values for MTBA 2024-02-13 15:12:45 +01:00
4e809f66cb Use develop version of mtba 2024-02-13 15:12:45 +01:00
ca9f88421c Use last version of mtba 2024-02-13 15:12:45 +01:00
8579bc879f Only users of group DataSHIELD can use R-Studio 2024-02-13 15:12:45 +01:00
18dda72e84 teiler-orchestrator and teiler-dashboard latest 2024-02-13 15:12:45 +01:00
Jan 164d1a66fb fix: undo permission changes on startup 2024-02-13 15:12:45 +01:00
Jan ca7772421b fix: only change permissions on related files 2024-02-13 15:12:45 +01:00
2ec44c9d48 fix: opal ssl cert 2024-02-13 15:12:45 +01:00
9231c99141 fix: chown syntax 2024-02-13 15:12:45 +01:00
2542c478c1 fix: beam connect site renaming 2024-02-13 15:12:45 +01:00
7fb27efae3 fix: Change permissions on new bridgehead dirs 2024-02-13 15:12:45 +01:00
efd26aa761 fix: generate the right beam connect mappings 2024-02-13 15:12:45 +01:00
cd36ab455b feat: Add token-manager to beam 2024-02-13 15:12:45 +01:00
e9eccd5cab fix: Restrict rstudio network access 2024-02-13 15:12:45 +01:00
381633d4a0 move OAUTH2_SECRET 2024-02-13 15:12:45 +01:00
30760075a6 add default template-ids of exporter and reporter 2024-02-13 15:12:44 +01:00
1299ee2ab4 mtba develop 2024-02-13 15:12:44 +01:00
fe56cbbc19 add /oauth2/callback and /mtba to Keycloak private client 2024-02-13 15:12:44 +01:00
bd784e703a mtba latest 2024-02-13 15:12:44 +01:00
109426cf43 Use develop branch for mtba 2024-02-13 15:12:44 +01:00
40d6c7cae5 fix: public client generation 2024-02-13 15:12:44 +01:00
a3c7a002fd Better redirect url handling 2024-02-13 15:12:44 +01:00
91903fae24 Add oauth2_proxy 2024-02-13 15:12:44 +01:00
ea0435bee3 Add comment about PASSWORD and DISABLE_AUTH in R-Studio 2024-02-13 15:12:44 +01:00
ad1b00d16e Add proxy to R-Studio for loading R packages 2024-02-13 15:12:44 +01:00
e696d3b5dd Add Keycloak to MTBA 2024-02-13 15:12:44 +01:00
b683d07f48 Add keycloak configuration 2024-02-13 15:12:44 +01:00
eea53cd877 Add R-Studio Admin Password 2024-02-13 15:12:44 +01:00
b0ced71197 Add generate_password function 2024-02-13 15:12:44 +01:00
185143c084 Bugfix: Export /var/cache/bridgehead/secrets as environment variables 2024-02-13 15:12:44 +01:00
0a1070c3a5 Account for ip address host values 2024-02-13 15:12:44 +01:00
8505057863 Remove port handling when generating redirect url 2024-02-13 15:12:44 +01:00
72d37c87f9 Add function generate_redirect_urls 2024-02-13 15:12:44 +01:00
8aa851bfe4 Update oidc provider to new url 2024-02-13 15:12:44 +01:00
67831ba57b Generate additional redirect url 2024-02-13 15:12:43 +01:00
0545189cec Integrate central Keycloak in Teiler 2024-02-13 15:12:43 +01:00
faf46f9fea Make sure path exists 2024-02-13 15:12:43 +01:00
c8e215199c Setup hostname earlier 2024-02-13 15:12:43 +01:00
c2b994e0d1 Update to new secret-sync semantics 2024-02-13 15:12:43 +01:00
4cd66f6689 Add secret sync to the bridgehead 2024-02-13 15:12:43 +01:00
e99913cdc3 Bugfix: Add version in every docker compose file 2024-02-13 15:12:43 +01:00
afacdae20e Remove unnecessary version of docker-compose.override files 2024-02-13 15:12:43 +01:00
33d955f17d Bugfix: Add version in every docker compose file 2024-02-13 15:12:43 +01:00
c0ff03da6b Remove unnecessary version of docker-compose.override files 2024-02-13 15:12:43 +01:00
6582db0523 MTBA 1.0.0 2024-02-13 15:12:43 +01:00
27948c2a64 Remove nngmSetup in vars 2024-02-13 15:12:43 +01:00
c8e7cd5f25 Prevent creation of volumes 2024-02-13 15:12:43 +01:00
f2c55ade84 Use Bridgehead's internal http proxy 2024-02-13 15:12:43 +01:00
b20758bc3d Make Opal use proxy server 2024-02-13 15:12:43 +01:00
8d22ea2c19 Use newest version of beam-connect 2024-02-13 15:12:43 +01:00
9eaaeb5064 Update export template script: FHIR_QUERY to FHIR_PATH 2024-02-13 15:12:43 +01:00
c9b19a9368 /var/cache for mtba 2024-02-13 15:12:43 +01:00
b0b599c96b Add docu about login in teiler 2024-02-13 15:12:43 +01:00
b7c6b15425 Bugfix: datashield local.json as array 2024-02-13 15:12:43 +01:00
8e181a85c2 Prevent anonymous volume creation 2024-02-13 15:12:42 +01:00
b9dbfd4803 Bugfix: Exporter and Reporter /var/cache volumes 2024-02-13 15:12:42 +01:00
3389145c1d Postgres 15.4 in datashield, exporter and login 2024-02-13 15:12:42 +01:00
7d8b83b10c Update ccp/modules/mtba.md
Co-authored-by: Martin Lablans <6804500+lablans@users.noreply.github.com>
2024-02-13 15:12:42 +01:00
d799554f86 Bugfix: Add version of docker-compose 2024-02-13 15:12:42 +01:00
4a4a1d76a7 Make sure copy works and the correct owner is set 2024-02-13 15:12:42 +01:00
785dff29bf Add all sites 2024-02-13 15:12:42 +01:00
1f9733aa4d Auto generate mappings 2024-02-13 15:12:42 +01:00
c5578f81ee Add ccp to /var/cache/bridgehead/* volumes 2024-02-13 15:12:42 +01:00
fa5459c4dd Rewrite comments 2024-02-13 15:12:42 +01:00
31320a856c Move exporter db to /var/cache/bridgehead 2024-02-13 15:12:42 +01:00
a340b959c2 Exporter cache 2024-02-13 15:12:42 +01:00
c7d0bcf94d Rework commented sections 2024-02-13 15:12:42 +01:00
51ca9efe11 Autogenerate maps for Opal's beam-connect. To be completed by @Threated with a map-generator in the script. 2024-02-13 15:12:42 +01:00
1ef6142306 Cache opal in /var/cache/bridgehead 2024-02-13 15:12:42 +01:00
e71495c70f Bugfix: Add version in every docker compose file 2024-02-13 15:12:42 +01:00
e6b7a63ef7 #!/bin/bash -e 2024-02-13 15:12:42 +01:00
86ca652e8d Bugfix: LDM_AUTH instead of LDM_PASSWORD 2024-02-13 15:12:42 +01:00
d1a0153a6e Rename Teiler Backend, Teiler Dashboard and Teiler Orchestrator 2024-02-13 15:12:42 +01:00
5b88df4912 Add site to exporter and reporter 2024-02-13 15:12:41 +01:00
8345a9b1f1 Add forward proxy to teiler-core 2024-02-13 15:12:41 +01:00
3d210ea303 Bugfix: mtba labels 2024-02-13 15:12:41 +01:00
d8451c0426 Bugfix: exporter 2024-02-13 15:12:41 +01:00
b63eb141f6 Disable datashield 2024-02-13 15:12:41 +01:00
f24dfa43ef Bugfix: MTBA path prefix 2024-02-13 15:12:41 +01:00
b6e03f3a78 Enable datashield 2024-02-13 15:12:41 +01:00
8d1a7e7374 Comment Keycloak volume 2024-02-13 15:12:41 +01:00
4b705be264 Fix comment in login-compose.yml 2024-02-13 15:12:41 +01:00
3e79598533 Remove old comment of exporter-setup.sh 2024-02-13 15:12:41 +01:00
6a97630bf1 Comment on datashield volume 2024-02-13 15:12:41 +01:00
3fdda8b8d4 Comment on export and report volumes 2024-02-13 15:12:41 +01:00
de0193b99b Bugfix: JAVA_OPTS for exporter 2024-02-13 15:12:41 +01:00
08c9e6c822 Add mtba module documentation 2024-02-13 15:12:41 +01:00
61bfd661eb Add teiler-ui module documentation 2024-02-13 15:12:41 +01:00
9e0c7fb5ca Add some docs about beam-connect 2024-02-13 15:12:41 +01:00
ce5f94881f Add login module documentation 2024-02-13 15:12:41 +01:00
cb332142c3 Add Exporter module documentation 2024-02-13 15:12:41 +01:00
0cad79575b Add DataSHIELD module documentation 2024-02-13 15:12:41 +01:00
5f77168f65 Add JAVA_OPTS to reporter and exporter 2024-02-13 15:12:41 +01:00
361950ba7f Enable Login, Teiler and Exporter 2024-02-13 15:12:41 +01:00
cb659b87f1 Export and QB Curl templates 2024-02-13 15:12:40 +01:00
0801ebe5a5 Generate exporter api key automatically 2024-02-13 15:12:40 +01:00
ceb089ddd7 Add default language to ccp 2024-02-13 15:12:40 +01:00
f71832dd65 Remove updater cron of teiler-core 2024-02-13 15:12:40 +01:00
17036d459e Remove clean temp files configuration of exporter 2024-02-13 15:12:40 +01:00
146417f103 Add explanation why is the volume of exporter-db currently so important for us. 2024-02-13 15:12:40 +01:00
0cd0dc555b Add Teiler Admin to Keycloak 2024-02-13 15:12:40 +01:00
d3ecef5f04 Remove IS_DKTK_SITE 2024-02-13 15:12:40 +01:00
0caf98224f Use default user rstudio in rstudio 2024-02-13 15:12:40 +01:00
412419494c Generate DATASHIELD_CONNECT_SECRET automatically 2024-02-13 15:12:40 +01:00
2f7797b1f1 Update ccp/modules/datashield-compose.yml 2024-02-13 15:12:40 +01:00
dcd31bfd7c Update ccp/modules/datashield-compose.yml 2024-02-13 15:12:40 +01:00
90afe71b1b Use LDM_PASSWORD for all admin passwords 2024-02-13 15:12:40 +01:00
d5924e64a3 Enable only if true 2024-02-13 15:12:40 +01:00
dfa12ca686 Remove todo in rstudio 2024-02-13 15:12:40 +01:00
213ac6370a Remove unnecessary version of docker-compose.override files 2024-02-13 15:12:40 +01:00
9732ef33b7 R-Server rock-base:6.3 2024-02-13 15:12:40 +01:00
dc6e0a349f User postgres if docker.verbis.dkfz.de 2024-02-13 15:12:40 +01:00
cb909dbcd4 Remove ports of beam-connect in datashield-compose.yml 2024-02-13 15:12:40 +01:00
69f1748ae7 Generate passwords only if modules are enabled 2024-02-13 15:12:40 +01:00
0cbbee6906 Add http scheme to exporter 2024-02-13 15:12:39 +01:00
7170612376 Migrate to new app key syntax 2024-02-13 15:12:39 +01:00
44134550ac update new broker 2024-02-13 15:12:39 +01:00
4ba5144140 Add parameter LOG_FHIR_VALIDATION to exporter 2024-02-13 15:12:39 +01:00
0d07a09296 Add reporter 2024-02-13 15:12:39 +01:00
5b62a1a248 Switch to no-auth branch of beam-connect 2024-02-13 15:12:39 +01:00
bdf943f94e secrets are readonly by default 2024-02-13 15:12:39 +01:00
bc476ed0a8 change to dockerhub image 2024-02-13 15:12:39 +01:00
f351cc931c Change beam-connect version and load opal cert 2024-02-13 15:12:39 +01:00
9dd3c24a6d Change cert permission and location 2024-02-13 15:12:39 +01:00
b4581e8b3a ccp.conf in teiler-core as secret 2024-02-13 15:12:39 +01:00
da49437ada Add opal certificate 2024-02-13 15:12:39 +01:00
1f4c2cad03 Remove unnecessary volumes 2024-02-13 15:12:39 +01:00
cae40aa39a Remove mongo db 2024-02-13 15:12:39 +01:00
8679d46b62 Add Opal Password in Exporter 2024-02-13 15:12:39 +01:00
1438c32455 expose beam connect ports 2024-02-13 15:12:39 +01:00
bfd33c0c1b Fix beam connect app id 2024-02-13 15:12:39 +01:00
0230303bd5 beam connect and move beam-connect config 2024-02-13 15:12:39 +01:00
a86e594e85 Experiment 2024-02-13 15:12:39 +01:00
0807e52160 Add beam connect to docker-compose 2024-02-13 15:12:39 +01:00
75f9b73e98 Experiment 2024-02-13 15:12:38 +01:00
90e2f2e40b Add DataSHIELD 2024-02-13 15:12:38 +01:00
b0da23ac1c Bugfix: cross origins of exporter 2024-02-13 15:12:38 +01:00
c836a7554f Merge id-management-setup with main 2024-02-13 15:12:38 +01:00
fdd26083b6 Change salt string for exporter and login 2024-02-13 15:12:38 +01:00
3dcf83e4e8 Adapt teiler-ui to traefik 2024-02-13 15:12:38 +01:00
a13b851edd Add keycloak teiler app to teiler-ui 2024-02-13 15:12:38 +01:00
a2e9a86bc0 Change volume names for teiler components 2024-02-13 15:12:38 +01:00
1bb6df65fe Add stripprefix to teiler-ui 2024-02-13 15:12:38 +01:00
8e591773f4 Add stripprefix to teiler-core 2024-02-13 15:12:38 +01:00
d915debbbb Add forward strategy to teiler-core 2024-02-13 15:12:38 +01:00
2c7bdfd868 Add forward strategy to teiler-core 2024-02-13 15:12:38 +01:00
cf255bf08b Deactivate traefik for mtba 2024-02-13 15:12:38 +01:00
813a698dbb Tidy teiler and mtba volumes 2024-02-13 15:12:38 +01:00
30090f3633 Add Teiler Core 2024-02-13 15:12:38 +01:00
52f311ba1c Add Endpoint for Teiler 2024-02-13 15:12:38 +01:00
f32d124fda Update Teiler Core config 2024-02-13 15:12:38 +01:00
8140b6dd7b Add original Keycloak config 2024-02-13 15:12:38 +01:00
a63bdbde54 update Keycloak config 2024-02-13 15:12:37 +01:00
7b24e2b427 beautiful config 2024-02-13 15:12:37 +01:00
5a9ab31fa4 Rename teiler to exporter (bugfix) 2024-02-13 15:12:37 +01:00
85d333cfe8 Rename teiler to exporter 2024-02-13 15:12:37 +01:00
0b61fc7f29 Configure login external URLs 2024-02-13 15:12:37 +01:00
39981c310c Bugfix: services in teiler-ui-compose.yml 2024-02-13 15:12:37 +01:00
2f72ac2dc9 Change images of dktk-teiler and dktk-keycloak 2024-02-13 15:12:37 +01:00
81c0db0349 Add nngm and exliquid modules 2024-02-13 15:12:37 +01:00
005e5a1bf0 Add Teiler UI and Teiler module 2024-02-13 15:12:37 +01:00
33 changed files with 907 additions and 24 deletions

.gitignore (vendored, 2 changes)

@ -1,7 +1,7 @@
##Ignore site configuration
.gitmodules
site-config/*
.idea
## Ignore site configuration
*/docker-compose.override.yml

View File

@ -1,3 +1,5 @@
version: "3.7"
services:
directory_sync_service:
image: "docker.verbis.dkfz.de/cache/samply/directory_sync_service"

View File

@ -41,6 +41,7 @@ case "$PROJECT" in
;;
esac
# TODO: Please add proper documentation for variable priorities (1. secrets, 2. vars, 3. PROJECT.local.conf, 4. PROJECT.conf, 5. ???)
loadVars() {
# Load variables from /etc/bridgehead and /srv/docker/bridgehead
set -a
@ -50,6 +51,7 @@ loadVars() {
source /etc/bridgehead/$PROJECT.local.conf || fail_and_report 1 "Found /etc/bridgehead/$PROJECT.local.conf but failed to import"
fi
fetchVarsFromVaultByFile /etc/bridgehead/$PROJECT.conf || fail_and_report 1 "Unable to fetchVarsFromVaultByFile"
setHostname
[ -e ./$PROJECT/vars ] && source ./$PROJECT/vars
set +a
@ -64,7 +66,6 @@ loadVars() {
OVERRIDE+=" -f ./$PROJECT/docker-compose.override.yml"
fi
detectCompose
setHostname
setupProxy
# Set some project-independent default values
@ -89,6 +90,7 @@ case "$ACTION" in
loadVars
hc_send log "Bridgehead $PROJECT startup: Checking requirements ..."
checkRequirements
sync_secrets
hc_send log "Bridgehead $PROJECT startup: Requirements checked out. Now starting bridgehead ..."
exec $COMPOSE -p $PROJECT -f ./minimal/docker-compose.yml -f ./$PROJECT/docker-compose.yml $OVERRIDE up --abort-on-container-exit
;;

View File

@ -52,6 +52,12 @@ services:
- /etc/bridgehead/trusted-ca-certs:/conf/trusted-ca-certs:ro
- /srv/docker/bridgehead/ccp/root.crt.pem:/conf/root.crt.pem:ro
traefik:
labels:
- "traefik.http.middlewares.oidcAuth.forwardAuth.address=http://oauth2_proxy:4180/"
- "traefik.http.middlewares.oidcAuth.forwardAuth.trustForwardHeader=true"
- "traefik.http.middlewares.oidcAuth.forwardAuth.authResponseHeaders=X-Auth-Request-Access-Token,Authorization"
volumes:
blaze-data:

View File

@ -0,0 +1,53 @@
version: '3.7'
services:
cbioportal:
# image: docker.verbis.dkfz.de/ccp/dktk-cbioportal:latest
image: dktk-cbioportal
container_name: bridgehead-cbioportal
environment:
DB_PASSWORD: ${CBIOPORTAL_DB_PASSWORD}
HTTP_RELATIVE_PATH: "/cbioportal"
UPLOAD_HTTP_RELATIVE_PATH: "/cbioportal-upload"
depends_on:
- cbioportal-database
- cbioportal-session
labels:
- "traefik.enable=true"
- "traefik.http.routers.cbioportal.rule=PathPrefix(`/cbioportal`)"
- "traefik.http.routers.cbioportal.service=cbioportal"
- "traefik.http.services.cbioportal.loadbalancer.server.port=8080"
- "traefik.http.routers.cbioportal.tls=true"
- "traefik.http.routers.cbioportal-upload.rule=PathPrefix(`/cbioportal-upload`)"
- "traefik.http.routers.cbioportal-upload.service=cbioportal-upload"
- "traefik.http.routers.cbioportal-upload.tls=true"
- "traefik.http.services.cbioportal-upload.loadbalancer.server.port=8001"
cbioportal-database:
image: docker.verbis.dkfz.de/ccp/dktk-cbioportal-database:latest
container_name: bridgehead-cbioportal-database
environment:
MYSQL_DATABASE: cbioportal
MYSQL_USER: cbio_user
MYSQL_PASSWORD: ${CBIOPORTAL_DB_PASSWORD}
MYSQL_ROOT_PASSWORD: ${CBIOPORTAL_DB_ROOT_PASSWORD}
volumes:
- /var/cache/bridgehead/ccp/cbioportal_db_data:/var/lib/mysql
cbioportal-session:
image: cbioportal/session-service:0.6.1
container_name: bridgehead-cbioportal-session
environment:
SERVER_PORT: 5000
JAVA_OPTS: -Dspring.data.mongodb.uri=mongodb://cbioportal-session-database:27017/session-service
depends_on:
- cbioportal-session-database
cbioportal-session-database:
image: mongo:4.2
container_name: bridgehead-cbioportal-session-database
environment:
MONGO_INITDB_DATABASE: session_service
volumes:
- /var/cache/bridgehead/ccp/cbioportal_session_db_data:/data/db

View File

@ -0,0 +1,8 @@
#!/bin/bash -e
if [ "$ENABLE_CBIOPORTAL" == true ]; then
log INFO "cBioPortal setup detected -- will start cBioPortal service."
OVERRIDE+=" -f ./$PROJECT/modules/cbioportal-compose.yml"
CBIOPORTAL_DB_PASSWORD="$(echo \"This is a salt string to generate one consistent password for the cbioportal database. It is not required to be secret.\" | openssl rsautl -sign -inkey /etc/bridgehead/pki/${SITE_ID}.priv.pem | base64 | head -c 30)"
CBIOPORTAL_DB_ROOT_PASSWORD="$(echo \"This is a salt string to generate one consistent root password for the cbioportal database. It is not required to be secret.\" | openssl rsautl -sign -inkey /etc/bridgehead/pki/${SITE_ID}.priv.pem | base64 | head -c 64)"
fi

ccp/modules/cbioportal.md (new file, 10 lines)

@ -0,0 +1,10 @@
# cBioPortal Data Uploader
## Usage
We have integrated an API that allows you to upload data directly to cBioPortal without needing a cBioPortal installation on your own system.
## Tech stack
We used Flask to add this feature.
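
A minimal sketch of how an upload could look from the command line once the module is enabled (ENABLE_CBIOPORTAL). Only the /cbioportal-upload route prefix and ${HOST} come from cbioportal-compose.yml; the path suffix and the form field name below are placeholders, not the actual Flask endpoint:

# Hypothetical upload call -- endpoint path and form field are assumptions;
# -k tolerates a self-signed bridgehead certificate.
curl -k --location --request POST "https://${HOST}/cbioportal-upload/" \
     --form "study=@/path/to/study.zip"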

View File

@ -0,0 +1,161 @@
version: "3.7"
services:
rstudio:
container_name: bridgehead-rstudio
image: docker.verbis.dkfz.de/ccp/dktk-rstudio:latest
environment:
#DEFAULT_USER: "rstudio" # This line is kept for informational purposes
PASSWORD: "${RSTUDIO_ADMIN_PASSWORD}" # It is required, even if the authentication is disabled
DISABLE_AUTH: "true" # https://rocker-project.org/images/versioned/rstudio.html#how-to-use
HTTP_RELATIVE_PATH: "/rstudio"
ALL_PROXY: "http://forward_proxy:3128" # https://rocker-project.org/use/networking.html
labels:
- "traefik.enable=true"
- "traefik.http.routers.rstudio_ccp.rule=PathPrefix(`/rstudio`)"
- "traefik.http.services.rstudio_ccp.loadbalancer.server.port=8787"
- "traefik.http.middlewares.rstudio_ccp_strip.stripprefix.prefixes=/rstudio"
- "traefik.http.routers.rstudio_ccp.tls=true"
- "traefik.http.routers.rstudio_ccp.middlewares=oidcAuth,rstudio_ccp_strip"
networks:
- rstudio
opal:
container_name: bridgehead-opal
image: docker.verbis.dkfz.de/ccp/dktk-opal:latest
labels:
- "traefik.enable=true"
- "traefik.http.routers.opal_ccp.rule=PathPrefix(`/opal`)"
- "traefik.http.services.opal_ccp.loadbalancer.server.port=8080"
- "traefik.http.routers.opal_ccp.tls=true"
links:
- opal-rserver
- opal-db
environment:
JAVA_OPTS: "-Xms1G -Xmx8G -XX:+UseG1GC -Dhttps.proxyHost=forward_proxy -Dhttps.proxyPort=3128"
# OPAL_ADMINISTRATOR_USER: "administrator" # This line is kept for informational purposes
OPAL_ADMINISTRATOR_PASSWORD: "${OPAL_ADMIN_PASSWORD}"
POSTGRESDATA_HOST: "opal-db"
POSTGRESDATA_DATABASE: "opal"
POSTGRESDATA_USER: "opal"
POSTGRESDATA_PASSWORD: "${OPAL_DB_PASSWORD}"
ROCK_HOSTS: "opal-rserver:8085"
APP_URL: "https://${HOST}/opal"
APP_CONTEXT_PATH: "/opal"
OPAL_PRIVATE_KEY: "/run/secrets/opal-key.pem"
OPAL_CERTIFICATE: "/run/secrets/opal-cert.pem"
KEYCLOAK_URL: "${KEYCLOAK_URL}"
KEYCLOAK_REALM: "${KEYCLOAK_REALM}"
KEYCLOAK_CLIENT_ID: "${KEYCLOAK_PRIVATE_CLIENT_ID}"
KEYCLOAK_CLIENT_SECRET: "${OIDC_CLIENT_SECRET}"
KEYCLOAK_ADMIN_GROUP: "${KEYCLOAK_ADMIN_GROUP}"
TOKEN_MANAGER_PASSWORD: "${TOKEN_MANAGER_OPAL_PASSWORD}"
EXPORTER_PASSWORD: "${EXPORTER_OPAL_PASSWORD}"
BEAM_APP_ID: token-manager.${PROXY_ID}
BEAM_SECRET: ${TOKEN_MANAGER_SECRET}
BEAM_DATASHIELD_PROXY: request-manager
volumes:
- "/var/cache/bridgehead/ccp/opal-metadata-db:/srv" # Opal metadata
secrets:
- opal-cert.pem
- opal-key.pem
opal-db:
container_name: bridgehead-opal-db
image: docker.verbis.dkfz.de/cache/postgres:15.4-alpine
environment:
POSTGRES_PASSWORD: "${OPAL_DB_PASSWORD}" # Set in datashield-setup.sh
POSTGRES_USER: "opal"
POSTGRES_DB: "opal"
volumes:
- "/var/cache/bridgehead/ccp/opal-db:/var/lib/postgresql/data" # Opal project data (imported from exporter)
opal-rserver:
container_name: bridgehead-opal-rserver
image: docker.verbis.dkfz.de/ccp/dktk-rserver # datashield/rock-base + dsCCPhos
tmpfs:
- /srv
beam-connect:
image: docker.verbis.dkfz.de/cache/samply/beam-connect:develop
container_name: bridgehead-datashield-connect
environment:
PROXY_URL: "http://beam-proxy:8081"
TLS_CA_CERTIFICATES_DIR: /run/secrets
APP_ID: datashield-connect.${SITE_ID}.${BROKER_ID}
PROXY_APIKEY: ${DATASHIELD_CONNECT_SECRET}
DISCOVERY_URL: "./map/central.json"
LOCAL_TARGETS_FILE: "./map/local.json"
NO_AUTH: "true"
secrets:
- opal-cert.pem
depends_on:
- beam-proxy
volumes:
- /tmp/bridgehead/opal-map/:/map/:ro
networks:
- default
- rstudio
traefik:
networks:
- default
- rstudio
forward_proxy:
networks:
- default
- rstudio
beam-proxy:
environment:
APP_datashield-connect_KEY: ${DATASHIELD_CONNECT_SECRET}
APP_token-manager_KEY: ${TOKEN_MANAGER_SECRET}
# TODO: Allow users of group /DataSHIELD and KEYCLOAK_USER_GROUP at the same time:
# Maybe a solution would be (https://oauth2-proxy.github.io/oauth2-proxy/docs/configuration/overview/):
# --allowed-groups=/DataSHIELD,KEYCLOAK_USER_GROUP
oauth2_proxy:
image: quay.io/oauth2-proxy/oauth2-proxy
container_name: bridgehead_oauth2_proxy
command: >-
--allowed-group=/DataSHIELD
--oidc-groups-claim=${KEYCLOAK_GROUP_CLAIM}
--auth-logging=true
--whitelist-domain=${HOST}
--http-address="0.0.0.0:4180"
--reverse-proxy=true
--upstream="static://202"
--email-domain="*"
--cookie-name="_BRIDGEHEAD_oauth2"
--cookie-secret="${OAUTH2_PROXY_SECRET}"
--cookie-expire="12h"
--cookie-secure="true"
--cookie-httponly="true"
#OIDC settings
--provider="keycloak-oidc"
--provider-display-name="VerbIS Login"
--client-id="${KEYCLOAK_PRIVATE_CLIENT_ID}"
--client-secret="${OIDC_CLIENT_SECRET}"
--redirect-url="https://${HOST}${OAUTH2_CALLBACK}"
--oidc-issuer-url="${KEYCLOAK_ISSUER_URL}"
--scope="openid email profile"
--code-challenge-method="S256"
--skip-provider-button=true
#X-Forwarded-Header settings - true/false depending on your needs
--pass-basic-auth=true
--pass-user-headers=false
--pass-access-token=false
labels:
- "traefik.enable=true"
- "traefik.http.routers.oauth2_proxy.rule=Host(`${HOST}`) && PathPrefix(`/oauth2`, `/oauth2/callback`)"
- "traefik.http.services.oauth2_proxy.loadbalancer.server.port=4180"
- "traefik.http.routers.oauth2_proxy.tls=true"
secrets:
opal-cert.pem:
file: /tmp/bridgehead/opal-cert.pem
opal-key.pem:
file: /tmp/bridgehead/opal-key.pem
networks:
rstudio:

View File

@ -0,0 +1,157 @@
<template id="opal-ccp" source-id="blaze-store" opal-project="ccp-demo" target-id="opal" >
<container csv-filename="Patient-${TIMESTAMP}.csv" opal-table="patient" opal-entity-type="Patient">
<attribute csv-column="patient-id" opal-value-type="text" primary-key="true" val-fhir-path="Patient.id.value" anonym="Pat" op="EXTRACT_RELATIVE_ID"/>
<attribute csv-column="dktk-id-global" opal-value-type="text" val-fhir-path="Patient.identifier.where(type.coding.code = 'Global').value.value"/>
<attribute csv-column="dktk-id-lokal" opal-value-type="text" val-fhir-path="Patient.identifier.where(type.coding.code = 'Lokal').value.value" />
<attribute csv-column="geburtsdatum" opal-value-type="date" val-fhir-path="Patient.birthDate.value"/>
<attribute csv-column="geschlecht" opal-value-type="text" val-fhir-path="Patient.gender.value" />
<attribute csv-column="datum_des_letztbekannten_vitalstatus" opal-value-type="date" val-fhir-path="Observation.where(code.coding.code = '75186-7').effective.value" join-fhir-path="/Observation.where(code.coding.code = '75186-7').subject.reference.value"/>
<attribute csv-column="vitalstatus" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '75186-7').value.coding.code.value" join-fhir-path="/Observation.where(code.coding.code = '75186-7').subject.reference.value"/>
<!--missing in ADT2FHIR--><attribute csv-column="tod_tumorbedingt" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '68343-3').value.coding.where(system = 'http://fhir.de/CodeSystem/bfarm/icd-10-gm').code.value" join-fhir-path="/Observation.where(code.coding.code = '68343-3').subject.reference.value"/>
<!--missing in ADT2FHIR--><attribute csv-column="todesursachen" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '68343-3').value.coding.where(system = 'http://dktk.dkfz.de/fhir/onco/core/CodeSystem/JNUCS').code.value" join-fhir-path="/Observation.where(code.coding.code = '68343-3').subject.reference.value"/>
</container>
<container csv-filename="Diagnosis-${TIMESTAMP}.csv" opal-table="diagnosis" opal-entity-type="Diagnosis">
<attribute csv-column="diagnosis-id" primary-key="true" opal-value-type="text" val-fhir-path="Condition.id.value" anonym="Dia" op="EXTRACT_RELATIVE_ID"/>
<attribute csv-column="patient-id" opal-value-type="text" val-fhir-path="Condition.subject.reference.value" anonym="Pat"/>
<attribute csv-column="primaerdiagnose" opal-value-type="text" val-fhir-path="Condition.code.coding.code.value"/>
<attribute csv-column="tumor_diagnosedatum" opal-value-type="date" val-fhir-path="Condition.onset.value"/>
<attribute csv-column="primaertumor_diagnosetext" opal-value-type="text" val-fhir-path="Condition.code.text.value"/>
<attribute csv-column="version_des_icd-10_katalogs" opal-value-type="integer" val-fhir-path="Condition.code.coding.version.value"/>
<attribute csv-column="lokalisation" opal-value-type="text" val-fhir-path="Condition.bodySite.coding.where(system = 'urn:oid:2.16.840.1.113883.6.43.1').code.value"/>
<attribute csv-column="icd-o_katalog_topographie_version" opal-value-type="text" val-fhir-path="Condition.bodySite.coding.where(system = 'urn:oid:2.16.840.1.113883.6.43.1').version.value"/>
<attribute csv-column="seitenlokalisation_nach_adt-gekid" opal-value-type="text" val-fhir-path="Condition.bodySite.coding.where(system = 'http://dktk.dkfz.de/fhir/onco/core/CodeSystem/SeitenlokalisationCS').code.value"/>
</container>
<container csv-filename="Progress-${TIMESTAMP}.csv" opal-table="progress" opal-entity-type="Progress">
<!--it would be better to generate an ID, instead of extracting the ClinicalImpression id-->
<attribute csv-column="progress-id" primary-key="true" opal-value-type="text" val-fhir-path="ClinicalImpression.id.value" anonym="Pro" op="EXTRACT_RELATIVE_ID"/>
<attribute csv-column="diagnosis-id" opal-value-type="text" val-fhir-path="ClinicalImpression.problem.reference.value" anonym="Dia"/>
<attribute csv-column="patient-id" opal-value-type="text" val-fhir-path="ClinicalImpression.subject.reference.value" anonym="Pat" />
<attribute csv-column="untersuchungs-_befunddatum_im_verlauf" opal-value-type="date" val-fhir-path="ClinicalImpression.effective.value" />
<!-- just for evaluation: redundant to Untersuchungs-, Befunddatum im Verlauf-->
<attribute csv-column="datum_lokales_oder_regionaeres_rezidiv" opal-value-type="date" val-fhir-path="Observation.where(code.coding.code = 'LA4583-6').effective.value" join-fhir-path="ClinicalImpression.finding.itemReference.reference.value" />
<attribute csv-column="gesamtbeurteilung_tumorstatus" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '21976-6').value.coding.code.value" join-fhir-path="ClinicalImpression.finding.itemReference.reference.value"/>
<attribute csv-column="lokales_oder_regionaeres_rezidiv" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = 'LA4583-6').value.coding.code.value" join-fhir-path="ClinicalImpression.finding.itemReference.reference.value"/>
<attribute csv-column="lymphknoten-rezidiv" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = 'LA4370-8').value.coding.code.value" join-fhir-path="ClinicalImpression.finding.itemReference.reference.value" />
<attribute csv-column="fernmetastasen" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = 'LA4226-2').value.coding.code.value" join-fhir-path="ClinicalImpression.finding.itemReference.reference.value" />
</container>
<container csv-filename="Histology-${TIMESTAMP}.csv" opal-table="histology" opal-entity-type="Histology" >
<attribute csv-column="histology-id" primary-key="true" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '59847-4').id" anonym="His" op="EXTRACT_RELATIVE_ID"/>
<attribute csv-column="diagnosis-id" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '59847-4').focus.reference.value" anonym="Dia"/>
<attribute csv-column="patient-id" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '59847-4').subject.reference.value" anonym="Pat" />
<attribute csv-column="histologie_datum" opal-value-type="date" val-fhir-path="Observation.where(code.coding.code = '59847-4').effective.value"/>
<attribute csv-column="icd-o_katalog_morphologie_version" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '59847-4').value.coding.version.value" />
<attribute csv-column="morphologie" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '59847-4').value.coding.code.value"/>
<attribute csv-column="morphologie-freitext" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '59847-4').value.text.value"/>
<attribute csv-column="grading" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '59542-1').value.coding.code.value" join-fhir-path="Observation.where(code.coding.code = '59847-4').hasMember.reference.value"/>
</container>
<container csv-filename="Metastasis-${TIMESTAMP}.csv" opal-table="metastasis" opal-entity-type="Metastasis" >
<attribute csv-column="metastasis-id" primary-key="true" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '21907-1').id" anonym="Met" op="EXTRACT_RELATIVE_ID"/>
<attribute csv-column="diagnosis-id" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '21907-1').focus.reference.value" anonym="Dia"/>
<attribute csv-column="patient-id" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '21907-1').subject.reference.value" anonym="Pat" />
<attribute csv-column="datum_fernmetastasen" opal-value-type="date" val-fhir-path="Observation.where(code.coding.code = '21907-1').effective.value"/>
<attribute csv-column="fernmetastasen_vorhanden" opal-value-type="boolean" val-fhir-path="Observation.where(code.coding.code = '21907-1').value.coding.code.value"/>
<attribute csv-column="lokalisation_fernmetastasen" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '21907-1').bodySite.coding.code.value"/>
</container>
<container csv-filename="TNM-${TIMESTAMP}.csv" opal-table="tnm" opal-entity-type="TNM">
<attribute csv-column="tnm-id" primary-key="true" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '21908-9' or code.coding.code = '21902-2').id" anonym="TNM" op="EXTRACT_RELATIVE_ID"/>
<attribute csv-column="diagnosis-id" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '21908-9' or code.coding.code = '21902-2').focus.reference.value" anonym="Dia"/>
<attribute csv-column="patient-id" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '21908-9' or code.coding.code = '21902-2').subject.reference.value" anonym="Pat" />
<attribute csv-column="datum_der_tnm_dokumentation_datum_befund" opal-value-type="date" val-fhir-path="Observation.where(code.coding.code = '21908-9' or code.coding.code = '21902-2').effective.value"/>
<attribute csv-column="uicc_stadium" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '21908-9' or code.coding.code = '21902-2').value.coding.code.value"/>
<attribute csv-column="tnm-t" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '21908-9' or code.coding.code = '21902-2').component.where(code.coding.code = '21905-5' or code.coding.code = '21899-0').value.coding.code.value"/>
<attribute csv-column="tnm-n" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '21908-9' or code.coding.code = '21902-2').component.where(code.coding.code = '21906-3' or code.coding.code = '21900-6').value.coding.code.value"/>
<attribute csv-column="tnm-m" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '21908-9' or code.coding.code = '21902-2').component.where(code.coding.code = '21907-1' or code.coding.code = '21901-4').value.coding.code.value"/>
<attribute csv-column="c_p_u_preefix_t" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '21908-9' or code.coding.code = '21902-2').component.where(code.coding.code = '21905-5' or code.coding.code = '21899-0').extension('http://dktk.dkfz.de/fhir/StructureDefinition/onco-core-Extension-TNMcpuPraefix').value.coding.code.value"/>
<attribute csv-column="c_p_u_preefix_n" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '21908-9' or code.coding.code = '21902-2').component.where(code.coding.code = '21906-3' or code.coding.code = '21900-6').extension('http://dktk.dkfz.de/fhir/StructureDefinition/onco-core-Extension-TNMcpuPraefix').value.coding.code.value"/>
<attribute csv-column="c_p_u_preefix_m" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '21908-9' or code.coding.code = '21902-2').component.where(code.coding.code = '21907-1' or code.coding.code = '21901-4').extension('http://dktk.dkfz.de/fhir/StructureDefinition/onco-core-Extension-TNMcpuPraefix').value.coding.code.value"/>
<attribute csv-column="tnm-y-symbol" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '21908-9' or code.coding.code = '21902-2').component.where(code.coding.code = '59479-6' or code.coding.code = '59479-6').value.coding.code.value"/>
<attribute csv-column="tnm-r-symbol" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '21908-9' or code.coding.code = '21902-2').component.where(code.coding.code = '21983-2' or code.coding.code = '21983-2').value.coding.code.value"/>
<attribute csv-column="tnm-m-symbol" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '21908-9' or code.coding.code = '21902-2').component.where(code.coding.code = '42030-7' or code.coding.code = '42030-7').value.coding.code.value"/>
<!--only for UICC, not in ADT2FHIR--><attribute csv-column="tnm-version" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '21908-9' or code.coding.code = '21902-2').value.coding.version.value"/>
</container>
<container csv-filename="System-Therapy-${TIMESTAMP}.csv" opal-table="system-therapy" opal-entity-type="SystemTherapy">
<attribute csv-column="system-therapy-id" primary-key="true" opal-value-type="text" val-fhir-path="MedicationStatement.id" anonym="Sys" op="EXTRACT_RELATIVE_ID"/>
<attribute csv-column="diagnosis-id" opal-value-type="text" val-fhir-path="MedicationStatement.reasonReference.reference.value" anonym="Dia"/>
<attribute csv-column="patient-id" opal-value-type="text" val-fhir-path="MedicationStatement.subject.reference.value" anonym="Pat" />
<attribute csv-column="systemische_therapie_stellung_zu_operativer_therapie" opal-value-type="text" val-fhir-path="MedicationStatement.extension('http://dktk.dkfz.de/fhir/StructureDefinition/onco-core-Extension-StellungZurOp').value.coding.code.value"/>
<attribute csv-column="intention_chemotherapie" opal-value-type="text" val-fhir-path="MedicationStatement.extension('http://dktk.dkfz.de/fhir/StructureDefinition/onco-core-Extension-SYSTIntention').value.coding.code.value"/>
<attribute csv-column="therapieart" opal-value-type="text" val-fhir-path="MedicationStatement.category.coding.code.value"/>
<attribute csv-column="systemische_therapie_beginn" opal-value-type="date" val-fhir-path="MedicationStatement.effective.start.value"/>
<attribute csv-column="systemische_therapie_ende" opal-value-type="date" val-fhir-path="MedicationStatement.effective.end.value"/>
<attribute csv-column="systemische_therapie_protokoll" opal-value-type="text" val-fhir-path="MedicationStatement.extension('http://dktk.dkfz.de/fhir/StructureDefinition/onco-core-Extension-SystemischeTherapieProtokoll').value.text.value"/>
<attribute csv-column="systemische_therapie_substanzen" opal-value-type="text" val-fhir-path="MedicationStatement.medication.text.value"/>
<attribute csv-column="chemotherapie" opal-value-type="boolean" val-fhir-path="MedicationStatement.where(category.coding.code = 'CH').exists().value" />
<attribute csv-column="hormontherapie" opal-value-type="boolean" val-fhir-path="MedicationStatement.where(category.coding.code = 'HO').exists().value" />
<attribute csv-column="immuntherapie" opal-value-type="boolean" val-fhir-path="MedicationStatement.where(category.coding.code = 'IM').exists().value" />
<attribute csv-column="knochenmarktransplantation" opal-value-type="boolean" val-fhir-path="MedicationStatement.where(category.coding.code = 'KM').exists().value" />
<attribute csv-column="abwartende_strategie" opal-value-type="boolean" val-fhir-path="MedicationStatement.where(category.coding.code = 'WS').exists().value" />
</container>
<container csv-filename="Surgery-${TIMESTAMP}.csv" opal-table="surgery" opal-entity-type="Surgery">
<attribute csv-column="surgery-id" primary-key="true" opal-value-type="text" val-fhir-path="Procedure.where(category.coding.code = 'OP').id" anonym="Sur" op="EXTRACT_RELATIVE_ID"/>
<attribute csv-column="diagnosis-id" opal-value-type="text" val-fhir-path="Procedure.where(category.coding.code = 'OP').reasonReference.reference.value" anonym="Dia"/>
<attribute csv-column="patient-id" opal-value-type="text" val-fhir-path="Procedure.where(category.coding.code = 'OP').subject.reference.value" anonym="Pat" />
<attribute csv-column="ops-code" opal-value-type="text" val-fhir-path="Procedure.where(category.coding.code = 'OP').code.coding.code.value"/>
<attribute csv-column="datum_der_op" opal-value-type="date" val-fhir-path="Procedure.where(category.coding.code = 'OP').performed.value"/>
<attribute csv-column="intention_op" opal-value-type="text" val-fhir-path="Procedure.extension('http://dktk.dkfz.de/fhir/StructureDefinition/onco-core-Extension-OPIntention').value.coding.code.value"/>
<attribute csv-column="lokale_beurteilung_resttumor" opal-value-type="text" val-fhir-path="Procedure.where(category.coding.code = 'OP').outcome.coding.where(system = 'http://dktk.dkfz.de/fhir/onco/core/CodeSystem/LokaleBeurteilungResidualstatusCS').code.value" />
<attribute csv-column="gesamtbeurteilung_resttumor" opal-value-type="text" val-fhir-path="Procedure.where(category.coding.code = 'OP').outcome.coding.where(system = 'http://dktk.dkfz.de/fhir/onco/core/CodeSystem/GesamtbeurteilungResidualstatusCS').code.value" />
</container>
<container csv-filename="Radiation-Therapy-${TIMESTAMP}.csv" opal-table="radiation-therapy" opal-entity-type="RadiationTherapy">
<attribute csv-column="radiation-therapy-id" primary-key="true" opal-value-type="text" val-fhir-path="Procedure.where(category.coding.code = 'ST').id" anonym="Rad" op="EXTRACT_RELATIVE_ID"/>
<attribute csv-column="diagnosis-id" opal-value-type="text" val-fhir-path="Procedure.where(category.coding.code = 'ST').reasonReference.reference.value" anonym="Dia"/>
<attribute csv-column="patient-id" opal-value-type="text" val-fhir-path="Procedure.where(category.coding.code = 'ST').subject.reference.value" anonym="Pat" />
<attribute csv-column="strahlentherapie_stellung_zu_operativer_therapie" opal-value-type="text" val-fhir-path="Procedure.extension('http://dktk.dkfz.de/fhir/StructureDefinition/onco-core-Extension-StellungZurOp').value.coding.code.value"/>
<attribute csv-column="intention_strahlentherapie" opal-value-type="text" val-fhir-path="Procedure.extension('http://dktk.dkfz.de/fhir/StructureDefinition/onco-core-Extension-SYSTIntention').value.coding.code.value" />
<attribute csv-column="strahlentherapie_beginn" opal-value-type="date" val-fhir-path="Procedure.where(category.coding.code = 'ST').performed.start.value"/>
<attribute csv-column="strahlentherapie_ende" opal-value-type="date" val-fhir-path="Procedure.where(category.coding.code = 'ST').performed.end.value"/>
</container>
<container csv-filename="Molecular-Marker-${TIMESTAMP}.csv" opal-table="molecular-marker" opal-entity-type="MolecularMarker">
<attribute csv-column="mol-marker-id" primary-key="true" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '69548-6').id" anonym="Mol" op="EXTRACT_RELATIVE_ID"/>
<attribute csv-column="diagnosis-id" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '69548-6').focus.reference.value" anonym="Dia" />
<attribute csv-column="patient-id" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '69548-6').subject.reference.value" anonym="Pat" />
<attribute csv-column="datum_der_datenerhebung" opal-value-type="date" val-fhir-path="Observation.where(code.coding.code = '69548-6').effective.value"/>
<attribute csv-column="marker" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '69548-6').component.value.coding.code.value"/>
<attribute csv-column="status_des_molekularen_markers" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '69548-6').value.coding.code.value" />
<attribute csv-column="zusaetzliche_alternative_dokumentation" opal-value-type="text" val-fhir-path="Observation.where(code.coding.code = '69548-6').value.text.value"/>
</container>
<container csv-filename="Sample-${TIMESTAMP}.csv" opal-table="sample" opal-entity-type="Sample">
<attribute csv-column="sample-id" primary-key="true" opal-value-type="text" val-fhir-path="Specimen.id" anonym="Sam" op="EXTRACT_RELATIVE_ID"/>
<attribute csv-column="patient-id" opal-value-type="text" val-fhir-path="Specimen.subject.reference.value" anonym="Pat" />
<attribute csv-column="entnahmedatum" opal-value-type="date" val-fhir-path="Specimen.collection.collectedDateTime.value"/>
<attribute csv-column="probenart" opal-value-type="text" val-fhir-path="Specimen.type.coding.code.value"/>
<attribute csv-column="status" opal-value-type="text" val-fhir-path="Specimen.status.code.value"/>
<attribute csv-column="projekt" opal-value-type="text" val-fhir-path="Specimen.identifier.system.value"/>
<!-- @TODO: it still needs to be clarified whether it would be better to take the quantity from collection.quantity -->
<attribute csv-column="menge" opal-value-type="integer" val-fhir-path="Specimen.container.specimenQuantity.value.value"/>
<attribute csv-column="einheit" opal-value-type="text" val-fhir-path="Specimen.container.specimenQuantity.unit.value"/>
<attribute csv-column="aliquot" opal-value-type="text" val-fhir-path="Specimen.parent.reference.exists().value" />
</container>
<fhir-rev-include>Observation:patient</fhir-rev-include>
<fhir-rev-include>Condition:patient</fhir-rev-include>
<fhir-rev-include>ClinicalImpression:patient</fhir-rev-include>
<fhir-rev-include>MedicationStatement:patient</fhir-rev-include>
<fhir-rev-include>Procedure:patient</fhir-rev-include>
<fhir-rev-include>Specimen:patient</fhir-rev-include>
</template>

View File

@ -0,0 +1,13 @@
[
"berlin",
"muenchen-lmu",
"dresden",
"freiburg",
"muenchen-tum",
"tuebingen",
"mainz",
"frankfurt",
"essen",
"dktk-datashield-test",
"dktk-test"
]

View File

@ -0,0 +1,33 @@
#!/bin/bash -e
if [ "$ENABLE_DATASHIELD" == true ]; then
log INFO "DataSHIELD setup detected -- will start DataSHIELD services."
OVERRIDE+=" -f ./$PROJECT/modules/datashield-compose.yml"
EXPORTER_OPAL_PASSWORD="$(generate_password \"exporter in Opal\")"
TOKEN_MANAGER_OPAL_PASSWORD="$(generate_password \"Token Manager in Opal\")"
OPAL_DB_PASSWORD="$(echo \"Opal DB\" | generate_simple_password)"
OPAL_ADMIN_PASSWORD="$(generate_password \"admin password for Opal\")"
RSTUDIO_ADMIN_PASSWORD="$(generate_password \"admin password for R-Studio\")"
DATASHIELD_CONNECT_SECRET="$(echo \"DataShield Connect\" | generate_simple_password)"
TOKEN_MANAGER_SECRET="$(echo \"Token Manager\" | generate_simple_password)"
if [ ! -e /tmp/bridgehead/opal-cert.pem ]; then
mkdir -p /tmp/bridgehead/
chown -R bridgehead:docker /tmp/bridgehead/
openssl req -x509 -newkey rsa:4096 -nodes -keyout /tmp/bridgehead/opal-key.pem -out /tmp/bridgehead/opal-cert.pem -days 3650 -subj "/CN=opal/C=DE"
chmod g+r /tmp/bridgehead/opal-key.pem
fi
mkdir -p /tmp/bridgehead/opal-map
jq -n '{"sites": input | map({
"name": .,
"id": .,
"virtualhost": "\(.):443",
"beamconnect": "datashield-connect.\(.).'"$BROKER_ID"'"
})}' ./$PROJECT/modules/datashield-mappings.json > /tmp/bridgehead/opal-map/central.json
jq -n '[{
"external": "'"$SITE_ID"':443",
"internal": "opal:8443",
"allowed": input | map("datashield-connect.\(.).'"$BROKER_ID"'")
}]' ./$PROJECT/modules/datashield-mappings.json > /tmp/bridgehead/opal-map/local.json
chown -R bridgehead:docker /tmp/bridgehead/
add_private_oidc_redirect_url "/opal/*"
fi
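
For illustration, the two jq calls above produce mappings of the following shape. This standalone sketch uses a reduced two-site list and placeholder broker/site IDs (BROKER_ID and SITE_ID below are not the real values):

# Illustration only -- BROKER_ID, SITE_ID and the site list are placeholders.
BROKER_ID=broker.example.de
SITE_ID=berlin
echo '["berlin","dresden"]' > /tmp/sites.json
# central.json: one entry per site, routing its virtual host to its beam-connect app id.
jq -n '{"sites": input | map({
  "name": .,
  "id": .,
  "virtualhost": "\(.):443",
  "beamconnect": "datashield-connect.\(.).'"$BROKER_ID"'"
})}' /tmp/sites.json
# -> {"sites":[{"name":"berlin","id":"berlin","virtualhost":"berlin:443","beamconnect":"datashield-connect.berlin.broker.example.de"}, ...]}
# local.json: maps the own external host to the local Opal and lists the allowed remote beam-connect app ids.
jq -n '[{
  "external": "'"$SITE_ID"':443",
  "internal": "opal:8443",
  "allowed": input | map("datashield-connect.\(.).'"$BROKER_ID"'")
}]' /tmp/sites.json
# -> [{"external":"berlin:443","internal":"opal:8443","allowed":["datashield-connect.berlin.broker.example.de","datashield-connect.dresden.broker.example.de"]}]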

ccp/modules/datashield.md (new file, 28 lines)

@ -0,0 +1,28 @@
# DataSHIELD
This module provides the infrastructure to run DataSHIELD within the bridgehead.
For more information about DataSHIELD, please visit https://www.datashield.org/
## R-Studio
To connect to the different bridgeheads of the CCP through DataSHIELD, you can use your own R-Studio environment.
However, the R-Studio provided here already has the DataSHIELD libraries installed and is integrated within the bridgehead,
which saves you the extra configuration of your own R-Studio environment.
## Opal
This is the core of DataSHIELD. It is made up of Opal, a Postgres database and an R-server.
For more information about Opal, please visit https://opaldoc.obiba.org
### Opal
Opal is OBiBa's core database application for biobanks.
### Opal-DB
Opal requires a database to import the data for DataSHIELD; we use a Postgres instance as this database.
The data is imported within the bridgehead through the exporter.
### Opal-R-Server
R-Server to execute R scripts in DataSHIELD.
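
Once the module is running, Opal's web interface is exposed through the bridgehead's Traefik under the /opal path (see datashield-compose.yml). A quick reachability check from the host, assuming HOST is set as in the bridgehead configuration; -k is only needed if the bridgehead serves a self-signed certificate:

# Expect an HTTP response (e.g. a redirect to the Opal login page) if the module is up.
curl -k -I "https://${HOST}/opal/"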
## Beam
### Beam-Connect
Beam-Connect is used to route HTTP(S) traffic through Beam so that R-Studio can access data from other bridgeheads that have DataSHIELD enabled.
### Beam-Proxy
The usual beam proxy used for communication.

View File

@ -1,4 +1,4 @@
#!/bin/bash
#!/bin/bash -e
if [ -n "${ENABLE_DNPM}" ]; then
log INFO "DNPM setup detected (Beam.Connect) -- will start Beam.Connect for DNPM."

View File

@ -0,0 +1,6 @@
# Full Excel Export
curl --location --request POST 'https://${HOST}/ccp-exporter/request?query=Patient&query-format=FHIR_PATH&template-id=ccp&output-format=EXCEL' \
--header 'x-api-key: ${EXPORT_API_KEY}'
# QB
curl --location --request POST 'https://${HOST}/ccp-reporter/generate?template-id=ccp'

View File

@ -0,0 +1,67 @@
version: "3.7"
services:
exporter:
image: docker.verbis.dkfz.de/ccp/dktk-exporter:latest
container_name: bridgehead-ccp-exporter
environment:
JAVA_OPTS: "-Xms1G -Xmx8G -XX:+UseG1GC"
LOG_LEVEL: "INFO"
EXPORTER_API_KEY: "${EXPORTER_API_KEY}" # Set in exporter-setup.sh
CROSS_ORIGINS: "https://${HOST}"
EXPORTER_DB_USER: "exporter"
EXPORTER_DB_PASSWORD: "${EXPORTER_DB_PASSWORD}" # Set in exporter-setup.sh
EXPORTER_DB_URL: "jdbc:postgresql://exporter-db:5432/exporter"
HTTP_RELATIVE_PATH: "/ccp-exporter"
SITE: "${SITE_ID}"
HTTP_SERVLET_REQUEST_SCHEME: "https"
OPAL_PASSWORD: "${EXPORTER_OPAL_PASSWORD}"
labels:
- "traefik.enable=true"
- "traefik.http.routers.exporter_ccp.rule=PathPrefix(`/ccp-exporter`)"
- "traefik.http.services.exporter_ccp.loadbalancer.server.port=8092"
- "traefik.http.routers.exporter_ccp.tls=true"
- "traefik.http.middlewares.exporter_ccp_strip.stripprefix.prefixes=/ccp-exporter"
- "traefik.http.routers.exporter_ccp.middlewares=exporter_ccp_strip"
volumes:
- "/var/cache/bridgehead/ccp/exporter-files:/app/exporter-files/output"
exporter-db:
image: docker.verbis.dkfz.de/cache/postgres:15.4-alpine
container_name: bridgehead-ccp-exporter-db
environment:
POSTGRES_USER: "exporter"
POSTGRES_PASSWORD: "${EXPORTER_DB_PASSWORD}" # Set in exporter-setup.sh
POSTGRES_DB: "exporter"
volumes:
# Consider removing this volume once we find a solution to save Lens-queries to be executed in the explorer.
- "/var/cache/bridgehead/ccp/exporter-db:/var/lib/postgresql/data"
reporter:
image: docker.verbis.dkfz.de/ccp/dktk-reporter:latest
container_name: bridgehead-ccp-reporter
environment:
JAVA_OPTS: "-Xms1G -Xmx8G -XX:+UseG1GC"
LOG_LEVEL: "INFO"
CROSS_ORIGINS: "https://${HOST}"
HTTP_RELATIVE_PATH: "/ccp-reporter"
SITE: "${SITE_ID}"
EXPORTER_API_KEY: "${EXPORTER_API_KEY}" # Set in exporter-setup.sh
EXPORTER_URL: "http://exporter:8092"
LOG_FHIR_VALIDATION: "false"
HTTP_SERVLET_REQUEST_SCHEME: "https"
# In this initial development state of the bridgehead, we are trying to use as few volumes as possible.
# However, in the first executions at the CCP sites this volume has proven to be very important. A report is
# a process that can take several hours, because it depends on the exporter.
# There is a risk that the bridgehead restarts, losing the already generated report.
volumes:
- "/var/cache/bridgehead/ccp/reporter-files:/app/reports"
labels:
- "traefik.enable=true"
- "traefik.http.routers.reporter_ccp.rule=PathPrefix(`/ccp-reporter`)"
- "traefik.http.services.reporter_ccp.loadbalancer.server.port=8095"
- "traefik.http.routers.reporter_ccp.tls=true"
- "traefik.http.middlewares.reporter_ccp_strip.stripprefix.prefixes=/ccp-reporter"
- "traefik.http.routers.reporter_ccp.middlewares=reporter_ccp_strip"

View File

@ -0,0 +1,8 @@
#!/bin/bash -e
if [ "$ENABLE_EXPORTER" == true ]; then
log INFO "Exporter setup detected -- will start Exporter service."
OVERRIDE+=" -f ./$PROJECT/modules/exporter-compose.yml"
EXPORTER_DB_PASSWORD="$(echo \"This is a salt string to generate one consistent password for the exporter. It is not required to be secret.\" | openssl rsautl -sign -inkey /etc/bridgehead/pki/${SITE_ID}.priv.pem | base64 | head -c 30)"
EXPORTER_API_KEY="$(echo \"This is a salt string to generate one consistent API KEY for the exporter. It is not required to be secret.\" | openssl rsautl -sign -inkey /etc/bridgehead/pki/${SITE_ID}.priv.pem | base64 | head -c 64)"
fi

ccp/modules/exporter.md (new file, 15 lines)

@ -0,0 +1,15 @@
# Exporter and Reporter
## Exporter
The exporter is a REST API that exports the data of the different databases of the bridgehead as a set of tables.
It supports different output formats such as CSV, Excel, JSON or XML, and it can also export data into Opal.
## Exporter-DB
It is a database that stores queries for execution by the exporter.
The exporter also manages the different executions of the same query through this database.
## Reporter
This component is a plugin of the exporter that allows creating more complex Excel reports described in templates.
It is compatible with different template engines such as Groovy and Thymeleaf, among others.
It is well suited to generating documents such as our traditional CCP quality report.
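
The exporter-curl-templates file added in this changeset (shown above) gives concrete calls against this API. A runnable variant, assuming HOST is set and EXPORTER_API_KEY holds the key generated in exporter-setup.sh; -k is only needed for a self-signed certificate:

# Full Excel export of all patients via the exporter REST API:
curl -k --location --request POST "https://${HOST}/ccp-exporter/request?query=Patient&query-format=FHIR_PATH&template-id=ccp&output-format=EXCEL" \
     --header "x-api-key: ${EXPORTER_API_KEY}"
# Generate the quality report (QB) with the reporter:
curl -k --location --request POST "https://${HOST}/ccp-reporter/generate?template-id=ccp"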

View File

@ -1,4 +1,5 @@
version: "3.7"
services:
id-manager:
image: docker.verbis.dkfz.de/bridgehead/magicpl

View File

@ -1,4 +1,4 @@
#!/bin/bash
#!/bin/bash -e
function idManagementSetup() {
if [ -n "$IDMANAGER_UPLOAD_APIKEY" ]; then

View File

@ -0,0 +1,47 @@
version: "3.7"
services:
login-db:
image: docker.verbis.dkfz.de/cache/postgres:15.4-alpine
container_name: bridgehead-login-db
environment:
POSTGRES_USER: "keycloak"
POSTGRES_PASSWORD: "${KEYCLOAK_DB_PASSWORD}" # Set in login-setup.sh
POSTGRES_DB: "keycloak"
tmpfs:
- /var/lib/postgresql/data
# Consider removing this comment once we have collected experience in production.
# volumes:
# - "bridgehead-login-db:/var/lib/postgresql/data"
login:
image: docker.verbis.dkfz.de/ccp/dktk-keycloak:latest
container_name: bridgehead-login
environment:
KEYCLOAK_ADMIN: "admin"
KEYCLOAK_ADMIN_PASSWORD: "${LDM_AUTH}"
TEILER_ADMIN: "${PROJECT}"
TEILER_ADMIN_PASSWORD: "${LDM_AUTH}"
TEILER_ADMIN_FIRST_NAME: "${OPERATOR_FIRST_NAME}"
TEILER_ADMIN_LAST_NAME: "${OPERATOR_LAST_NAME}"
TEILER_ADMIN_EMAIL: "${OPERATOR_EMAIL}"
KC_DB_PASSWORD: "${KEYCLOAK_DB_PASSWORD}" # Set in login-setup.sh
KC_HOSTNAME_URL: "https://${HOST}/login"
KC_HOSTNAME_STRICT: "false"
KC_PROXY_ADDRESS_FORWARDING: "true"
TEILER_ORCHESTRATOR_EXTERN_URL: "https://${HOST}/ccp-teiler"
command:
- start-dev --import-realm --proxy edge --http-relative-path=/login
labels:
- "traefik.enable=true"
- "traefik.http.routers.login.rule=PathPrefix(`/login`)"
- "traefik.http.services.login.loadbalancer.server.port=8080"
- "traefik.http.routers.login.tls=true"
depends_on:
- login-db
# Consider removing this comment once we have collected experience in production.
#volumes:
# bridgehead-login-db:
# name: "bridgehead-login-db"

View File

@ -0,0 +1,7 @@
#!/bin/bash -e
if [ "$ENABLE_LOGIN" == true ]; then
log INFO "Login setup detected -- will start Login services."
OVERRIDE+=" -f ./$PROJECT/modules/login-compose.yml"
KEYCLOAK_DB_PASSWORD="$(generate_password \"local Keycloak\")"
fi

ccp/modules/login.md (new file, 13 lines)

@ -0,0 +1,13 @@
# Login
The login component is a local Keycloak instance. In the future it will be replaced by the central Keycloak instance,
or it may be used to add local identity providers to the bridgehead, or simply to simplify the configuration of
the central Keycloak instance for the integration of every new bridgehead.
The basic configuration of our Keycloak instance is contained in a small JSON file.
### Teiler User
Currently, the local Keycloak is used by the Teiler. There is a basic admin user in the basic Keycloak configuration,
which can be configured with the TEILER_ADMIN_XXX environment variables.
## Login-DB
Keycloak requires a local database for its configuration. However, as we use an initial JSON configuration file, we
theoretically do not need a volume for the login as long as no local identity provider and no local users are configured.

View File

@ -2,7 +2,8 @@ version: "3.7"
services:
mtba:
image: docker.verbis.dkfz.de/cache/samply/mtba:1.0.0
#image: docker.verbis.dkfz.de/cache/samply/mtba:latest
image: docker.verbis.dkfz.de/cache/samply/mtba:develop
container_name: bridgehead-mtba
environment:
BLAZE_STORE_URL: http://blaze:8080
@ -11,22 +12,30 @@ services:
ID_MANAGER_API_KEY: ${IDMANAGER_UPLOAD_APIKEY}
ID_MANAGER_PSEUDONYM_ID_TYPE: BK_${IDMANAGEMENT_FRIENDLY_ID}_L-ID
ID_MANAGER_URL: http://id-manager:8080/id-manager
PATIENT_CSV_FIRST_NAME_HEADER: ${MTBA_PATIENT_CSV_FIRST_NAME_HEADER}
PATIENT_CSV_LAST_NAME_HEADER: ${MTBA_PATIENT_CSV_LAST_NAME_HEADER}
PATIENT_CSV_GENDER_HEADER: ${MTBA_PATIENT_CSV_GENDER_HEADER}
PATIENT_CSV_BIRTHDAY_HEADER: ${MTBA_PATIENT_CSV_BIRTHDAY_HEADER}
PATIENT_CSV_FIRST_NAME_HEADER: ${MTBA_PATIENT_CSV_FIRST_NAME_HEADER:-FIRST_NAME}
PATIENT_CSV_LAST_NAME_HEADER: ${MTBA_PATIENT_CSV_LAST_NAME_HEADER:-LAST_NAME}
PATIENT_CSV_GENDER_HEADER: ${MTBA_PATIENT_CSV_GENDER_HEADER:-GENDER}
PATIENT_CSV_BIRTHDAY_HEADER: ${MTBA_PATIENT_CSV_BIRTHDAY_HEADER:-BIRTHDAY}
CBIOPORTAL_URL: http://cbioportal:8080
FILE_CHARSET: ${MTBA_FILE_CHARSET}
FILE_END_OF_LINE: ${MTBA_FILE_END_OF_LINE}
CSV_DELIMITER: ${MTBA_CSV_DELIMITER}
FILE_CHARSET: ${MTBA_FILE_CHARSET:-UTF-8}
FILE_END_OF_LINE: ${MTBA_FILE_END_OF_LINE:-LF}
CSV_DELIMITER: ${MTBA_CSV_DELIMITER:-TAB}
HTTP_RELATIVE_PATH: "/mtba"
KEYCLOAK_ADMIN_GROUP: "${KEYCLOAK_ADMIN_GROUP}"
KEYCLOAK_CLIENT_ID: "${KEYCLOAK_PRIVATE_CLIENT_ID}"
KEYCLOAK_CLIENT_SECRET: "${OIDC_CLIENT_SECRET}"
KEYCLOAK_REALM: "${KEYCLOAK_REALM}"
KEYCLOAK_URL: "${KEYCLOAK_URL}"
labels:
- "traefik.enable=true"
- "traefik.http.routers.mtba.rule=PathPrefix(`/`)"
- "traefik.http.services.mtba.loadbalancer.server.port=80"
- "traefik.http.routers.mtba.tls=true"
- "traefik.http.routers.mtba_ccp.rule=PathPrefix(`/mtba`)"
- "traefik.http.services.mtba_ccp.loadbalancer.server.port=8480"
- "traefik.http.routers.mtba_ccp.tls=true"
volumes:
- /tmp/bridgehead/mtba/input:/app/input
- /tmp/bridgehead/mtba/persist:/app/persist
- /var/cache/bridgehead/ccp/mtba/input:/app/input
- /var/cache/bridgehead/ccp/mtba/persist:/app/persist
# TODO: Include cBioPortal in Deployment ...
# NOTE: cBioPortal can't load data while the system is running, so the bridgehead needs to be restarted after data import!

View File

@ -1,4 +1,4 @@
#!/bin/bash
#!/bin/bash -e
function mtbaSetup() {
if [ -n "$ENABLE_MTBA" ];then
@ -8,5 +8,6 @@ function mtbaSetup() {
exit 1;
fi
OVERRIDE+=" -f ./$PROJECT/modules/mtba-compose.yml"
add_private_oidc_redirect_url "/mtba/*"
fi
}
}

ccp/modules/mtba.md (new file, 6 lines)

@ -0,0 +1,6 @@
# Molecular Tumor Board Alliance (MTBA)
In this module, the genetic data to be imported is placed in a directory (/tmp/bridgehead/mtba/input). A process regularly checks whether there are files in this directory. The files are pseudonymized once the IDAT is provided, combined with the clinical data from Blaze and imported into cBioPortal. The files are also imported into Blaze.
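
The following is only an illustrative sketch of the polling behaviour described above, not the actual MTBA implementation (which runs inside the mtba container); the polling interval is an assumption, the directory is the one mentioned above:

```bash
#!/bin/bash
# Illustrative sketch only -- not the actual MTBA implementation.
INPUT_DIR="/tmp/bridgehead/mtba/input"   # input directory mentioned above
while true; do
  if compgen -G "${INPUT_DIR}/*" > /dev/null; then
    echo "Files found in ${INPUT_DIR}: MTBA would pseudonymize them, merge them with clinical data from Blaze and import them into cBioPortal and Blaze"
  fi
  sleep 60   # polling interval is an assumption
done
```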

View File

@ -1,4 +1,5 @@
version: "3.7"
volumes:
nngm-rest:
@ -21,9 +22,6 @@ services:
- "traefik.http.routers.connector.middlewares=connector_strip,auth-nngm"
volumes:
- nngm-rest:/var/log
traefik:
labels:
- "traefik.http.middlewares.auth-nngm.basicauth.users=${NNGM_AUTH}"

View File

@ -1,4 +1,4 @@
#!/bin/bash
#!/bin/bash -e
if [ -n "$NNGM_CTS_APIKEY" ]; then
log INFO "nNGM setup detected -- will start nNGM Connector."

View File

@ -0,0 +1,82 @@
version: "3.7"
services:
teiler-orchestrator:
image: docker.verbis.dkfz.de/cache/samply/teiler-orchestrator:latest
container_name: bridgehead-teiler-orchestrator
labels:
- "traefik.enable=true"
- "traefik.http.routers.teiler_orchestrator_ccp.rule=PathPrefix(`/ccp-teiler`)"
- "traefik.http.services.teiler_orchestrator_ccp.loadbalancer.server.port=9000"
- "traefik.http.routers.teiler_orchestrator_ccp.tls=true"
- "traefik.http.middlewares.teiler_orchestrator_ccp_strip.stripprefix.prefixes=/ccp-teiler"
- "traefik.http.routers.teiler_orchestrator_ccp.middlewares=teiler_orchestrator_ccp_strip"
environment:
TEILER_BACKEND_URL: "https://${HOST}/ccp-teiler-backend"
TEILER_DASHBOARD_URL: "https://${HOST}/ccp-teiler-dashboard"
DEFAULT_LANGUAGE: "${DEFAULT_LANGUAGE_LOWER_CASE}"
HTTP_RELATIVE_PATH: "/ccp-teiler"
teiler-dashboard:
image: docker.verbis.dkfz.de/cache/samply/teiler-dashboard:latest
container_name: bridgehead-teiler-dashboard
labels:
- "traefik.enable=true"
- "traefik.http.routers.teiler_dashboard_ccp.rule=PathPrefix(`/ccp-teiler-dashboard`)"
- "traefik.http.services.teiler_dashboard_ccp.loadbalancer.server.port=80"
- "traefik.http.routers.teiler_dashboard_ccp.tls=true"
- "traefik.http.middlewares.teiler_dashboard_ccp_strip.stripprefix.prefixes=/ccp-teiler-dashboard"
- "traefik.http.routers.teiler_dashboard_ccp.middlewares=teiler_dashboard_ccp_strip"
environment:
DEFAULT_LANGUAGE: "${DEFAULT_LANGUAGE}"
TEILER_BACKEND_URL: "https://${HOST}/ccp-teiler-backend"
KEYCLOAK_URL: "${KEYCLOAK_URL}"
KEYCLOAK_REALM: "${KEYCLOAK_REALM}"
KEYCLOAK_CLIENT_ID: "${KEYCLOAK_PUBLIC_CLIENT_ID}"
KEYCLOAK_TOKEN_GROUP: "${KEYCLOAK_GROUP_CLAIM}"
TEILER_ADMIN_NAME: "${OPERATOR_FIRST_NAME} ${OPERATOR_LAST_NAME}"
TEILER_ADMIN_EMAIL: "${OPERATOR_EMAIL}"
TEILER_ADMIN_PHONE: "${OPERATOR_PHONE}"
TEILER_PROJECT: "${PROJECT}"
EXPORTER_API_KEY: "${EXPORTER_API_KEY}"
TEILER_ORCHESTRATOR_URL: "https://${HOST}/ccp-teiler"
TEILER_DASHBOARD_HTTP_RELATIVE_PATH: "/ccp-teiler-dashboard"
TEILER_ORCHESTRATOR_HTTP_RELATIVE_PATH: "/ccp-teiler"
TEILER_USER: "${KEYCLOAK_USER_GROUP}"
TEILER_ADMIN: "${KEYCLOAK_ADMIN_GROUP}"
REPORTER_DEFAULT_TEMPLATE_ID: "ccp-qb"
EXPORTER_DEFAULT_TEMPLATE_ID: "ccp"
teiler-backend:
# image: docker.verbis.dkfz.de/ccp/dktk-teiler-backend:latest
image: dktk-teiler-backend
container_name: bridgehead-teiler-backend
labels:
- "traefik.enable=true"
- "traefik.http.routers.teiler_backend_ccp.rule=PathPrefix(`/ccp-teiler-backend`)"
- "traefik.http.services.teiler_backend_ccp.loadbalancer.server.port=8085"
- "traefik.http.routers.teiler_backend_ccp.tls=true"
- "traefik.http.middlewares.teiler_backend_ccp_strip.stripprefix.prefixes=/ccp-teiler-backend"
- "traefik.http.routers.teiler_backend_ccp.middlewares=teiler_backend_ccp_strip"
environment:
LOG_LEVEL: "INFO"
APPLICATION_PORT: "8085"
APPLICATION_ADDRESS: "${HOST}"
DEFAULT_LANGUAGE: "${DEFAULT_LANGUAGE}"
CONFIG_ENV_VAR_PATH: "/run/secrets/ccp.conf"
TEILER_ORCHESTRATOR_HTTP_RELATIVE_PATH: "/ccp-teiler"
TEILER_ORCHESTRATOR_URL: "https://${HOST}/ccp-teiler"
TEILER_DASHBOARD_DE_URL: "https://${HOST}/ccp-teiler-dashboard/de"
TEILER_DASHBOARD_EN_URL: "https://${HOST}/ccp-teiler-dashboard/en"
CENTRAX_URL: "${CENTRAXX_URL}"
HTTP_PROXY: "http://forward_proxy:3128"
ENABLE_MTBA: "${ENABLE_MTBA}"
ENABLE_DATASHIELD: "${ENABLE_DATASHIELD}"
secrets:
- ccp.conf
secrets:
ccp.conf:
file: /etc/bridgehead/ccp.conf

View File

@ -0,0 +1,7 @@
#!/bin/bash -e
if [ "$ENABLE_TEILER" == true ];then
log INFO "Teiler setup detected -- will start Teiler services."
OVERRIDE+=" -f ./$PROJECT/modules/teiler-compose.yml"
add_public_oidc_redirect_url "/ccp-teiler/*"
fi

View File

ccp/modules/teiler.md Normal file
View File

@ -0,0 +1,19 @@
# Teiler
This module orchestrates the different microfrontends of the bridgehead as a single-page application.
## Teiler Orchestrator
The single-spa root component of the single-page application: it consists of the root HTML page and JavaScript code that retrieves the microfrontend configuration from the Teiler backend and registers the microfrontends. With the resulting mapping, it can initialize, mount and unmount the required microfrontends on the fly.
The microfrontends run independently in different containers and can be based on different frameworks (Angular, Vue, React, ...).
These microfrontends can also run standalone, but they need to be extended with single-spa (https://single-spa.js.org/docs/ecosystem).
Three templates (Angular, Vue, React) are available that can be extended for direct use in the Teiler.
## Teiler Dashboard
It consists of the main dashboard and a set of embedded services.
### Login
The user and password are configured in ccp.local.conf.
## Teiler Backend
In this component, the microfrontends are configured.
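
A minimal sketch of how the module is switched on, assuming the variables are set in the site configuration as in the variables file and setup script of this compare:

```bash
# Sketch: enable the Teiler module in the site configuration
ENABLE_TEILER=true        # checked by the Teiler setup script, which then adds teiler-compose.yml
DEFAULT_LANGUAGE=DE       # dashboard language; a lower-cased variant is passed to the orchestrator
```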

View File

@ -7,7 +7,25 @@ SUPPORT_EMAIL=support-ccp@dkfz-heidelberg.de
PRIVATEKEYFILENAME=/etc/bridgehead/pki/${SITE_ID}.priv.pem
BROKER_URL_FOR_PREREQ=$BROKER_URL
DEFAULT_LANGUAGE=DE
DEFAULT_LANGUAGE_LOWER_CASE=${DEFAULT_LANGUAGE,,}
ENABLE_EXPORTER=true
ENABLE_TEILER=true
#ENABLE_DATASHIELD=true
KEYCLOAK_USER_GROUP="DKTK_CCP_$(capitalize_first_letter ${SITE_ID})"
KEYCLOAK_ADMIN_GROUP="DKTK_CCP_$(capitalize_first_letter ${SITE_ID})_Verwalter"
KEYCLOAK_PRIVATE_CLIENT_ID=${SITE_ID}-private
KEYCLOAK_PUBLIC_CLIENT_ID=${SITE_ID}-public
# TODO: Change Keycloak Realm to productive. "test-realm-01" is only for testing
KEYCLOAK_REALM="${KEYCLOAK_REALM:-test-realm-01}"
KEYCLOAK_URL="https://login.verbis.dkfz.de"
KEYCLOAK_ISSUER_URL="${KEYCLOAK_URL}/realms/${KEYCLOAK_REALM}"
KEYCLOAK_GROUP_CLAIM="groups"
OAUTH2_CALLBACK=/oauth2/callback
OAUTH2_PROXY_SECRET="$(echo \"This is a salt string to generate one consistent encryption key for the oauth2_proxy. It is not required to be secret.\" | openssl rsautl -sign -inkey /etc/bridgehead/pki/${SITE_ID}.priv.pem | base64 | head -c 32)"
add_private_oidc_redirect_url "${OAUTH2_CALLBACK}"
for module in $PROJECT/modules/*.sh
do
@ -17,4 +35,4 @@ done
idManagementSetup
mtbaSetup
adt2fhirRestSetup
adt2fhirRestSetup

View File

@ -239,3 +239,109 @@ add_basic_auth_user() {
log DEBUG "Saving clear text credentials in $FILE. If wanted, delete them manually."
sed -i "/^$NAME/ s|$|\n# User: $USER\n# Password: $PASSWORD|" $FILE
}
OIDC_PUBLIC_REDIRECT_URLS=${OIDC_PUBLIC_REDIRECT_URLS:-""}
OIDC_PRIVATE_REDIRECT_URLS=${OIDC_PRIVATE_REDIRECT_URLS:-""}
# Add a redirect url to the public oidc client of the bridgehead
function add_public_oidc_redirect_url() {
if [[ $OIDC_PUBLIC_REDIRECT_URLS == "" ]]; then
OIDC_PUBLIC_REDIRECT_URLS+="$(generate_redirect_urls $1)"
else
OIDC_PUBLIC_REDIRECT_URLS+=",$(generate_redirect_urls $1)"
fi
}
# Add a redirect url to the private oidc client of the bridgehead
function add_private_oidc_redirect_url() {
if [[ $OIDC_PRIVATE_REDIRECT_URLS == "" ]]; then
OIDC_PRIVATE_REDIRECT_URLS+="$(generate_redirect_urls $1)"
else
OIDC_PRIVATE_REDIRECT_URLS+=",$(generate_redirect_urls $1)"
fi
}
function sync_secrets() {
local delimiter=$'\x1E'
local secret_sync_args=""
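# Each definition has the form OIDC:<secret name>:<client type>;<comma-separated redirect urls>;
# multiple definitions are joined with the ASCII record separator ($delimiter).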
if [[ $OIDC_PRIVATE_REDIRECT_URLS != "" ]]; then
secret_sync_args="OIDC:OIDC_CLIENT_SECRET:private;$OIDC_PRIVATE_REDIRECT_URLS"
fi
if [[ $OIDC_PUBLIC_REDIRECT_URLS != "" ]]; then
if [[ $secret_sync_args == "" ]]; then
secret_sync_args="OIDC:OIDC_PUBLIC:public;$OIDC_PUBLIC_REDIRECT_URLS"
else
secret_sync_args+="${delimiter}OIDC:OIDC_PUBLIC:public;$OIDC_PUBLIC_REDIRECT_URLS"
fi
fi
if [[ $secret_sync_args == "" ]]; then
return
fi
mkdir -p /var/cache/bridgehead/secrets/
touch /var/cache/bridgehead/secrets/oidc
chown -R bridgehead:docker /var/cache/bridgehead/secrets
# The oidc provider will need to be switched based on the project at some point I guess
docker run --rm \
-v /var/cache/bridgehead/secrets/oidc:/usr/local/cache \
-v $PRIVATEKEYFILENAME:/run/secrets/privkey.pem:ro \
-v /srv/docker/bridgehead/$PROJECT/root.crt.pem:/run/secrets/root.crt.pem:ro \
-v /etc/bridgehead/trusted-ca-certs:/conf/trusted-ca-certs:ro \
-e TLS_CA_CERTIFICATES_DIR=/conf/trusted-ca-certs \
-e HTTPS_PROXY=$HTTPS_PROXY_FULL_URL \
-e PROXY_ID=$PROXY_ID \
-e BROKER_URL=$BROKER_URL \
-e OIDC_PROVIDER=secret-sync-central.oidc-client-enrollment.$BROKER_ID \
-e SECRET_DEFINITIONS=$secret_sync_args \
docker.verbis.dkfz.de/cache/samply/secret-sync-local:latest
set -a # Export variables as environment variables
source /var/cache/bridgehead/secrets/*
set +a # Stop automatically exporting variables
}
capitalize_first_letter() {
input="$1"
capitalized="$(tr '[:lower:]' '[:upper:]' <<< ${input:0:1})${input:1}"
echo "$capitalized"
}
# Generate a ','-separated string of redirect urls relative to $HOST.
# $1 will be appended to each url.
# If the host looks like dev-jan.inet.dkfz-heidelberg.de, urls are generated with both dev-jan and the original $HOST as url authorities.
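# Example (illustrative): with HOST=dev-jan.inet.dkfz-heidelberg.de, generate_redirect_urls "/mtba/*"
# returns "https://dev-jan.inet.dkfz-heidelberg.de/mtba/*,https://dev-jan/mtba/*".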
function generate_redirect_urls(){
local redirect_urls="https://${HOST}$1"
local host_without_proxy="$(echo "$HOST" | cut -d '.' -f1)"
# Only append the second url if it is different and the host is not an ip address
if [[ "$HOST" != "$host_without_proxy" && ! "$HOST" =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
redirect_urls+=",https://$host_without_proxy$1"
fi
echo "$redirect_urls"
}
# This password contains at least one special character, one digit, and one upper- and one lower-case letter.
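# It is derived deterministically from the site's private key, so repeated calls with the same seed text yield the same password.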
generate_password(){
local seed_text="$1"
local seed_num=$(awk 'BEGIN{FS=""} NR==1{print $10}' /etc/bridgehead/pki/${SITE_ID}.priv.pem | od -An -tuC)
local nums="1234567890"
local n=$(echo "$seed_num" | awk '{print $1 % 10}')
local random_digit=${nums:$n:1}
local n=$(echo "$seed_num" | awk '{print $1 % 26}')
local upper="ABCDEFGHIJKLMNOPQRSTUVWXYZ"
local lower="abcdefghijklmnopqrstuvwxyz"
local random_upper=${upper:$n:1}
local random_lower=${lower:$n:1}
local n=$(echo "$seed_num" | awk '{print $1 % 8}')
local special='@#$%^&+='
local random_special=${special:$n:1}
local combined_text="This is a salt string to generate one consistent password for ${seed_text}. It is not required to be secret."
local main_password=$(echo "${combined_text}" | openssl rsautl -sign -inkey "/etc/bridgehead/pki/${SITE_ID}.priv.pem" 2> /dev/null | base64 | head -c 26 | sed 's/\//A/g')
echo "${main_password}${random_digit}${random_upper}${random_lower}${random_special}"
}
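# Usage (illustrative): MY_SERVICE_PASSWORD="$(generate_password "my service")"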
# This password only contains alphanumeric characters
generate_simple_password(){
local seed_text="$1"
local combined_text="This is a salt string to generate one consistent password for ${seed_text}. It is not required to be secret."
echo "${combined_text}" | openssl rsautl -sign -inkey "/etc/bridgehead/pki/${SITE_ID}.priv.pem" 2> /dev/null | base64 | head -c 26 | sed 's/[+\/]/A/g'
}