2022-02-16 09:59:53 +01:00
|
|
|
#!/bin/bash -e
|
|
|
|
|
2022-10-28 10:12:21 +02:00
|
|
|
detectCompose() {
  # Detect which Docker Compose flavor is available and record the command
  # to use in the global COMPOSE variable.
  # Prefers the v2 plugin ("docker compose"); falls back to the legacy
  # standalone binary ("docker-compose").
  # NOTE: the command substitution stays inside the case word so a missing
  # docker binary does not trip `set -e`.
  case "$(docker compose version 2>/dev/null)" in
    *"Docker Compose version"*)
      COMPOSE="docker compose"
      ;;
    *)
      COMPOSE="docker-compose"
      # This is intended to fail on startup in the next prereq check.
      ;;
  esac
}
|
|
|
|
|
2023-10-05 09:43:57 +02:00
|
|
|
setupProxy() {
  ### Note: As the current data protection concepts do not allow communication via HTTP,
  ### we are not setting a proxy for HTTP requests.
  #
  # Parses HTTPS_PROXY_URL (e.g. "http://user:pass@host:port") and exports
  # HTTPS_PROXY_HOST, HTTPS_PROXY_PORT and HTTPS_PROXY_FULL_URL. When
  # HTTPS_PROXY_USERNAME/HTTPS_PROXY_PASSWORD are set, credentials are
  # embedded into HTTPS_PROXY_FULL_URL.
  local http="no"
  local https="no"
  if [ -n "$HTTPS_PROXY_URL" ]; then
    local proto fqdn hostport
    # "scheme://" prefix (empty if the URL has no scheme).
    proto="$(echo "$HTTPS_PROXY_URL" | grep :// | sed -e 's,^\(.*://\).*,\1,g')"
    # Everything after the scheme (host[:port][/path]).
    fqdn="${HTTPS_PROXY_URL/$proto/}"
    hostport="$(echo "$HTTPS_PROXY_URL" | sed -e "s,$proto,,g" | cut -d/ -f1)"
    HTTPS_PROXY_HOST="$(echo "$hostport" | sed -e 's,:.*,,g')"
    HTTPS_PROXY_PORT="$(echo "$hostport" | sed -e 's,^.*:,:,g' -e 's,.*:\([0-9]*\).*,\1,g' -e 's,[^0-9],,g')"
    if [[ -n "$HTTPS_PROXY_USERNAME" && -n "$HTTPS_PROXY_PASSWORD" ]]; then
      # Note: proto/fqdn were previously recomputed here; reuse the values
      # parsed above instead.
      HTTPS_PROXY_FULL_URL="${proto}${HTTPS_PROXY_USERNAME}:${HTTPS_PROXY_PASSWORD}@${fqdn}"
      https="authenticated"
    else
      HTTPS_PROXY_FULL_URL="$HTTPS_PROXY_URL"
      https="unauthenticated"
    fi
  fi

  log INFO "Configuring proxy servers: $http http proxy (we're not supporting unencrypted comms), $https https proxy"
  export HTTPS_PROXY_HOST HTTPS_PROXY_PORT HTTPS_PROXY_FULL_URL
}
|
|
|
|
|
2022-02-16 09:59:53 +01:00
|
|
|
exitIfNotRoot() {
  # Abort (via fail_and_report) unless the effective user is root.
  if [ "$EUID" -eq 0 ]; then
    return
  fi
  log "ERROR" "Please run as root"
  fail_and_report 1 "Please run as root"
}
|
|
|
|
|
2022-05-31 13:55:40 +02:00
|
|
|
checkOwner(){
  # Verify that every file under directory $1 is owned by user $2.
  # Returns 0 when ownership is correct, 1 otherwise (after logging a hint
  # on how to inspect and fix the offending files).
  # Fix: quote "$1"/"$2" so paths containing spaces don't break find.
  COUNT=$(find "$1" ! -user "$2" | wc -l)
  if [ "$COUNT" -gt 0 ]; then
    log ERROR "$COUNT files in $1 are not owned by user $2. Run find $1 ! -user $2 to see them, chown -R $2 $1 to correct this issue."
    return 1
  fi
  return 0
}
|
|
|
|
|
2022-05-09 15:13:38 +02:00
|
|
|
printUsage() {
  # Print the CLI help text for the bridgehead entry-point script.
  printf '%s\n' \
    "Usage: bridgehead start|stop|is-running|update|install|uninstall|adduser|enroll PROJECTNAME" \
    "PROJECTNAME should be one of ccp|bbmri"
}
|
|
|
|
|
|
|
|
checkRequirements() {
  # Run the prerequisite checks; on failure, log and abort the script
  # (fail_and_report exits). Forwards all arguments to the checker.
  if lib/prerequisites.sh $@; then
    return 0
  else
    log "ERROR" "Validating Prerequisites failed, please fix the error(s) above this line."
    fail_and_report 1 "Validating prerequisites failed."
  fi
}
|
2022-05-12 18:23:52 +02:00
|
|
|
|
|
|
|
fetchVarsFromVault() {
  # Fetch the secrets named in "$@" from the central vault via the
  # bridgehead-vaultfetcher container and evaluate them into the current
  # shell environment.
  # Requires BW_MASTERPASS, BW_CLIENTID and BW_CLIENTSECRET, typically
  # provided by /etc/bridgehead/vault.conf (sourced below if present).
  # Returns 0 on success, non-zero (the fetcher's exit code) on failure.
  [ -e /etc/bridgehead/vault.conf ] && source /etc/bridgehead/vault.conf

  if [ -z "$BW_MASTERPASS" ] || [ -z "$BW_CLIENTID" ] || [ -z "$BW_CLIENTSECRET" ]; then
    log "ERROR" "Please supply correct credentials in /etc/bridgehead/vault.conf."
    return 1
  fi

  # Temporarily disable errexit so we can capture and report the fetcher's
  # exit code ourselves instead of dying mid-function.
  set +e

  # Credentials are passed via environment (-e), not argv, so they don't
  # leak into `ps` output.
  PASS=$(BW_MASTERPASS="$BW_MASTERPASS" BW_CLIENTID="$BW_CLIENTID" BW_CLIENTSECRET="$BW_CLIENTSECRET" docker run --rm -e BW_MASTERPASS -e BW_CLIENTID -e BW_CLIENTSECRET -e http_proxy docker.verbis.dkfz.de/cache/samply/bridgehead-vaultfetcher:latest $@)
  RET=$?

  if [ $RET -ne 0 ]; then
    echo "Code: $RET"
    echo $PASS
    return $RET
  fi

  # The fetcher prints KEY=VALUE lines; strip carriage returns and eval
  # them so the secrets become variables of this shell process.
  eval $(echo -e "$PASS" | sed 's/\r//g')

  # Restore errexit (the surrounding script runs with bash -e).
  set -e

  return 0
}
|
2022-05-16 09:21:42 +02:00
|
|
|
|
2022-05-31 09:22:38 +02:00
|
|
|
fetchVarsFromVaultByFile() {
  # Scan the given config file(s) for variables whose value is the
  # <VAULT> placeholder and fetch those secrets from the vault.
  # Returns 0 if nothing needs fetching or fetching succeeded.
  VARS_TO_FETCH=""

  # Fix: the previous `for line in $(cat $@)` iterated over *words*, so a
  # line containing spaces was split apart. Read real lines instead; the
  # `|| [ -n ... ]` clause also handles a missing trailing newline.
  local line
  while IFS= read -r line || [ -n "$line" ]; do
    if [[ $line =~ .*=[\"]*\<VAULT\>[\"]*.* ]]; then
      # Keep only the variable name left of '=' (leading whitespace stripped).
      VARS_TO_FETCH+="$(echo -n "$line" | sed -e 's/^[[:space:]]*//' -e 's/=.*//') "
    fi
  done < <(cat -- "$@")

  if [ -z "$VARS_TO_FETCH" ]; then
    return 0
  fi

  log INFO "Fetching $(echo $VARS_TO_FETCH | wc -w) secrets from Vault ..."

  fetchVarsFromVault $VARS_TO_FETCH

  return 0
}
|
|
|
|
|
2022-05-31 13:56:12 +02:00
|
|
|
assertVarsNotEmpty() {
  # Check that every variable named in "$@" is set and non-empty.
  # Collects all missing names into the global MISSING_VARS; logs and
  # returns 1 if any are missing, 0 otherwise.
  MISSING_VARS=""

  for VAR in $@; do
    # ${!VAR} is indirect expansion: the value of the variable named $VAR.
    [ -z "${!VAR}" ] && MISSING_VARS+="$VAR "
  done

  if [ -n "$MISSING_VARS" ]; then
    log "ERROR" "Mandatory variables not defined: $MISSING_VARS"
    return 1
  fi
  return 0
}
|
|
|
|
|
2022-10-06 10:45:50 +02:00
|
|
|
fixPermissions() {
  # Recursively hand ownership of the bridgehead config and installation
  # directories to the 'bridgehead' user.
  # Fix: `command -v` instead of the non-builtin `which`; quote the path.
  CHOWN=$(command -v chown)
  sudo "$CHOWN" -R bridgehead /etc/bridgehead /srv/docker/bridgehead
}
|
|
|
|
|
|
|
|
source lib/monitoring.sh
|
|
|
|
|
2022-11-04 15:26:27 +01:00
|
|
|
report_error() {
  # Log an error and forward it to the healthcheck/monitoring endpoint
  # (hc_send comes from lib/monitoring.sh, sourced above).
  # Arguments: $1 - status code for hc_send; remaining args - the message.
  # NOTE: CODE is deliberately a global, not `local`.
  CODE=$1
  shift
  log ERROR "$@"
  hc_send $CODE "$@"
}
|
|
|
|
|
|
|
|
fail_and_report() {
  # Report the error (log + healthcheck ping) and terminate the script.
  # Arguments: $1 - exit/status code; remaining args - the message.
  # Fix: quote "$@"/"$1" so multi-word messages aren't re-split or globbed.
  report_error "$@"
  exit "$1"
}
|
|
|
|
|
2022-11-04 13:09:11 +01:00
|
|
|
setHostname() {
  # Populate HOST with the lowercased FQDN, unless the caller already set it.
  if [ -n "$HOST" ]; then
    return
  fi
  export HOST=$(hostname -f | tr "[:upper:]" "[:lower:]")
  log DEBUG "Using auto-detected hostname $HOST."
}
|
|
|
|
|
2022-12-13 16:51:32 +01:00
|
|
|
# Takes 1) The Backup Directory Path 2) The name of the Service to be backuped
# Creates 3 Backups: 1) For the past seven days 2) For the current month and 3) for each calendar week
#
# Dumps the service's postgres DB (plain SQL, no owner/privilege statements)
# and tees it into three rotating files under "$1/$2/".
# NOTE(review): despite its name, no encryption happens yet — see the TODO.
# NOTE: the pipeline deliberately continues *through* the TODO comment line:
# a line ending in '|' makes bash look for the next command on the following
# non-comment line, so the tee stages below still receive the dump.
createEncryptedPostgresBackup(){
  docker exec "$2" bash -c 'pg_dump -U $POSTGRES_USER $POSTGRES_DB --format=p --no-owner --no-privileges' | \
  # TODO: Encrypt using /etc/bridgehead/pki/${SITE_ID}.priv.pem | \
  tee "$1/$2/$(date +Last-%A).sql" | \
  tee "$1/$2/$(date +%Y-%m).sql" > \
  "$1/$2/$(date +%Y-KW%V).sql"
}
|
|
|
|
|
|
|
|
|
2022-11-29 08:36:05 +01:00
|
|
|
# from: https://gist.github.com/sj26/88e1c6584397bb7c13bd11108a579746
# ex. use: retry 5 /bin/false
#
# Runs the given command until it succeeds, sleeping 2^attempt seconds
# between tries (exponential backoff). Gives up after $1 attempts and
# returns the command's last exit code; returns 0 on success.
function retry {
  local max_attempts=$1
  shift

  local attempt=0
  local status wait_secs
  until "$@"; do
    status=$?
    wait_secs=$((2 ** attempt))
    attempt=$((attempt + 1))
    if [ "$attempt" -lt "$max_attempts" ]; then
      echo "Retry $attempt/$max_attempts exited with code $status, retrying in $wait_secs seconds..."
      sleep "$wait_secs"
    else
      echo "Retry $attempt/$max_attempts exited with code $status, giving up."
      return "$status"
    fi
  done
  return 0
}
|
|
|
|
|
2023-03-08 10:00:38 +01:00
|
|
|
function bk_is_running {
  # Report whether the bridgehead's compose stack is up.
  # Reads the globals PROJECT and OVERRIDE; returns 0 when the stack is
  # considered running, 1 otherwise.
  detectCompose
  # List the container ids of the project's compose services (one per line).
  RUNNING="$($COMPOSE -p $PROJECT -f minimal/docker-compose.yml -f ./$PROJECT/docker-compose.yml $OVERRIDE ps -q)"
  # NOTE: `echo ""` still emits one newline, so wc -l reports 1 even when
  # no container is running — the >= 2 threshold below accounts for that
  # (i.e. "running" presumably means at least two containers are up; verify
  # against the minimal stack's service count).
  NUMBEROFRUNNING=$(echo "$RUNNING" | wc -l)
  if [ $NUMBEROFRUNNING -ge 2 ]; then
    return 0
  else
    return 1
  fi
}
|
|
|
|
|
2023-08-07 13:00:37 +02:00
|
|
|
function do_enroll_inner {
  # Enroll this site's Beam proxy: run the beam-enroll container, which
  # writes the private key to $PRIVATEKEYFILENAME under /etc/bridgehead/pki.
  # Arguments: $1 - proxy id (optional, falls back to global $PROXY_ID)
  #            $2 - support email (optional, falls back to $SUPPORT_EMAIL)
  # Exits the script with 1 if no proxy id can be determined.
  PARAMS=""

  MANUAL_PROXY_ID="${1:-$PROXY_ID}"
  if [ -z "$MANUAL_PROXY_ID" ]; then
    log ERROR "No Proxy ID set"
    exit 1
  else
    log INFO "Enrolling Beam Proxy Id $MANUAL_PROXY_ID"
  fi

  SUPPORT_EMAIL="${2:-$SUPPORT_EMAIL}"
  if [ -n "$SUPPORT_EMAIL" ]; then
    PARAMS+="--admin-email $SUPPORT_EMAIL"
  fi

  # $PARAMS is intentionally unquoted so it word-splits into separate
  # CLI arguments for the enroll tool.
  docker run --rm -v /etc/bridgehead/pki:/etc/bridgehead/pki docker.verbis.dkfz.de/cache/samply/beam-enroll:latest --output-file $PRIVATEKEYFILENAME --proxy-id $MANUAL_PROXY_ID $PARAMS
  # Restrict the generated private key to owner read/write only.
  chmod 600 $PRIVATEKEYFILENAME
}
|
|
|
|
|
|
|
|
function do_enroll {
  # Public wrapper around do_enroll_inner; forwards all arguments.
  # Fix: quote "$@" so arguments are forwarded without re-splitting/globbing.
  do_enroll_inner "$@"
}
|
2023-07-27 15:38:29 +02:00
|
|
|
|
|
|
|
add_basic_auth_user() {
  # Add (or extend) an htpasswd-style basic-auth entry in the project's
  # local config file /etc/bridgehead/<project>.local.conf.
  # Arguments: $1 - username, $2 - password,
  #            $3 - name of the config variable holding the credentials,
  #            $4 - project name (determines the config file).
  USER="${1}"
  PASSWORD="${2}"
  NAME="${3}"
  PROJECT="${4}"
  FILE="/etc/bridgehead/${PROJECT}.local.conf"
  # Hash the credentials with httpd's htpasswd inside a container; strip
  # line endings so the result can be spliced into a single config line.
  ENCRY_CREDENTIALS="$(docker run --rm docker.verbis.dkfz.de/cache/httpd:alpine htpasswd -nb $USER $PASSWORD | tr -d '\n' | tr -d '\r')"
  if [ -f $FILE ] && grep -R -q "$NAME=" $FILE # if a specific basic auth user already exists:
  then
    # Prepend the new hash to the existing comma-separated credential list.
    sed -i "/$NAME/ s|='|='$ENCRY_CREDENTIALS,|" $FILE
  else
    # First user for this variable: append a fresh config entry.
    echo -e "\n## Basic Authentication Credentials for:\n$NAME='$ENCRY_CREDENTIALS'" >> $FILE;
  fi
  log DEBUG "Saving clear text credentials in $FILE. If wanted, delete them manually."
  # Record the clear-text user/password as comments right below the entry.
  sed -i "/^$NAME/ s|$|\n# User: $USER\n# Password: $PASSWORD|" $FILE
}
|
2024-03-18 11:03:50 +01:00
|
|
|
|
|
|
|
function clone_repo_if_nonexistent() {
  # Clone a git repository into target_dir (only if it doesn't exist yet)
  # and check out the requested branch.
  local repo_url="$1"    # First argument: Repository URL
  local target_dir="$2"  # Second argument: Target directory
  local branch_name="$3" # Third argument: Branch name

  # Fix: quote the expansion so paths with spaces print (and clone) intact.
  echo "Repo directory: $target_dir"

  # Check if the target directory exists
  if [ ! -d "$target_dir" ]; then
    echo "Directory '$target_dir' does not exist. Cloning the repository..."
    # Clone the repository
    git clone "$repo_url" "$target_dir"
  fi

  # Change to the cloned directory, check out the branch, then return to
  # the previous working directory.
  cd "$target_dir"
  git checkout "$branch_name"
  echo "Checked out branch '$branch_name'."

  cd -
}
|
|
|
|
|
|
|
|
function clone_transfair_if_nonexistent() {
  # Ensure a transFAIR checkout exists under $1/transfair, pinned to the
  # ehds2_develop branch.
  local checkout_root="$1"

  clone_repo_if_nonexistent https://github.com/samply/transFAIR.git $checkout_root/transfair ehds2_develop
}
|
|
|
|
|
|
|
|
function clone_focus_if_nonexistent() {
  # Ensure a focus checkout exists under $1/focus, pinned to the ehds2 branch.
  local checkout_root="$1"

  clone_repo_if_nonexistent https://github.com/samply/focus.git $checkout_root/focus ehds2
}
|
|
|
|
|
|
|
|
|
|
|
|
function build_transfair() {
  # Build the local samply/transfair image from $1/transfair.
  #
  # We only take the trouble to build transfair if:
  # 1. There is no data lock file (which means that no ETL has yet been run) and
  # 2. There is data available (at least one CSV file in ../ecdc/data).
  local base_dir="$1"
  local csv_found=false
  local f

  # Fix: the old check `[ -f ../ecdc/data/*.[cC][sS][vV] ]` errors out with
  # "too many arguments" as soon as the glob matches more than one file.
  # Test each candidate individually instead.
  for f in ../ecdc/data/*.[cC][sS][vV]; do
    if [ -f "$f" ]; then
      csv_found=true
      break
    fi
  done

  if [ "$csv_found" = true ] && [ ! -f ../ecdc/data/lock ]; then
    cd "$base_dir/transfair"
    docker build --progress=plain -t samply/transfair --no-cache .
    cd -
  fi
}
|
|
|
|
|
|
|
|
function build_focus() {
  # Build the local samply/focus image from the checkout under $1/focus,
  # then return to the previous working directory.
  local checkout_root="$1"

  cd $checkout_root/focus
  docker build --progress=plain -f DockerfileWithBuild -t samply/focus --no-cache .
  cd -
}
|
|
|
|
|