From c6983351ca4b73998753bc0418323c47d6549dee Mon Sep 17 00:00:00 2001
From: David Gomez
Date: Mon, 15 Nov 2021 12:53:51 +0100
Subject: [PATCH] Deprecate Airflow 1 (#10)

---
 .../airflow-scheduler/1/debian-10/Dockerfile | 36 --
 .../1/debian-10/docker-compose.yml | 15 -
 .../opt/bitnami/.bitnami_components.json | 51 --
 .../opt/bitnami/licenses/licenses.txt | 3 -
 .../opt/bitnami/scripts/libbitnami.sh | 51 --
 .../opt/bitnami/scripts/libcomponent.sh | 65 ---
 .../prebuildfs/opt/bitnami/scripts/libfile.sh | 139 -----
 .../prebuildfs/opt/bitnami/scripts/libfs.sh | 184 -------
 .../prebuildfs/opt/bitnami/scripts/libhook.sh | 16 -
 .../prebuildfs/opt/bitnami/scripts/liblog.sh | 112 ----
 .../prebuildfs/opt/bitnami/scripts/libnet.sh | 163 ------
 .../prebuildfs/opt/bitnami/scripts/libos.sh | 448 ----------------
 .../opt/bitnami/scripts/libpersistence.sh | 122 -----
 .../opt/bitnami/scripts/libservice.sh | 273 ----------
 .../opt/bitnami/scripts/libvalidations.sh | 264 ----------
 .../opt/bitnami/scripts/libversion.sh | 49 --
 .../opt/bitnami/scripts/libwebserver.sh | 458 ----------------
 .../prebuildfs/usr/sbin/install_packages | 24 -
 .../bitnami/scripts/airflow-scheduler-env.sh | 97 ----
 .../scripts/airflow-scheduler/entrypoint.sh | 41 --
 .../scripts/airflow-scheduler/postunpack.sh | 28 -
 .../bitnami/scripts/airflow-scheduler/run.sh | 24 -
 .../scripts/airflow-scheduler/setup.sh | 23 -
 .../rootfs/opt/bitnami/scripts/libairflow.sh | 487 ------
 .../bitnami/scripts/libairflowscheduler.sh | 153 ------
 bitnami/airflow-scheduler/README.md | 4 -
 26 files changed, 3330 deletions(-)
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/Dockerfile
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/docker-compose.yml
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/.bitnami_components.json
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/licenses/licenses.txt
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libbitnami.sh
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libcomponent.sh
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libfile.sh
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libfs.sh
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libhook.sh
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/liblog.sh
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libnet.sh
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libos.sh
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libpersistence.sh
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libservice.sh
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libvalidations.sh
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libversion.sh
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libwebserver.sh
 delete mode 100755 bitnami/airflow-scheduler/1/debian-10/prebuildfs/usr/sbin/install_packages
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler-env.sh
 delete mode 100755 bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler/entrypoint.sh
 delete mode 100755 bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler/postunpack.sh
 delete mode 100755 bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler/run.sh
 delete mode 100755 bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler/setup.sh
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/libairflow.sh
 delete mode 100644 bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/libairflowscheduler.sh

diff --git a/bitnami/airflow-scheduler/1/debian-10/Dockerfile b/bitnami/airflow-scheduler/1/debian-10/Dockerfile
deleted file mode 100644
index d4fc36bf7e15..000000000000
--- a/bitnami/airflow-scheduler/1/debian-10/Dockerfile
+++ /dev/null
@@ -1,36 +0,0 @@
-FROM docker.io/bitnami/minideb:buster
-LABEL maintainer "Bitnami "
-
-ENV BITNAMI_PKG_EXTRA_DIRS="/opt/bitnami/airflow/dags" \
-    HOME="/" \
-    OS_ARCH="amd64" \
-    OS_FLAVOUR="debian-10" \
-    OS_NAME="linux"
-
-COPY prebuildfs /
-# Install required system packages and dependencies
-RUN install_packages acl ca-certificates curl gzip libbsd0 libbz2-1.0 libc6 libcom-err2 libcurl4 libedit2 libffi6 libgcc1 libgcrypt20 libgmp10 libgnutls30 libgpg-error0 libgssapi-krb5-2 libhogweed4 libicu63 libidn2-0 libk5crypto3 libkeyutils1 libkrb5-3 libkrb5support0 libldap-2.4-2 liblzma5 libmariadb3 libncursesw6 libnettle6 libnghttp2-14 libp11-kit0 libpsl5 libreadline7 librtmp1 libsasl2-2 libsqlite3-0 libssh2-1 libssl1.1 libstdc++6 libtasn1-6 libtinfo6 libunistring2 libuuid1 libxml2 libxslt1.1 netbase procps tar zlib1g
-RUN . /opt/bitnami/scripts/libcomponent.sh && component_unpack "wait-for-port" "1.0.1-1" --checksum 28dc75dff64df07e67b711d20859c24ebc996db0eaac06138553341d0f769299
-RUN . /opt/bitnami/scripts/libcomponent.sh && component_unpack "python" "3.8.12-5" --checksum 18aaf8247baa258f4e16de86673a3e799cd50bbef2824f0d2ae805c9c2068f11
-RUN . /opt/bitnami/scripts/libcomponent.sh && component_unpack "postgresql-client" "10.19.0-0" --checksum f32958c288efd50fb29133d614ce19dc46ff40add10fc82254b29c8a46df324c
-RUN . /opt/bitnami/scripts/libcomponent.sh && component_unpack "ini-file" "1.4.1-0" --checksum 3d189e4b1fcdc330fb84c14a7c6fb296deff37d3142d9a17fe0c9a5dba51ef6d
-RUN . /opt/bitnami/scripts/libcomponent.sh && component_unpack "git" "2.33.0-0" --checksum fd9a3245580fef6248f778efeba0a017675424f15ff16ace42c095496e4f02f3
-RUN . /opt/bitnami/scripts/libcomponent.sh && component_unpack "gosu" "1.14.0-0" --checksum 3e6fc37ca073b10a73a804d39c2f0c028947a1a596382a4f8ebe43dfbaa3a25e
-RUN . /opt/bitnami/scripts/libcomponent.sh && component_unpack "airflow-scheduler" "1.10.15-6" --checksum aae0955681b124e78df1aa252deaf599568030ef9e55471b1b5bc959a0279619
-RUN chmod g+rwX /opt/bitnami
-
-COPY rootfs /
-RUN /opt/bitnami/scripts/airflow-scheduler/postunpack.sh
-ENV AIRFLOW_HOME="/opt/bitnami/airflow" \
-    BITNAMI_APP_NAME="airflow-scheduler" \
-    BITNAMI_IMAGE_VERSION="1.10.15-debian-10-r220" \
-    LD_LIBRARY_PATH="/opt/bitnami/python/lib/:/opt/bitnami/airflow/venv/lib/python3.8/site-packages/numpy.libs/:$LD_LIBRARY_PATH" \
-    LIBNSS_WRAPPER_PATH="/opt/bitnami/common/lib/libnss_wrapper.so" \
-    LNAME="airflow" \
-    NSS_WRAPPER_GROUP="/opt/bitnami/airflow/nss_group" \
-    NSS_WRAPPER_PASSWD="/opt/bitnami/airflow/nss_passwd" \
-    PATH="/opt/bitnami/common/bin:/opt/bitnami/python/bin:/opt/bitnami/postgresql/bin:/opt/bitnami/git/bin:/opt/bitnami/airflow/venv/bin:$PATH"
-
-USER 1001
-ENTRYPOINT [ "/opt/bitnami/scripts/airflow-scheduler/entrypoint.sh" ]
-CMD [ "/opt/bitnami/scripts/airflow-scheduler/run.sh" ]
diff --git a/bitnami/airflow-scheduler/1/debian-10/docker-compose.yml b/bitnami/airflow-scheduler/1/debian-10/docker-compose.yml
deleted file mode 100644
index f519b1718ee6..000000000000
--- a/bitnami/airflow-scheduler/1/debian-10/docker-compose.yml
+++ /dev/null
@@ -1,15 +0,0 @@
-version: '2'
-services:
-  airflow-scheduler:
-    image: docker.io/bitnami/airflow-scheduler:1
-    environment:
-      - AIRFLOW_DATABASE_NAME=bitnami_airflow
-      - AIRFLOW_DATABASE_USERNAME=bn_airflow
-      - AIRFLOW_DATABASE_PASSWORD=bitnami1
-      - AIRFLOW_EXECUTOR=CeleryExecutor
-    volumes:
-      - airflow_scheduler_data:/bitnami
-    command: ["tail", "-f", "/dev/null"]
-volumes:
-  airflow_scheduler_data:
-    driver: local
diff --git a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/.bitnami_components.json b/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/.bitnami_components.json
deleted file mode 100644
index 8049ef0bc084..000000000000
--- a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/.bitnami_components.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
-  "airflow-scheduler": {
-    "arch": "amd64",
-    "digest": "aae0955681b124e78df1aa252deaf599568030ef9e55471b1b5bc959a0279619",
-    "distro": "debian-10",
-    "type": "NAMI",
-    "version": "1.10.15-6"
-  },
-  "git": {
-    "arch": "amd64",
-    "digest": "fd9a3245580fef6248f778efeba0a017675424f15ff16ace42c095496e4f02f3",
-    "distro": "debian-10",
-    "type": "NAMI",
-    "version": "2.33.0-0"
-  },
-  "gosu": {
-    "arch": "amd64",
-    "digest": "3e6fc37ca073b10a73a804d39c2f0c028947a1a596382a4f8ebe43dfbaa3a25e",
-    "distro": "debian-10",
-    "type": "NAMI",
-    "version": "1.14.0-0"
-  },
-  "ini-file": {
-    "arch": "amd64",
-    "digest": "3d189e4b1fcdc330fb84c14a7c6fb296deff37d3142d9a17fe0c9a5dba51ef6d",
-    "distro": "debian-10",
-    "type": "NAMI",
-    "version": "1.4.1-0"
-  },
-  "postgresql-client": {
-    "arch": "amd64",
-    "digest": "f32958c288efd50fb29133d614ce19dc46ff40add10fc82254b29c8a46df324c",
-    "distro": "debian-10",
-    "type": "NAMI",
-    "version": "10.19.0-0"
-  },
-  "python": {
-    "arch": "amd64",
-    "digest": "18aaf8247baa258f4e16de86673a3e799cd50bbef2824f0d2ae805c9c2068f11",
-    "distro": "debian-10",
-    "type": "NAMI",
-    "version": "3.8.12-5"
-  },
-  "wait-for-port": {
-    "arch": "amd64",
-    "digest": "28dc75dff64df07e67b711d20859c24ebc996db0eaac06138553341d0f769299",
-    "distro": "debian-10",
-    "type": "NAMI",
-    "version": "1.0.1-1"
-  }
-}
\ No newline at end of file
diff --git a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/licenses/licenses.txt
b/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/licenses/licenses.txt deleted file mode 100644 index c76ba31f3b8a..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/licenses/licenses.txt +++ /dev/null @@ -1,3 +0,0 @@ -Bitnami containers ship with software bundles. You can find the licenses under: -/opt/bitnami/nami/COPYING -/opt/bitnami/[name-of-bundle]/licenses/[bundle-version].txt diff --git a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libbitnami.sh b/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libbitnami.sh deleted file mode 100644 index ef29e361dad1..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libbitnami.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/bin/bash -# -# Bitnami custom library - -# shellcheck disable=SC1091 - -# Load Generic Libraries -. /opt/bitnami/scripts/liblog.sh - -# Constants -BOLD='\033[1m' - -# Functions - -######################## -# Print the welcome page -# Globals: -# DISABLE_WELCOME_MESSAGE -# BITNAMI_APP_NAME -# Arguments: -# None -# Returns: -# None -######################### -print_welcome_page() { - if [[ -z "${DISABLE_WELCOME_MESSAGE:-}" ]]; then - if [[ -n "$BITNAMI_APP_NAME" ]]; then - print_image_welcome_page - fi - fi -} - -######################## -# Print the welcome page for a Bitnami Docker image -# Globals: -# BITNAMI_APP_NAME -# Arguments: -# None -# Returns: -# None -######################### -print_image_welcome_page() { - local github_url="https://github.com/bitnami/bitnami-docker-${BITNAMI_APP_NAME}" - - log "" - log "${BOLD}Welcome to the Bitnami ${BITNAMI_APP_NAME} container${RESET}" - log "Subscribe to project updates by watching ${BOLD}${github_url}${RESET}" - log "Submit issues and feature requests at ${BOLD}${github_url}/issues${RESET}" - log "" -} - diff --git a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libcomponent.sh b/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libcomponent.sh deleted file mode 100644 index 1d8c6bf24375..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libcomponent.sh +++ /dev/null @@ -1,65 +0,0 @@ -#!/bin/bash -# -# Library for managing Bitnami components - -# Constants -CACHE_ROOT="/tmp/bitnami/pkg/cache" -DOWNLOAD_URL="https://downloads.bitnami.com/files/stacksmith" - -# Functions - -######################## -# Download and unpack a Bitnami package -# Globals: -# OS_NAME -# OS_ARCH -# OS_FLAVOUR -# Arguments: -# $1 - component's name -# $2 - component's version -# Returns: -# None -######################### -component_unpack() { - local name="${1:?name is required}" - local version="${2:?version is required}" - local base_name="${name}-${version}-${OS_NAME}-${OS_ARCH}-${OS_FLAVOUR}" - local package_sha256="" - local directory="/opt/bitnami" - - # Validate arguments - shift 2 - while [ "$#" -gt 0 ]; do - case "$1" in - -c|--checksum) - shift - package_sha256="${1:?missing package checksum}" - ;; - *) - echo "Invalid command line flag $1" >&2 - return 1 - ;; - esac - shift - done - - echo "Downloading $base_name package" - if [ -f "${CACHE_ROOT}/${base_name}.tar.gz" ]; then - echo "${CACHE_ROOT}/${base_name}.tar.gz already exists, skipping download." - cp "${CACHE_ROOT}/${base_name}.tar.gz" . 
- rm "${CACHE_ROOT}/${base_name}.tar.gz" - if [ -f "${CACHE_ROOT}/${base_name}.tar.gz.sha256" ]; then - echo "Using the local sha256 from ${CACHE_ROOT}/${base_name}.tar.gz.sha256" - package_sha256="$(< "${CACHE_ROOT}/${base_name}.tar.gz.sha256")" - rm "${CACHE_ROOT}/${base_name}.tar.gz.sha256" - fi - else - curl --remote-name --silent "${DOWNLOAD_URL}/${base_name}.tar.gz" - fi - if [ -n "$package_sha256" ]; then - echo "Verifying package integrity" - echo "$package_sha256 ${base_name}.tar.gz" | sha256sum --check - - fi - tar --directory "${directory}" --extract --gunzip --file "${base_name}.tar.gz" --no-same-owner --strip-components=2 "${base_name}/files/" - rm "${base_name}.tar.gz" -} diff --git a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libfile.sh b/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libfile.sh deleted file mode 100644 index 41ebaf7464f6..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libfile.sh +++ /dev/null @@ -1,139 +0,0 @@ -#!/bin/bash -# -# Library for managing files - -# shellcheck disable=SC1091 - -# Load Generic Libraries -. /opt/bitnami/scripts/libos.sh - -# Functions - -######################## -# Replace a regex-matching string in a file -# Arguments: -# $1 - filename -# $2 - match regex -# $3 - substitute regex -# $4 - use POSIX regex. Default: true -# Returns: -# None -######################### -replace_in_file() { - local filename="${1:?filename is required}" - local match_regex="${2:?match regex is required}" - local substitute_regex="${3:?substitute regex is required}" - local posix_regex=${4:-true} - - local result - - # We should avoid using 'sed in-place' substitutions - # 1) They are not compatible with files mounted from ConfigMap(s) - # 2) We found incompatibility issues with Debian10 and "in-place" substitutions - local -r del=$'\001' # Use a non-printable character as a 'sed' delimiter to avoid issues - if [[ $posix_regex = true ]]; then - result="$(sed -E "s${del}${match_regex}${del}${substitute_regex}${del}g" "$filename")" - else - result="$(sed "s${del}${match_regex}${del}${substitute_regex}${del}g" "$filename")" - fi - echo "$result" > "$filename" -} - -######################## -# Replace a regex-matching multiline string in a file -# Arguments: -# $1 - filename -# $2 - match regex -# $3 - substitute regex -# Returns: -# None -######################### -replace_in_file_multiline() { - local filename="${1:?filename is required}" - local match_regex="${2:?match regex is required}" - local substitute_regex="${3:?substitute regex is required}" - - local result - local -r del=$'\001' # Use a non-printable character as a 'sed' delimiter to avoid issues - result="$(perl -pe "BEGIN{undef $/;} s${del}${match_regex}${del}${substitute_regex}${del}sg" "$filename")" - echo "$result" > "$filename" -} - -######################## -# Remove a line in a file based on a regex -# Arguments: -# $1 - filename -# $2 - match regex -# $3 - use POSIX regex. 
Default: true -# Returns: -# None -######################### -remove_in_file() { - local filename="${1:?filename is required}" - local match_regex="${2:?match regex is required}" - local posix_regex=${3:-true} - local result - - # We should avoid using 'sed in-place' substitutions - # 1) They are not compatible with files mounted from ConfigMap(s) - # 2) We found incompatibility issues with Debian10 and "in-place" substitutions - if [[ $posix_regex = true ]]; then - result="$(sed -E "/$match_regex/d" "$filename")" - else - result="$(sed "/$match_regex/d" "$filename")" - fi - echo "$result" > "$filename" -} - -######################## -# Appends text after the last line matching a pattern -# Arguments: -# $1 - file -# $2 - match regex -# $3 - contents to add -# Returns: -# None -######################### -append_file_after_last_match() { - local file="${1:?missing file}" - local match_regex="${2:?missing pattern}" - local value="${3:?missing value}" - - # We read the file in reverse, replace the first match (0,/pattern/s) and then reverse the results again - result="$(tac "$file" | sed -E "0,/($match_regex)/s||${value}\n\1|" | tac)" - echo "$result" > "$file" -} - -######################## -# Wait until certain entry is present in a log file -# Arguments: -# $1 - entry to look for -# $2 - log file -# $3 - max retries. Default: 12 -# $4 - sleep between retries (in seconds). Default: 5 -# Returns: -# Boolean -######################### -wait_for_log_entry() { - local -r entry="${1:-missing entry}" - local -r log_file="${2:-missing log file}" - local -r retries="${3:-12}" - local -r interval_time="${4:-5}" - local attempt=0 - - check_log_file_for_entry() { - if ! grep -qE "$entry" "$log_file"; then - debug "Entry \"${entry}\" still not present in ${log_file} (attempt $((++attempt))/${retries})" - return 1 - fi - } - debug "Checking that ${log_file} log file contains entry \"${entry}\"" - if retry_while check_log_file_for_entry "$retries" "$interval_time"; then - debug "Found entry \"${entry}\" in ${log_file}" - true - else - error "Could not find entry \"${entry}\" in ${log_file} after ${retries} retries" - debug_execute cat "$log_file" - return 1 - fi -} diff --git a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libfs.sh b/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libfs.sh deleted file mode 100644 index b438c913341d..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libfs.sh +++ /dev/null @@ -1,184 +0,0 @@ -#!/bin/bash -# -# Library for file system actions - -# shellcheck disable=SC1091 - -# Load Generic Libraries -. 
/opt/bitnami/scripts/liblog.sh - -# Functions - -######################## -# Ensure a file/directory is owned (user and group) but the given user -# Arguments: -# $1 - filepath -# $2 - owner -# Returns: -# None -######################### -owned_by() { - local path="${1:?path is missing}" - local owner="${2:?owner is missing}" - - chown "$owner":"$owner" "$path" -} - -######################## -# Ensure a directory exists and, optionally, is owned by the given user -# Arguments: -# $1 - directory -# $2 - owner -# Returns: -# None -######################### -ensure_dir_exists() { - local dir="${1:?directory is missing}" - local owner="${2:-}" - - mkdir -p "${dir}" - if [[ -n $owner ]]; then - owned_by "$dir" "$owner" - fi -} - -######################## -# Checks whether a directory is empty or not -# arguments: -# $1 - directory -# returns: -# boolean -######################### -is_dir_empty() { - local -r path="${1:?missing directory}" - # Calculate real path in order to avoid issues with symlinks - local -r dir="$(realpath "$path")" - if [[ ! -e "$dir" ]] || [[ -z "$(ls -A "$dir")" ]]; then - true - else - false - fi -} - -######################## -# Checks whether a mounted directory is empty or not -# arguments: -# $1 - directory -# returns: -# boolean -######################### -is_mounted_dir_empty() { - local dir="${1:?missing directory}" - - if is_dir_empty "$dir" || find "$dir" -mindepth 1 -maxdepth 1 -not -name ".snapshot" -not -name "lost+found" -exec false {} +; then - true - else - false - fi -} - -######################## -# Checks whether a file can be written to or not -# arguments: -# $1 - file -# returns: -# boolean -######################### -is_file_writable() { - local file="${1:?missing file}" - local dir - dir="$(dirname "$file")" - - if [[ (-f "$file" && -w "$file") || (! -f "$file" && -d "$dir" && -w "$dir") ]]; then - true - else - false - fi -} - -######################## -# Relativize a path -# arguments: -# $1 - path -# $2 - base -# returns: -# None -######################### -relativize() { - local -r path="${1:?missing path}" - local -r base="${2:?missing base}" - pushd "$base" >/dev/null || exit - realpath -q --no-symlinks --relative-base="$base" "$path" | sed -e 's|^/$|.|' -e 's|^/||' - popd >/dev/null || exit -} - -######################## -# Configure permisions and ownership recursively -# Globals: -# None -# Arguments: -# $1 - paths (as a string). -# Flags: -# -f|--file-mode - mode for directories. -# -d|--dir-mode - mode for files. 
-# -u|--user - user -# -g|--group - group -# Returns: -# None -######################### -configure_permissions_ownership() { - local -r paths="${1:?paths is missing}" - local dir_mode="" - local file_mode="" - local user="" - local group="" - - # Validate arguments - shift 1 - while [ "$#" -gt 0 ]; do - case "$1" in - -f | --file-mode) - shift - file_mode="${1:?missing mode for files}" - ;; - -d | --dir-mode) - shift - dir_mode="${1:?missing mode for directories}" - ;; - -u | --user) - shift - user="${1:?missing user}" - ;; - -g | --group) - shift - group="${1:?missing group}" - ;; - *) - echo "Invalid command line flag $1" >&2 - return 1 - ;; - esac - shift - done - - read -r -a filepaths <<<"$paths" - for p in "${filepaths[@]}"; do - if [[ -e "$p" ]]; then - if [[ -n $dir_mode ]]; then - find -L "$p" -type d -exec chmod "$dir_mode" {} \; - fi - if [[ -n $file_mode ]]; then - find -L "$p" -type f -exec chmod "$file_mode" {} \; - fi - if [[ -n $user ]] && [[ -n $group ]]; then - chown -LR "$user":"$group" "$p" - elif [[ -n $user ]] && [[ -z $group ]]; then - chown -LR "$user" "$p" - elif [[ -z $user ]] && [[ -n $group ]]; then - chgrp -LR "$group" "$p" - fi - else - stderr_print "$p does not exist" - fi - done -} diff --git a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libhook.sh b/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libhook.sh deleted file mode 100644 index 9694852a7d25..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libhook.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -# -# Library to use for scripts expected to be used as Kubernetes lifecycle hooks - -# shellcheck disable=SC1091 - -# Load generic libraries -. /opt/bitnami/scripts/liblog.sh -. /opt/bitnami/scripts/libos.sh - -# Override functions that log to stdout/stderr of the current process, so they print to process 1 -for function_to_override in stderr_print debug_execute; do - # Output is sent to output of process 1 and thus end up in the container log - # The hook output in general isn't saved - eval "$(declare -f "$function_to_override") >/proc/1/fd/1 2>/proc/1/fd/2" -done diff --git a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/liblog.sh b/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/liblog.sh deleted file mode 100644 index c7c0f6d4422a..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/liblog.sh +++ /dev/null @@ -1,112 +0,0 @@ -#!/bin/bash -# -# Library for logging functions - -# Constants -RESET='\033[0m' -RED='\033[38;5;1m' -GREEN='\033[38;5;2m' -YELLOW='\033[38;5;3m' -MAGENTA='\033[38;5;5m' -CYAN='\033[38;5;6m' - -# Functions - -######################## -# Print to STDERR -# Arguments: -# Message to print -# Returns: -# None -######################### -stderr_print() { - # 'is_boolean_yes' is defined in libvalidations.sh, but depends on this file so we cannot source it - local bool="${BITNAMI_QUIET:-false}" - # comparison is performed without regard to the case of alphabetic characters - shopt -s nocasematch - if ! 
[[ "$bool" = 1 || "$bool" =~ ^(yes|true)$ ]]; then - printf "%b\\n" "${*}" >&2 - fi -} - -######################## -# Log message -# Arguments: -# Message to log -# Returns: -# None -######################### -log() { - stderr_print "${CYAN}${MODULE:-} ${MAGENTA}$(date "+%T.%2N ")${RESET}${*}" -} -######################## -# Log an 'info' message -# Arguments: -# Message to log -# Returns: -# None -######################### -info() { - log "${GREEN}INFO ${RESET} ==> ${*}" -} -######################## -# Log message -# Arguments: -# Message to log -# Returns: -# None -######################### -warn() { - log "${YELLOW}WARN ${RESET} ==> ${*}" -} -######################## -# Log an 'error' message -# Arguments: -# Message to log -# Returns: -# None -######################### -error() { - log "${RED}ERROR${RESET} ==> ${*}" -} -######################## -# Log a 'debug' message -# Globals: -# BITNAMI_DEBUG -# Arguments: -# None -# Returns: -# None -######################### -debug() { - # 'is_boolean_yes' is defined in libvalidations.sh, but depends on this file so we cannot source it - local bool="${BITNAMI_DEBUG:-false}" - # comparison is performed without regard to the case of alphabetic characters - shopt -s nocasematch - if [[ "$bool" = 1 || "$bool" =~ ^(yes|true)$ ]]; then - log "${MAGENTA}DEBUG${RESET} ==> ${*}" - fi -} - -######################## -# Indent a string -# Arguments: -# $1 - string -# $2 - number of indentation characters (default: 4) -# $3 - indentation character (default: " ") -# Returns: -# None -######################### -indent() { - local string="${1:-}" - local num="${2:?missing num}" - local char="${3:-" "}" - # Build the indentation unit string - local indent_unit="" - for ((i = 0; i < num; i++)); do - indent_unit="${indent_unit}${char}" - done - # shellcheck disable=SC2001 - # Complex regex, see https://github.com/koalaman/shellcheck/wiki/SC2001#exceptions - echo "$string" | sed "s/^/${indent_unit}/" -} diff --git a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libnet.sh b/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libnet.sh deleted file mode 100644 index 8bbf165e3e2a..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libnet.sh +++ /dev/null @@ -1,163 +0,0 @@ -#!/bin/bash -# -# Library for network functions - -# shellcheck disable=SC1091 - -# Load Generic Libraries -. /opt/bitnami/scripts/liblog.sh - -# Functions - -######################## -# Resolve IP address for a host/domain (i.e. 
DNS lookup) -# Arguments: -# $1 - Hostname to resolve -# $2 - IP address version (v4, v6), leave empty for resolving to any version -# Returns: -# IP -######################### -dns_lookup() { - local host="${1:?host is missing}" - local ip_version="${2:-}" - getent "ahosts${ip_version}" "$host" | awk '/STREAM/ {print $1 }' | head -n 1 -} - -######################### -# Wait for a hostname and return the IP -# Arguments: -# $1 - hostname -# $2 - number of retries -# $3 - seconds to wait between retries -# Returns: -# - IP address that corresponds to the hostname -######################### -wait_for_dns_lookup() { - local hostname="${1:?hostname is missing}" - local retries="${2:-5}" - local seconds="${3:-1}" - check_host() { - if [[ $(dns_lookup "$hostname") == "" ]]; then - false - else - true - fi - } - # Wait for the host to be ready - retry_while "check_host ${hostname}" "$retries" "$seconds" - dns_lookup "$hostname" -} - -######################## -# Get machine's IP -# Arguments: -# None -# Returns: -# Machine IP -######################### -get_machine_ip() { - local -a ip_addresses - local hostname - hostname="$(hostname)" - read -r -a ip_addresses <<< "$(dns_lookup "$hostname" | xargs echo)" - if [[ "${#ip_addresses[@]}" -gt 1 ]]; then - warn "Found more than one IP address associated to hostname ${hostname}: ${ip_addresses[*]}, will use ${ip_addresses[0]}" - elif [[ "${#ip_addresses[@]}" -lt 1 ]]; then - error "Could not find any IP address associated to hostname ${hostname}" - exit 1 - fi - echo "${ip_addresses[0]}" -} - -######################## -# Check if the provided argument is a resolved hostname -# Arguments: -# $1 - Value to check -# Returns: -# Boolean -######################### -is_hostname_resolved() { - local -r host="${1:?missing value}" - if [[ -n "$(dns_lookup "$host")" ]]; then - true - else - false - fi -} - -######################## -# Parse URL -# Globals: -# None -# Arguments: -# $1 - uri - String -# $2 - component to obtain. Valid options (scheme, authority, userinfo, host, port, path, query or fragment) - String -# Returns: -# String -parse_uri() { - local uri="${1:?uri is missing}" - local component="${2:?component is missing}" - - # Solution based on https://tools.ietf.org/html/rfc3986#appendix-B with - # additional sub-expressions to split authority into userinfo, host and port - # Credits to Patryk Obara (see https://stackoverflow.com/a/45977232/6694969) - local -r URI_REGEX='^(([^:/?#]+):)?(//((([^@/?#]+)@)?([^:/?#]+)(:([0-9]+))?))?(/([^?#]*))?(\?([^#]*))?(#(.*))?' - # || | ||| | | | | | | | | | - # |2 scheme | ||6 userinfo 7 host | 9 port | 11 rpath | 13 query | 15 fragment - # 1 scheme: | |5 userinfo@ 8 :... 10 path 12 ?... 14 #... - # | 4 authority - # 3 //... 
- local index=0 - case "$component" in - scheme) - index=2 - ;; - authority) - index=4 - ;; - userinfo) - index=6 - ;; - host) - index=7 - ;; - port) - index=9 - ;; - path) - index=10 - ;; - query) - index=13 - ;; - fragment) - index=14 - ;; - *) - stderr_print "unrecognized component $component" - return 1 - ;; - esac - [[ "$uri" =~ $URI_REGEX ]] && echo "${BASH_REMATCH[${index}]}" -} - -######################## -# Wait for a HTTP connection to succeed -# Globals: -# * -# Arguments: -# $1 - URL to wait for -# $2 - Maximum amount of retries (optional) -# $3 - Time between retries (optional) -# Returns: -# true if the HTTP connection succeeded, false otherwise -######################### -wait_for_http_connection() { - local url="${1:?missing url}" - local retries="${2:-}" - local sleep_time="${3:-}" - if ! retry_while "debug_execute curl --silent ${url}" "$retries" "$sleep_time"; then - error "Could not connect to ${url}" - return 1 - fi -} diff --git a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libos.sh b/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libos.sh deleted file mode 100644 index 6bd564773acb..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libos.sh +++ /dev/null @@ -1,448 +0,0 @@ -#!/bin/bash -# -# Library for operating system actions - -# shellcheck disable=SC1091 - -# Load Generic Libraries -. /opt/bitnami/scripts/liblog.sh -. /opt/bitnami/scripts/libfs.sh - -# Functions - -######################## -# Check if an user exists in the system -# Arguments: -# $1 - user -# Returns: -# Boolean -######################### -user_exists() { - local user="${1:?user is missing}" - id "$user" >/dev/null 2>&1 -} - -######################## -# Check if a group exists in the system -# Arguments: -# $1 - group -# Returns: -# Boolean -######################### -group_exists() { - local group="${1:?group is missing}" - getent group "$group" >/dev/null 2>&1 -} - -######################## -# Create a group in the system if it does not exist already -# Arguments: -# $1 - group -# Flags: -# -i|--gid - the ID for the new group -# -s|--system - Whether to create new user as system user (uid <= 999) -# Returns: -# None -######################### -ensure_group_exists() { - local group="${1:?group is missing}" - local gid="" - local is_system_user=false - - # Validate arguments - shift 1 - while [ "$#" -gt 0 ]; do - case "$1" in - -i|--gid) - shift - gid="${1:?missing gid}" - ;; - -s|--system) - is_system_user=true - ;; - *) - echo "Invalid command line flag $1" >&2 - return 1 - ;; - esac - shift - done - - if ! group_exists "$group"; then - local -a args=("$group") - if [[ -n "$gid" ]]; then - if group_exists "$gid" ; then - error "The GID $gid is already in use." 
>&2 - return 1 - fi - args+=("--gid" "$gid") - fi - $is_system_user && args+=("--system") - groupadd "${args[@]}" >/dev/null 2>&1 - fi -} - -######################## -# Create an user in the system if it does not exist already -# Arguments: -# $1 - user -# Flags: -# -i|--uid - the ID for the new user -# -g|--group - the group the new user should belong to -# -a|--append-groups - comma-separated list of supplemental groups to append to the new user -# -h|--home - the home directory for the new user -# -s|--system - whether to create new user as system user (uid <= 999) -# Returns: -# None -######################### -ensure_user_exists() { - local user="${1:?user is missing}" - local uid="" - local group="" - local append_groups="" - local home="" - local is_system_user=false - - # Validate arguments - shift 1 - while [ "$#" -gt 0 ]; do - case "$1" in - -i|--uid) - shift - uid="${1:?missing uid}" - ;; - -g|--group) - shift - group="${1:?missing group}" - ;; - -a|--append-groups) - shift - append_groups="${1:?missing append_groups}" - ;; - -h|--home) - shift - home="${1:?missing home directory}" - ;; - -s|--system) - is_system_user=true - ;; - *) - echo "Invalid command line flag $1" >&2 - return 1 - ;; - esac - shift - done - - if ! user_exists "$user"; then - local -a user_args=("-N" "$user") - if [[ -n "$uid" ]]; then - if user_exists "$uid" ; then - error "The UID $uid is already in use." - return 1 - fi - user_args+=("--uid" "$uid") - else - $is_system_user && user_args+=("--system") - fi - useradd "${user_args[@]}" >/dev/null 2>&1 - fi - - if [[ -n "$group" ]]; then - local -a group_args=("$group") - $is_system_user && group_args+=("--system") - ensure_group_exists "${group_args[@]}" - usermod -g "$group" "$user" >/dev/null 2>&1 - fi - - if [[ -n "$append_groups" ]]; then - local -a groups - read -ra groups <<< "$(tr ',;' ' ' <<< "$append_groups")" - for group in "${groups[@]}"; do - ensure_group_exists "$group" - usermod -aG "$group" "$user" >/dev/null 2>&1 - done - fi - - if [[ -n "$home" ]]; then - mkdir -p "$home" - usermod -d "$home" "$user" >/dev/null 2>&1 - configure_permissions_ownership "$home" -d "775" -f "664" -u "$user" -g "$group" - fi -} - -######################## -# Check if the script is currently running as root -# Arguments: -# $1 - user -# $2 - group -# Returns: -# Boolean -######################### -am_i_root() { - if [[ "$(id -u)" = "0" ]]; then - true - else - false - fi -} - -######################## -# Print OS metadata -# Arguments: -# $1 - Flag name -# Flags: -# --id - Distro ID -# --version - Distro version -# --branch - Distro branch -# --codename - Distro codename -# Returns: -# String -######################### -get_os_metadata() { - local -r flag_name="${1:?missing flag}" - # Helper function - get_os_release_metadata() { - local -r env_name="${1:?missing environment variable name}" - ( - . 
/etc/os-release - echo "${!env_name}" - ) - } - case "$flag_name" in - --id) - get_os_release_metadata ID - ;; - --version) - get_os_release_metadata VERSION_ID - ;; - --branch) - get_os_release_metadata VERSION_ID | sed 's/\..*//' - ;; - --codename) - get_os_release_metadata VERSION_CODENAME - ;; - *) - error "Unknown flag ${flag_name}" - return 1 - esac -} - -######################## -# Get total memory available -# Arguments: -# None -# Returns: -# Memory in bytes -######################### -get_total_memory() { - echo $(($(grep MemTotal /proc/meminfo | awk '{print $2}') / 1024)) -} - -######################## -# Get machine size depending on specified memory -# Globals: -# None -# Arguments: -# None -# Flags: -# --memory - memory size (optional) -# Returns: -# Detected instance size -######################### -get_machine_size() { - local memory="" - # Validate arguments - while [[ "$#" -gt 0 ]]; do - case "$1" in - --memory) - shift - memory="${1:?missing memory}" - ;; - *) - echo "Invalid command line flag $1" >&2 - return 1 - ;; - esac - shift - done - if [[ -z "$memory" ]]; then - debug "Memory was not specified, detecting available memory automatically" - memory="$(get_total_memory)" - fi - sanitized_memory=$(convert_to_mb "$memory") - if [[ "$sanitized_memory" -gt 26000 ]]; then - echo 2xlarge - elif [[ "$sanitized_memory" -gt 13000 ]]; then - echo xlarge - elif [[ "$sanitized_memory" -gt 6000 ]]; then - echo large - elif [[ "$sanitized_memory" -gt 3000 ]]; then - echo medium - elif [[ "$sanitized_memory" -gt 1500 ]]; then - echo small - else - echo micro - fi -} - -######################## -# Get machine size depending on specified memory -# Globals: -# None -# Arguments: -# $1 - memory size (optional) -# Returns: -# Detected instance size -######################### -get_supported_machine_sizes() { - echo micro small medium large xlarge 2xlarge -} - -######################## -# Convert memory size from string to amount of megabytes (i.e. 2G -> 2048) -# Globals: -# None -# Arguments: -# $1 - memory size -# Returns: -# Result of the conversion -######################### -convert_to_mb() { - local amount="${1:-}" - if [[ $amount =~ ^([0-9]+)(m|M|g|G) ]]; then - size="${BASH_REMATCH[1]}" - unit="${BASH_REMATCH[2]}" - if [[ "$unit" = "g" || "$unit" = "G" ]]; then - amount="$((size * 1024))" - else - amount="$size" - fi - fi - echo "$amount" -} - - -######################### -# Redirects output to /dev/null if debug mode is disabled -# Globals: -# BITNAMI_DEBUG -# Arguments: -# $@ - Command to execute -# Returns: -# None -######################### -debug_execute() { - if ${BITNAMI_DEBUG:-false}; then - "$@" - else - "$@" >/dev/null 2>&1 - fi -} - -######################## -# Retries a command a given number of times -# Arguments: -# $1 - cmd (as a string) -# $2 - max retries. Default: 12 -# $3 - sleep between retries (in seconds). 
Default: 5 -# Returns: -# Boolean -######################### -retry_while() { - local cmd="${1:?cmd is missing}" - local retries="${2:-12}" - local sleep_time="${3:-5}" - local return_value=1 - - read -r -a command <<< "$cmd" - for ((i = 1 ; i <= retries ; i+=1 )); do - "${command[@]}" && return_value=0 && break - sleep "$sleep_time" - done - return $return_value -} - -######################## -# Generate a random string -# Arguments: -# -t|--type - String type (ascii, alphanumeric, numeric), defaults to ascii -# -c|--count - Number of characters, defaults to 32 -# Arguments: -# None -# Returns: -# None -# Returns: -# String -######################### -generate_random_string() { - local type="ascii" - local count="32" - local filter - local result - # Validate arguments - while [[ "$#" -gt 0 ]]; do - case "$1" in - -t|--type) - shift - type="$1" - ;; - -c|--count) - shift - count="$1" - ;; - *) - echo "Invalid command line flag $1" >&2 - return 1 - ;; - esac - shift - done - # Validate type - case "$type" in - ascii) - filter="[:print:]" - ;; - alphanumeric) - filter="a-zA-Z0-9" - ;; - numeric) - filter="0-9" - ;; - *) - echo "Invalid type ${type}" >&2 - return 1 - esac - # Obtain count + 10 lines from /dev/urandom to ensure that the resulting string has the expected size - # Note there is a very small chance of strings starting with EOL character - # Therefore, the higher amount of lines read, this will happen less frequently - result="$(head -n "$((count + 10))" /dev/urandom | tr -dc "$filter" | head -c "$count")" - echo "$result" -} - -######################## -# Create md5 hash from a string -# Arguments: -# $1 - string -# Returns: -# md5 hash - string -######################### -generate_md5_hash() { - local -r str="${1:?missing input string}" - echo -n "$str" | md5sum | awk '{print $1}' -} - -######################## -# Create sha1 hash from a string -# Arguments: -# $1 - string -# $2 - algorithm - 1 (default), 224, 256, 384, 512 -# Returns: -# sha1 hash - string -######################### -generate_sha_hash() { - local -r str="${1:?missing input string}" - local -r algorithm="${2:-1}" - echo -n "$str" | "sha${algorithm}sum" | awk '{print $1}' -} - diff --git a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libpersistence.sh b/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libpersistence.sh deleted file mode 100644 index 99df69681c27..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libpersistence.sh +++ /dev/null @@ -1,122 +0,0 @@ -#!/bin/bash -# -# Bitnami persistence library -# Used for bringing persistence capabilities to applications that don't have clear separation of data and logic - -# shellcheck disable=SC1091 - -# Load Generic Libraries -. /opt/bitnami/scripts/libfs.sh -. /opt/bitnami/scripts/libos.sh -. /opt/bitnami/scripts/liblog.sh -. 
/opt/bitnami/scripts/libversion.sh - -# Functions - -######################## -# Persist an application directory -# Globals: -# BITNAMI_ROOT_DIR -# BITNAMI_VOLUME_DIR -# Arguments: -# $1 - App folder name -# $2 - List of app files to persist -# Returns: -# true if all steps succeeded, false otherwise -######################### -persist_app() { - local -r app="${1:?missing app}" - local -a files_to_restore - read -r -a files_to_persist <<< "$(tr ',;:' ' ' <<< "$2")" - local -r install_dir="${BITNAMI_ROOT_DIR}/${app}" - local -r persist_dir="${BITNAMI_VOLUME_DIR}/${app}" - # Persist the individual files - if [[ "${#files_to_persist[@]}" -le 0 ]]; then - warn "No files are configured to be persisted" - return - fi - pushd "$install_dir" >/dev/null || exit - local file_to_persist_relative file_to_persist_destination file_to_persist_destination_folder - local -r tmp_file="/tmp/perms.acl" - for file_to_persist in "${files_to_persist[@]}"; do - if [[ ! -f "$file_to_persist" && ! -d "$file_to_persist" ]]; then - error "Cannot persist '${file_to_persist}' because it does not exist" - return 1 - fi - file_to_persist_relative="$(relativize "$file_to_persist" "$install_dir")" - file_to_persist_destination="${persist_dir}/${file_to_persist_relative}" - file_to_persist_destination_folder="$(dirname "$file_to_persist_destination")" - # Get original permissions for existing files, which will be applied later - # Exclude the root directory with 'sed', to avoid issues when copying the entirety of it to a volume - getfacl -R "$file_to_persist_relative" | sed -E '/# file: (\..+|[^.])/,$!d' > "$tmp_file" - # Copy directories to the volume - ensure_dir_exists "$file_to_persist_destination_folder" - cp -Lr --preserve=links "$file_to_persist_relative" "$file_to_persist_destination_folder" - # Restore permissions - pushd "$persist_dir" >/dev/null || exit - if am_i_root; then - setfacl --restore="$tmp_file" - else - # When running as non-root, don't change ownership - setfacl --restore=<(grep -E -v '^# (owner|group):' "$tmp_file") - fi - popd >/dev/null || exit - done - popd >/dev/null || exit - rm -f "$tmp_file" - # Install the persisted files into the installation directory, via symlinks - restore_persisted_app "$@" -} - -######################## -# Restore a persisted application directory -# Globals: -# BITNAMI_ROOT_DIR -# BITNAMI_VOLUME_DIR -# FORCE_MAJOR_UPGRADE -# Arguments: -# $1 - App folder name -# $2 - List of app files to restore -# Returns: -# true if all steps succeeded, false otherwise -######################### -restore_persisted_app() { - local -r app="${1:?missing app}" - local -a files_to_restore - read -r -a files_to_restore <<< "$(tr ',;:' ' ' <<< "$2")" - local -r install_dir="${BITNAMI_ROOT_DIR}/${app}" - local -r persist_dir="${BITNAMI_VOLUME_DIR}/${app}" - # Restore the individual persisted files - if [[ "${#files_to_restore[@]}" -le 0 ]]; then - warn "No persisted files are configured to be restored" - return - fi - local file_to_restore_relative file_to_restore_origin file_to_restore_destination - for file_to_restore in "${files_to_restore[@]}"; do - file_to_restore_relative="$(relativize "$file_to_restore" "$install_dir")" - # We use 'realpath --no-symlinks' to ensure that the case of '.' 
is covered and the directory is removed - file_to_restore_origin="$(realpath --no-symlinks "${install_dir}/${file_to_restore_relative}")" - file_to_restore_destination="$(realpath --no-symlinks "${persist_dir}/${file_to_restore_relative}")" - rm -rf "$file_to_restore_origin" - ln -sfn "$file_to_restore_destination" "$file_to_restore_origin" - done -} - -######################## -# Check if an application directory was already persisted -# Globals: -# BITNAMI_VOLUME_DIR -# Arguments: -# $1 - App folder name -# Returns: -# true if all steps succeeded, false otherwise -######################### -is_app_initialized() { - local -r app="${1:?missing app}" - local -r persist_dir="${BITNAMI_VOLUME_DIR}/${app}" - if ! is_mounted_dir_empty "$persist_dir"; then - true - else - false - fi -} diff --git a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libservice.sh b/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libservice.sh deleted file mode 100644 index a713bd108e15..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libservice.sh +++ /dev/null @@ -1,273 +0,0 @@ -#!/bin/bash -# -# Library for managing services - -# shellcheck disable=SC1091 - -# Load Generic Libraries -. /opt/bitnami/scripts/libvalidations.sh -. /opt/bitnami/scripts/liblog.sh - -# Functions - -######################## -# Read the provided pid file and returns a PID -# Arguments: -# $1 - Pid file -# Returns: -# PID -######################### -get_pid_from_file() { - local pid_file="${1:?pid file is missing}" - - if [[ -f "$pid_file" ]]; then - if [[ -n "$(< "$pid_file")" ]] && [[ "$(< "$pid_file")" -gt 0 ]]; then - echo "$(< "$pid_file")" - fi - fi -} - -######################## -# Check if a provided PID corresponds to a running service -# Arguments: -# $1 - PID -# Returns: -# Boolean -######################### -is_service_running() { - local pid="${1:?pid is missing}" - - kill -0 "$pid" 2>/dev/null -} - -######################## -# Stop a service by sending a termination signal to its pid -# Arguments: -# $1 - Pid file -# $2 - Signal number (optional) -# Returns: -# None -######################### -stop_service_using_pid() { - local pid_file="${1:?pid file is missing}" - local signal="${2:-}" - local pid - - pid="$(get_pid_from_file "$pid_file")" - [[ -z "$pid" ]] || ! 
is_service_running "$pid" && return - - if [[ -n "$signal" ]]; then - kill "-${signal}" "$pid" - else - kill "$pid" - fi - - local counter=10 - while [[ "$counter" -ne 0 ]] && is_service_running "$pid"; do - sleep 1 - counter=$((counter - 1)) - done -} - -######################## -# Start cron daemon -# Arguments: -# None -# Returns: -# true if started correctly, false otherwise -######################### -cron_start() { - if [[ -x "/usr/sbin/cron" ]]; then - /usr/sbin/cron - elif [[ -x "/usr/sbin/crond" ]]; then - /usr/sbin/crond - else - false - fi -} - -######################## -# Generate a cron configuration file for a given service -# Arguments: -# $1 - Service name -# $2 - Command -# Flags: -# --run-as - User to run as (default: root) -# --schedule - Cron schedule configuration (default: * * * * *) -# Returns: -# None -######################### -generate_cron_conf() { - local service_name="${1:?service name is missing}" - local cmd="${2:?command is missing}" - local run_as="root" - local schedule="* * * * *" - local clean="true" - - local clean="true" - - # Parse optional CLI flags - shift 2 - while [[ "$#" -gt 0 ]]; do - case "$1" in - --run-as) - shift - run_as="$1" - ;; - --schedule) - shift - schedule="$1" - ;; - --no-clean) - clean="false" - ;; - *) - echo "Invalid command line flag ${1}" >&2 - return 1 - ;; - esac - shift - done - - mkdir -p /etc/cron.d - if "$clean"; then - echo "${schedule} ${run_as} ${cmd}" > /etc/cron.d/"$service_name" - else - echo "${schedule} ${run_as} ${cmd}" >> /etc/cron.d/"$service_name" - fi -} - -######################## -# Remove a cron configuration file for a given service -# Arguments: -# $1 - Service name -# Returns: -# None -######################### -remove_cron_conf() { - local service_name="${1:?service name is missing}" - local cron_conf_dir="/etc/monit/conf.d" - rm -f "${cron_conf_dir}/${service_name}" -} - -######################## -# Generate a monit configuration file for a given service -# Arguments: -# $1 - Service name -# $2 - Pid file -# $3 - Start command -# $4 - Stop command -# Flags: -# --disable - Whether to disable the monit configuration -# Returns: -# None -######################### -generate_monit_conf() { - local service_name="${1:?service name is missing}" - local pid_file="${2:?pid file is missing}" - local start_command="${3:?start command is missing}" - local stop_command="${4:?stop command is missing}" - local monit_conf_dir="/etc/monit/conf.d" - local disabled="no" - - # Parse optional CLI flags - shift 4 - while [[ "$#" -gt 0 ]]; do - case "$1" in - --disable) - disabled="yes" - ;; - *) - echo "Invalid command line flag ${1}" >&2 - return 1 - ;; - esac - shift - done - - is_boolean_yes "$disabled" && conf_suffix=".disabled" - mkdir -p "$monit_conf_dir" - cat >"${monit_conf_dir}/${service_name}.conf${conf_suffix:-}" <&2 - return 1 - ;; - esac - shift - done - - mkdir -p "$logrotate_conf_dir" - cat <"${logrotate_conf_dir}/${service_name}" -${log_path} { - ${period} - rotate ${rotations} - dateext - compress - copytruncate - missingok -$(indent "$extra" 2) -} -EOF -} - -######################## -# Remove a logrotate configuration file -# Arguments: -# $1 - Service name -# Returns: -# None -######################### -remove_logrotate_conf() { - local service_name="${1:?service name is missing}" - local logrotate_conf_dir="/etc/logrotate.d" - rm -f "${logrotate_conf_dir}/${service_name}" -} diff --git a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libvalidations.sh 
b/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libvalidations.sh deleted file mode 100644 index 2d7aaa943513..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libvalidations.sh +++ /dev/null @@ -1,264 +0,0 @@ -#!/bin/bash -# -# Validation functions library - -# shellcheck disable=SC1091 - -# Load Generic Libraries -. /opt/bitnami/scripts/liblog.sh - -# Functions - -######################## -# Check if the provided argument is an integer -# Arguments: -# $1 - Value to check -# Returns: -# Boolean -######################### -is_int() { - local -r int="${1:?missing value}" - if [[ "$int" =~ ^-?[0-9]+ ]]; then - true - else - false - fi -} - -######################## -# Check if the provided argument is a positive integer -# Arguments: -# $1 - Value to check -# Returns: -# Boolean -######################### -is_positive_int() { - local -r int="${1:?missing value}" - if is_int "$int" && (( "${int}" >= 0 )); then - true - else - false - fi -} - -######################## -# Check if the provided argument is a boolean or is the string 'yes/true' -# Arguments: -# $1 - Value to check -# Returns: -# Boolean -######################### -is_boolean_yes() { - local -r bool="${1:-}" - # comparison is performed without regard to the case of alphabetic characters - shopt -s nocasematch - if [[ "$bool" = 1 || "$bool" =~ ^(yes|true)$ ]]; then - true - else - false - fi -} - -######################## -# Check if the provided argument is a boolean yes/no value -# Arguments: -# $1 - Value to check -# Returns: -# Boolean -######################### -is_yes_no_value() { - local -r bool="${1:-}" - if [[ "$bool" =~ ^(yes|no)$ ]]; then - true - else - false - fi -} - -######################## -# Check if the provided argument is a boolean true/false value -# Arguments: -# $1 - Value to check -# Returns: -# Boolean -######################### -is_true_false_value() { - local -r bool="${1:-}" - if [[ "$bool" =~ ^(true|false)$ ]]; then - true - else - false - fi -} - -######################## -# Check if the provided argument is a boolean 1/0 value -# Arguments: -# $1 - Value to check -# Returns: -# Boolean -######################### -is_1_0_value() { - local -r bool="${1:-}" - if [[ "$bool" =~ ^[10]$ ]]; then - true - else - false - fi -} - -######################## -# Check if the provided argument is an empty string or not defined -# Arguments: -# $1 - Value to check -# Returns: -# Boolean -######################### -is_empty_value() { - local -r val="${1:-}" - if [[ -z "$val" ]]; then - true - else - false - fi -} - -######################## -# Validate if the provided argument is a valid port -# Arguments: -# $1 - Port to validate -# Returns: -# Boolean and error message -######################### -validate_port() { - local value - local unprivileged=0 - - # Parse flags - while [[ "$#" -gt 0 ]]; do - case "$1" in - -unprivileged) - unprivileged=1 - ;; - --) - shift - break - ;; - -*) - stderr_print "unrecognized flag $1" - return 1 - ;; - *) - break - ;; - esac - shift - done - - if [[ "$#" -gt 1 ]]; then - echo "too many arguments provided" - return 2 - elif [[ "$#" -eq 0 ]]; then - stderr_print "missing port argument" - return 1 - else - value=$1 - fi - - if [[ -z "$value" ]]; then - echo "the value is empty" - return 1 - else - if ! 
is_int "$value"; then - echo "value is not an integer" - return 2 - elif [[ "$value" -lt 0 ]]; then - echo "negative value provided" - return 2 - elif [[ "$value" -gt 65535 ]]; then - echo "requested port is greater than 65535" - return 2 - elif [[ "$unprivileged" = 1 && "$value" -lt 1024 ]]; then - echo "privileged port requested" - return 3 - fi - fi -} - -######################## -# Validate if the provided argument is a valid IPv4 address -# Arguments: -# $1 - IP to validate -# Returns: -# Boolean -######################### -validate_ipv4() { - local ip="${1:?ip is missing}" - local stat=1 - - if [[ $ip =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then - read -r -a ip_array <<< "$(tr '.' ' ' <<< "$ip")" - [[ ${ip_array[0]} -le 255 && ${ip_array[1]} -le 255 \ - && ${ip_array[2]} -le 255 && ${ip_array[3]} -le 255 ]] - stat=$? - fi - return $stat -} - -######################## -# Validate a string format -# Arguments: -# $1 - String to validate -# Returns: -# Boolean -######################### -validate_string() { - local string - local min_length=-1 - local max_length=-1 - - # Parse flags - while [ "$#" -gt 0 ]; do - case "$1" in - -min-length) - shift - min_length=${1:-} - ;; - -max-length) - shift - max_length=${1:-} - ;; - --) - shift - break - ;; - -*) - stderr_print "unrecognized flag $1" - return 1 - ;; - *) - break - ;; - esac - shift - done - - if [ "$#" -gt 1 ]; then - stderr_print "too many arguments provided" - return 2 - elif [ "$#" -eq 0 ]; then - stderr_print "missing string" - return 1 - else - string=$1 - fi - - if [[ "$min_length" -ge 0 ]] && [[ "${#string}" -lt "$min_length" ]]; then - echo "string length is less than $min_length" - return 1 - fi - if [[ "$max_length" -ge 0 ]] && [[ "${#string}" -gt "$max_length" ]]; then - echo "string length is great than $max_length" - return 1 - fi -} diff --git a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libversion.sh b/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libversion.sh deleted file mode 100644 index f3bc7568bfa5..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libversion.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/bash -# -# Library for managing versions strings - -# shellcheck disable=SC1091 - -# Load Generic Libraries -. /opt/bitnami/scripts/liblog.sh - -# Functions -######################## -# Gets semantic version -# Arguments: -# $1 - version: string to extract major.minor.patch -# $2 - section: 1 to extract major, 2 to extract minor, 3 to extract patch -# Returns: -# array with the major, minor and release -######################### -get_sematic_version () { - local version="${1:?version is required}" - local section="${2:?section is required}" - local -a version_sections - - #Regex to parse versions: x.y.z - local -r regex='([0-9]+)(\.([0-9]+)(\.([0-9]+))?)?' - - if [[ "$version" =~ $regex ]]; then - local i=1 - local j=1 - local n=${#BASH_REMATCH[*]} - - while [[ $i -lt $n ]]; do - if [[ -n "${BASH_REMATCH[$i]}" ]] && [[ "${BASH_REMATCH[$i]:0:1}" != '.' 
]]; then - version_sections[$j]=${BASH_REMATCH[$i]} - ((j++)) - fi - ((i++)) - done - - local number_regex='^[0-9]+$' - if [[ "$section" =~ $number_regex ]] && (( section > 0 )) && (( section <= 3 )); then - echo "${version_sections[$section]}" - return - else - stderr_print "Section allowed values are: 1, 2, and 3" - return 1 - fi - fi -} diff --git a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libwebserver.sh b/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libwebserver.sh deleted file mode 100644 index 459f99c7d1cd..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/opt/bitnami/scripts/libwebserver.sh +++ /dev/null @@ -1,458 +0,0 @@ -#!/bin/bash -# -# Bitnami web server handler library - -# shellcheck disable=SC1090,SC1091 - -# Load generic libraries -. /opt/bitnami/scripts/liblog.sh - -######################## -# Execute a command (or list of commands) with the web server environment and library loaded -# Globals: -# * -# Arguments: -# None -# Returns: -# None -######################### -web_server_execute() { - local -r web_server="${1:?missing web server}" - shift - # Run program in sub-shell to avoid web server environment getting loaded when not necessary - ( - . "/opt/bitnami/scripts/lib${web_server}.sh" - . "/opt/bitnami/scripts/${web_server}-env.sh" - "$@" - ) -} - -######################## -# Prints the list of enabled web servers -# Globals: -# None -# Arguments: -# None -# Returns: -# None -######################### -web_server_list() { - local -r -a supported_web_servers=(apache nginx) - local -a existing_web_servers=() - for web_server in "${supported_web_servers[@]}"; do - [[ -f "/opt/bitnami/scripts/${web_server}-env.sh" ]] && existing_web_servers+=("$web_server") - done - echo "${existing_web_servers[@]:-}" -} - -######################## -# Prints the currently-enabled web server type (only one, in order of preference) -# Globals: -# None -# Arguments: -# None -# Returns: -# None -######################### -web_server_type() { - local -a web_servers - read -r -a web_servers <<< "$(web_server_list)" - echo "${web_servers[0]:-}" -} - -######################## -# Validate that a supported web server is configured -# Globals: -# None -# Arguments: -# None -# Returns: -# None -######################### -web_server_validate() { - local error_code=0 - local supported_web_servers=("apache" "nginx") - - # Auxiliary functions - print_validation_error() { - error "$1" - error_code=1 - } - - if [[ -z "$(web_server_type)" || ! " ${supported_web_servers[*]} " == *" $(web_server_type) "* ]]; then - print_validation_error "Could not detect any supported web servers. It must be one of: ${supported_web_servers[*]}" - elif ! web_server_execute "$(web_server_type)" type -t "is_$(web_server_type)_running" >/dev/null; then - print_validation_error "Could not load the $(web_server_type) web server library from /opt/bitnami/scripts. Check that it exists and is readable." 
- fi - - return "$error_code" -} - -######################## -# Check whether the web server is running -# Globals: -# * -# Arguments: -# None -# Returns: -# true if the web server is running, false otherwise -######################### -is_web_server_running() { - "is_$(web_server_type)_running" -} - -######################## -# Start web server -# Globals: -# * -# Arguments: -# None -# Returns: -# None -######################### -web_server_start() { - info "Starting $(web_server_type) in background" - "${BITNAMI_ROOT_DIR}/scripts/$(web_server_type)/start.sh" -} - -######################## -# Stop web server -# Globals: -# * -# Arguments: -# None -# Returns: -# None -######################### -web_server_stop() { - info "Stopping $(web_server_type)" - "${BITNAMI_ROOT_DIR}/scripts/$(web_server_type)/stop.sh" -} - -######################## -# Restart web server -# Globals: -# * -# Arguments: -# None -# Returns: -# None -######################### -web_server_restart() { - info "Restarting $(web_server_type)" - "${BITNAMI_ROOT_DIR}/scripts/$(web_server_type)/restart.sh" -} - -######################## -# Reload web server -# Globals: -# * -# Arguments: -# None -# Returns: -# None -######################### -web_server_reload() { - "${BITNAMI_ROOT_DIR}/scripts/$(web_server_type)/reload.sh" -} - -######################## -# Ensure a web server application configuration exists (i.e. Apache virtual host format or NGINX server block) -# It serves as a wrapper for the specific web server function -# Globals: -# * -# Arguments: -# $1 - App name -# Flags: -# --type - Application type, which has an effect on which configuration template to use -# --hosts - Host listen addresses -# --server-name - Server name -# --server-aliases - Server aliases -# --allow-remote-connections - Whether to allow remote connections or to require local connections -# --disable - Whether to render server configurations with a .disabled prefix -# --disable-http - Whether to render the app's HTTP server configuration with a .disabled prefix -# --disable-https - Whether to render the app's HTTPS server configuration with a .disabled prefix -# --http-port - HTTP port number -# --https-port - HTTPS port number -# --document-root - Path to document root directory -# Apache-specific flags: -# --apache-additional-configuration - Additional vhost configuration (no default) -# --apache-before-vhost-configuration - Configuration to add before the directive (no default) -# --apache-allow-override - Whether to allow .htaccess files (only allowed when --move-htaccess is set to 'no' and type is not defined) -# --apache-extra-directory-configuration - Extra configuration for the document root directory -# --apache-proxy-address - Address where to proxy requests -# --apache-proxy-configuration - Extra configuration for the proxy -# --apache-proxy-http-configuration - Extra configuration for the proxy HTTP vhost -# --apache-proxy-https-configuration - Extra configuration for the proxy HTTPS vhost -# --apache-move-htaccess - Move .htaccess files to a common place so they can be loaded during Apache startup (only allowed when type is not defined) -# NGINX-specific flags: -# --nginx-additional-configuration - Additional server block configuration (no default) -# --nginx-external-configuration - Configuration external to server block (no default) -# Returns: -# true if the configuration was enabled, false otherwise -######################## -ensure_web_server_app_configuration_exists() { - local app="${1:?missing app}" - shift - local -a 
apache_args nginx_args web_servers args_var - apache_args=("$app") - nginx_args=("$app") - # Validate arguments - while [[ "$#" -gt 0 ]]; do - case "$1" in - # Common flags - --disable \ - | --disable-http \ - | --disable-https \ - ) - apache_args+=("$1") - nginx_args+=("$1") - ;; - --hosts \ - | --server-name \ - | --server-aliases \ - | --type \ - | --allow-remote-connections \ - | --http-port \ - | --https-port \ - | --document-root \ - ) - apache_args+=("$1" "${2:?missing value}") - nginx_args+=("$1" "${2:?missing value}") - shift - ;; - - # Specific Apache flags - --apache-additional-configuration \ - | --apache-before-vhost-configuration \ - | --apache-allow-override \ - | --apache-extra-directory-configuration \ - | --apache-proxy-address \ - | --apache-proxy-configuration \ - | --apache-proxy-http-configuration \ - | --apache-proxy-https-configuration \ - | --apache-move-htaccess \ - ) - apache_args+=("${1//apache-/}" "${2:?missing value}") - shift - ;; - - # Specific NGINX flags - --nginx-additional-configuration \ - | --nginx-external-configuration) - nginx_args+=("${1//nginx-/}" "${2:?missing value}") - shift - ;; - - *) - echo "Invalid command line flag $1" >&2 - return 1 - ;; - esac - shift - done - read -r -a web_servers <<< "$(web_server_list)" - for web_server in "${web_servers[@]}"; do - args_var="${web_server}_args[@]" - web_server_execute "$web_server" "ensure_${web_server}_app_configuration_exists" "${!args_var}" - done -} - -######################## -# Ensure a web server application configuration does not exist anymore (i.e. Apache virtual host format or NGINX server block) -# It serves as a wrapper for the specific web server function -# Globals: -# * -# Arguments: -# $1 - App name -# Returns: -# true if the configuration was disabled, false otherwise -######################## -ensure_web_server_app_configuration_not_exists() { - local app="${1:?missing app}" - local -a web_servers - read -r -a web_servers <<< "$(web_server_list)" - for web_server in "${web_servers[@]}"; do - web_server_execute "$web_server" "ensure_${web_server}_app_configuration_not_exists" "$app" - done -} - -######################## -# Ensure the web server loads the configuration for an application in a URL prefix -# It serves as a wrapper for the specific web server function -# Globals: -# * -# Arguments: -# $1 - App name -# Flags: -# --allow-remote-connections - Whether to allow remote connections or to require local connections -# --document-root - Path to document root directory -# --prefix - URL prefix from where it will be accessible (i.e. 
/myapp) -# --type - Application type, which has an effect on what configuration template will be used -# Apache-specific flags: -# --apache-additional-configuration - Additional vhost configuration (no default) -# --apache-allow-override - Whether to allow .htaccess files (only allowed when --move-htaccess is set to 'no') -# --apache-extra-directory-configuration - Extra configuration for the document root directory -# --apache-move-htaccess - Move .htaccess files to a common place so they can be loaded during Apache startup -# NGINX-specific flags: -# --nginx-additional-configuration - Additional server block configuration (no default) -# Returns: -# true if the configuration was enabled, false otherwise -######################## -ensure_web_server_prefix_configuration_exists() { - local app="${1:?missing app}" - shift - local -a apache_args nginx_args web_servers args_var - apache_args=("$app") - nginx_args=("$app") - # Validate arguments - while [[ "$#" -gt 0 ]]; do - case "$1" in - # Common flags - --allow-remote-connections \ - | --document-root \ - | --prefix \ - | --type \ - ) - apache_args+=("$1" "${2:?missing value}") - nginx_args+=("$1" "${2:?missing value}") - shift - ;; - - # Specific Apache flags - --apache-additional-configuration \ - | --apache-allow-override \ - | --apache-extra-directory-configuration \ - | --apache-move-htaccess \ - ) - apache_args+=("${1//apache-/}" "$2") - shift - ;; - - # Specific NGINX flags - --nginx-additional-configuration) - nginx_args+=("${1//nginx-/}" "$2") - shift - ;; - - *) - echo "Invalid command line flag $1" >&2 - return 1 - ;; - esac - shift - done - read -r -a web_servers <<< "$(web_server_list)" - for web_server in "${web_servers[@]}"; do - args_var="${web_server}_args[@]" - web_server_execute "$web_server" "ensure_${web_server}_prefix_configuration_exists" "${!args_var}" - done -} - -######################## -# Ensure a web server application configuration is updated with the runtime configuration (i.e. 
ports) -# It serves as a wrapper for the specific web server function -# Globals: -# * -# Arguments: -# $1 - App name -# Flags: -# --hosts - Host listen addresses -# --server-name - Server name -# --server-aliases - Server aliases -# --enable-http - Enable HTTP app configuration (if not enabled already) -# --enable-https - Enable HTTPS app configuration (if not enabled already) -# --disable-http - Disable HTTP app configuration (if not disabled already) -# --disable-https - Disable HTTPS app configuration (if not disabled already) -# --http-port - HTTP port number -# --https-port - HTTPS port number -# Returns: -# true if the configuration was updated, false otherwise -######################## -web_server_update_app_configuration() { - local app="${1:?missing app}" - shift - local -a args web_servers - args=("$app") - # Validate arguments - while [[ "$#" -gt 0 ]]; do - case "$1" in - # Common flags - --enable-http \ - | --enable-https \ - | --disable-http \ - | --disable-https \ - ) - args+=("$1") - ;; - --hosts \ - | --server-name \ - | --server-aliases \ - | --enable-http \ - | --enable-https \ - | --disable-http \ - | --disable-https \ - | --http-port \ - | --https-port \ - ) - args+=("$1" "${2:?missing value}") - shift - ;; - - *) - echo "Invalid command line flag $1" >&2 - return 1 - ;; - esac - shift - done - read -r -a web_servers <<< "$(web_server_list)" - for web_server in "${web_servers[@]}"; do - web_server_execute "$web_server" "${web_server}_update_app_configuration" "${args[@]}" - done -} - -######################## -# Enable loading page, which shows users that the initialization process is not yet completed -# Globals: -# * -# Arguments: -# None -# Returns: -# None -######################### -web_server_enable_loading_page() { - ensure_web_server_app_configuration_exists "__loading" --hosts "_default_" \ - --apache-additional-configuration " -# Show a HTTP 503 Service Unavailable page by default -RedirectMatch 503 ^/$ -# Show index.html if server is answering with 404 Not Found or 503 Service Unavailable status codes -ErrorDocument 404 /index.html -ErrorDocument 503 /index.html" \ - --nginx-additional-configuration " -# Show a HTTP 503 Service Unavailable page by default -location / { - return 503; -} -# Show index.html if server is answering with 404 Not Found or 503 Service Unavailable status codes -error_page 404 @installing; -error_page 503 @installing; -location @installing { - rewrite ^(.*)$ /index.html break; -}" - web_server_reload -} - -######################## -# Enable loading page, which shows users that the initialization process is not yet completed -# Globals: -# * -# Arguments: -# None -# Returns: -# None -######################### -web_server_disable_install_page() { - ensure_web_server_app_configuration_not_exists "__loading" - web_server_reload -} diff --git a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/usr/sbin/install_packages b/bitnami/airflow-scheduler/1/debian-10/prebuildfs/usr/sbin/install_packages deleted file mode 100755 index c9577647443b..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/prebuildfs/usr/sbin/install_packages +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -set -e -set -u -export DEBIAN_FRONTEND=noninteractive -n=0 -max=2 -until [ $n -gt $max ]; do - set +e - ( - apt-get update -qq && - apt-get install -y --no-install-recommends "$@" - ) - CODE=$? 
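To illustrate how the web server wrappers deleted above were meant to be combined, a hypothetical initialization flow (not a snippet from this image; "myapp" and the port are placeholders, and the sketch assumes libwebserver.sh plus the matching apache/nginx scripts exist under /opt/bitnami/scripts) could look like this:

```bash
#!/bin/bash
# Hypothetical flow built on the wrappers above.
. /opt/bitnami/scripts/libwebserver.sh

# Serve the temporary 503 "installing" page while initialization runs
web_server_enable_loading_page

# ... application-specific initialization steps would run here ...

# Switch the app to HTTPS only, then remove the temporary page and reload
web_server_update_app_configuration "myapp" --enable-https --disable-http --https-port 8443
web_server_disable_install_page
```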
- set -e - if [ $CODE -eq 0 ]; then - break - fi - if [ $n -eq $max ]; then - exit $CODE - fi - echo "apt failed, retrying" - n=$(($n + 1)) -done -rm -r /var/lib/apt/lists /var/cache/apt/archives diff --git a/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler-env.sh b/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler-env.sh deleted file mode 100644 index 61c4fcdb879d..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler-env.sh +++ /dev/null @@ -1,97 +0,0 @@ -#!/bin/bash -# -# Environment configuration for airflow-scheduler - -# The values for all environment variables will be set in the below order of precedence -# 1. Custom environment variables defined below after Bitnami defaults -# 2. Constants defined in this file (environment variables with no default), i.e. BITNAMI_ROOT_DIR -# 3. Environment variables overridden via external files using *_FILE variables (see below) -# 4. Environment variables set externally (i.e. current Bash context/Dockerfile/userdata) - -# Load logging library -. /opt/bitnami/scripts/liblog.sh - -export BITNAMI_ROOT_DIR="/opt/bitnami" -export BITNAMI_VOLUME_DIR="/bitnami" - -# Logging configuration -export MODULE="${MODULE:-airflow-scheduler}" -export BITNAMI_DEBUG="${BITNAMI_DEBUG:-false}" - -# By setting an environment variable matching *_FILE to a file path, the prefixed environment -# variable will be overridden with the value specified in that file -airflow_scheduler_env_vars=( - AIRFLOW_EXECUTOR - AIRFLOW_FERNET_KEY - AIRFLOW_SECRET_KEY - AIRFLOW_WEBSERVER_HOST - AIRFLOW_WEBSERVER_PORT_NUMBER - AIRFLOW_LOAD_EXAMPLES - AIRFLOW_HOSTNAME_CALLABLE - AIRFLOW_DATABASE_HOST - AIRFLOW_DATABASE_PORT_NUMBER - AIRFLOW_DATABASE_NAME - AIRFLOW_DATABASE_USERNAME - AIRFLOW_DATABASE_PASSWORD - AIRFLOW_DATABASE_USE_SSL - AIRFLOW_REDIS_USE_SSL - REDIS_HOST - REDIS_PORT_NUMBER - REDIS_USER - REDIS_PASSWORD -) -for env_var in "${airflow_scheduler_env_vars[@]}"; do - file_env_var="${env_var}_FILE" - if [[ -n "${!file_env_var:-}" ]]; then - if [[ -r "${!file_env_var:-}" ]]; then - export "${env_var}=$(< "${!file_env_var}")" - unset "${file_env_var}" - else - warn "Skipping export of '${env_var}'. '${!file_env_var:-}' is not readable." 
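The `*_FILE` convention described in the environment file above is typically used to read secrets from mounted files rather than passing them as plain environment variables. A minimal sketch, with placeholder paths and an unpinned image tag:

```bash
# Sketch: override AIRFLOW_DATABASE_PASSWORD from a mounted secret file
# (paths, container name and image tag are placeholders).
docker run -d --name airflow-scheduler \
    -v /path/to/secrets/db_password:/run/secrets/db_password:ro \
    -e AIRFLOW_DATABASE_PASSWORD_FILE=/run/secrets/db_password \
    bitnami/airflow-scheduler
```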
- fi - fi -done -unset airflow_scheduler_env_vars - -# Airflow paths -export AIRFLOW_BASE_DIR="${BITNAMI_ROOT_DIR}/airflow" -export AIRFLOW_HOME="${AIRFLOW_BASE_DIR}" -export AIRFLOW_BIN_DIR="${AIRFLOW_BASE_DIR}/venv/bin" -export AIRFLOW_VOLUME_DIR="/bitnami/airflow" -export AIRFLOW_DATA_DIR="${AIRFLOW_BASE_DIR}/data" -export AIRFLOW_LOGS_DIR="${AIRFLOW_BASE_DIR}/logs" -export AIRFLOW_SCHEDULER_LOGS_DIR="${AIRFLOW_LOGS_DIR}/scheduler" -export AIRFLOW_LOG_FILE="${AIRFLOW_LOGS_DIR}/airflow-scheduler.log" -export AIRFLOW_CONF_FILE="${AIRFLOW_BASE_DIR}/airflow.cfg" -export AIRFLOW_TMP_DIR="${AIRFLOW_BASE_DIR}/tmp" -export AIRFLOW_PID_FILE="${AIRFLOW_TMP_DIR}/airflow-scheduler.pid" -export AIRFLOW_DATA_TO_PERSIST="$AIRFLOW_DATA_DIR" -export AIRFLOW_DAGS_DIR="${AIRFLOW_BASE_DIR}/dags" - -# System users (when running with a privileged user) -export AIRFLOW_DAEMON_USER="airflow" -export AIRFLOW_DAEMON_GROUP="airflow" - -# Airflow configuration -export AIRFLOW_EXECUTOR="${AIRFLOW_EXECUTOR:-SequentialExecutor}" -export AIRFLOW_FERNET_KEY="${AIRFLOW_FERNET_KEY:-}" -export AIRFLOW_SECRET_KEY="${AIRFLOW_SECRET_KEY:-}" -export AIRFLOW_WEBSERVER_HOST="${AIRFLOW_WEBSERVER_HOST:-127.0.0.1}" -export AIRFLOW_WEBSERVER_PORT_NUMBER="${AIRFLOW_WEBSERVER_PORT_NUMBER:-8080}" -export AIRFLOW_LOAD_EXAMPLES="${AIRFLOW_LOAD_EXAMPLES:-yes}" -export AIRFLOW_HOSTNAME_CALLABLE="${AIRFLOW_HOSTNAME_CALLABLE:-}" - -# Airflow database configuration -export AIRFLOW_DATABASE_HOST="${AIRFLOW_DATABASE_HOST:-postgresql}" -export AIRFLOW_DATABASE_PORT_NUMBER="${AIRFLOW_DATABASE_PORT_NUMBER:-5432}" -export AIRFLOW_DATABASE_NAME="${AIRFLOW_DATABASE_NAME:-bitnami_airflow}" -export AIRFLOW_DATABASE_USERNAME="${AIRFLOW_DATABASE_USERNAME:-bn_airflow}" -export AIRFLOW_DATABASE_PASSWORD="${AIRFLOW_DATABASE_PASSWORD:-}" -export AIRFLOW_DATABASE_USE_SSL="${AIRFLOW_DATABASE_USE_SSL:-no}" -export AIRFLOW_REDIS_USE_SSL="${AIRFLOW_REDIS_USE_SSL:-no}" -export REDIS_HOST="${REDIS_HOST:-redis}" -export REDIS_PORT_NUMBER="${REDIS_PORT_NUMBER:-6379}" -export REDIS_USER="${REDIS_USER:-}" -export REDIS_PASSWORD="${REDIS_PASSWORD:-}" - -# Custom environment variables may be defined below diff --git a/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler/entrypoint.sh b/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler/entrypoint.sh deleted file mode 100755 index f5dc789d491a..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler/entrypoint.sh +++ /dev/null @@ -1,41 +0,0 @@ -#!/bin/bash - -# shellcheck disable=SC1091 - -set -o errexit -set -o nounset -set -o pipefail -# set -o xtrace # Uncomment this line for debugging purpose - -# Load Airflow environment variables -. /opt/bitnami/scripts/airflow-scheduler-env.sh - -# Load libraries -. /opt/bitnami/scripts/libbitnami.sh -. /opt/bitnami/scripts/libairflowscheduler.sh - -print_welcome_page - -if ! am_i_root && [[ -e "$LIBNSS_WRAPPER_PATH" ]]; then - info "Enabling non-root system user with nss_wrapper" - echo "airflow:x:$(id -u):$(id -g):Airflow:$AIRFLOW_HOME:/bin/false" > "$NSS_WRAPPER_PASSWD" - echo "airflow:x:$(id -g):" > "$NSS_WRAPPER_GROUP" - - export LD_PRELOAD="$LIBNSS_WRAPPER_PATH" -fi - -# Install custom python package if requirements.txt is present -if [[ -f "/bitnami/python/requirements.txt" ]]; then - . 
/opt/bitnami/airflow/venv/bin/activate - pip install -r /bitnami/python/requirements.txt - deactivate -fi - -if [[ "$*" = *"/opt/bitnami/scripts/airflow-scheduler/run.sh"* || "$*" = *"/run.sh"* ]]; then - info "** Starting Airflow setup **" - /opt/bitnami/scripts/airflow-scheduler/setup.sh - info "** Airflow setup finished! **" -fi - -echo "" -exec "$@" diff --git a/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler/postunpack.sh b/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler/postunpack.sh deleted file mode 100755 index 32ee82bcd10f..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler/postunpack.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash - -# shellcheck disable=SC1091 - -set -o errexit -set -o nounset -set -o pipefail -# set -o xtrace # Uncomment this line for debugging purpose - -# Load Airflow environment variables -. /opt/bitnami/scripts/airflow-scheduler-env.sh - -# Load libraries -. /opt/bitnami/scripts/libairflowscheduler.sh -. /opt/bitnami/scripts/libfs.sh -. /opt/bitnami/scripts/libos.sh - -for dir in "$AIRFLOW_VOLUME_DIR" "$AIRFLOW_BASE_DIR" "$AIRFLOW_DATA_DIR"; do - ensure_dir_exists "$dir" -done - -# Ensure the needed directories exist with write permissions -for dir in "$AIRFLOW_TMP_DIR" "$AIRFLOW_LOGS_DIR" "$AIRFLOW_DAGS_DIR"; do - ensure_dir_exists "$dir" - configure_permissions_ownership "$dir" -d "775" -f "664" -g "root" -done - -chmod -R g+rwX /bitnami "$AIRFLOW_VOLUME_DIR" "$AIRFLOW_BASE_DIR" diff --git a/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler/run.sh b/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler/run.sh deleted file mode 100755 index 9b5c290b3f7f..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler/run.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash - -# shellcheck disable=SC1091 - -set -o errexit -set -o nounset -set -o pipefail -# set -o xtrace # Uncomment this line for debugging purposes - -# Load Airflow environment variables -. /opt/bitnami/scripts/airflow-scheduler-env.sh - -# Load libraries -. /opt/bitnami/scripts/libos.sh -. /opt/bitnami/scripts/libairflowscheduler.sh - -args=("--pid" "$AIRFLOW_PID_FILE" "$@") - -info "** Starting Airflow **" -if am_i_root; then - exec gosu "$AIRFLOW_DAEMON_USER" "${AIRFLOW_BIN_DIR}/airflow" "scheduler" "${args[@]}" -else - exec "${AIRFLOW_BIN_DIR}/airflow" "scheduler" "${args[@]}" -fi diff --git a/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler/setup.sh b/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler/setup.sh deleted file mode 100755 index 93e2487a55d7..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/airflow-scheduler/setup.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash - -# shellcheck disable=SC1091 - -set -o errexit -set -o nounset -set -o pipefail -# set -o xtrace # Uncomment this line for debugging purposes - -# Load Airflow environment variables -. /opt/bitnami/scripts/airflow-scheduler-env.sh - -# Load libraries -. /opt/bitnami/scripts/libos.sh -. /opt/bitnami/scripts/libfs.sh -. 
/opt/bitnami/scripts/libairflowscheduler.sh - -# Ensure Airflow environment variables settings are valid -airflow_scheduler_validate -# Ensure Airflow daemon user exists when running as root -am_i_root && ensure_user_exists "$AIRFLOW_DAEMON_USER" --group "$AIRFLOW_DAEMON_GROUP" -# Ensure Airflow is initialized -airflow_scheduler_initialize diff --git a/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/libairflow.sh b/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/libairflow.sh deleted file mode 100644 index df42e00100f6..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/libairflow.sh +++ /dev/null @@ -1,487 +0,0 @@ -#!/bin/bash - -# Bitnami Airflow library - -# shellcheck disable=SC1091,SC2153 - -# Load Generic Libraries -. /opt/bitnami/scripts/libfile.sh -. /opt/bitnami/scripts/liblog.sh -. /opt/bitnami/scripts/libnet.sh -. /opt/bitnami/scripts/libos.sh -. /opt/bitnami/scripts/libservice.sh -. /opt/bitnami/scripts/libvalidations.sh -. /opt/bitnami/scripts/libpersistence.sh - -# Functions - -######################## -# Validate Airflow inputs -# Globals: -# AIRFLOW_* -# Arguments: -# None -# Returns: -# 0 if the validation succeeded, 1 otherwise -######################### -airflow_validate() { - local error_code=0 - - # Auxiliary functions - print_validation_error() { - error "$1" - error_code=1 - } - - check_multi_value() { - if [[ " ${2} " != *" ${!1} "* ]]; then - print_validation_error "The allowed values for ${1} are: ${2}" - fi - } - - # Check postgresql host - [[ -z "$AIRFLOW_DATABASE_HOST" ]] && print_validation_error "Missing AIRFLOW_DATABASE_HOST" - - # Check LDAP parameters - if is_boolean_yes "$AIRFLOW_LDAP_ENABLE"; then - [[ -z "$AIRFLOW_LDAP_URI" ]] && print_validation_error "Missing AIRFLOW_LDAP_URI" - [[ -z "$AIRFLOW_LDAP_SEARCH" ]] && print_validation_error "Missing AIRFLOW_LDAP_SEARCH" - [[ -z "$AIRFLOW_LDAP_UID_FIELD" ]] && print_validation_error "Missing AIRFLOW_LDAP_UID_FIELD" - [[ -z "$AIRFLOW_LDAP_BIND_USER" ]] && print_validation_error "Missing AIRFLOW_LDAP_BIND_USER" - [[ -z "$AIRFLOW_LDAP_BIND_PASSWORD" ]] && print_validation_error "Missing AIRFLOW_LDAP_BIND_PASSWORD" - [[ -z "$AIRFLOW_LDAP_ROLES_MAPPING" ]] && print_validation_error "Missing AIRFLOW_LDAP_ROLES_MAPPING" - [[ -z "$AIRFLOW_LDAP_ROLES_SYNC_AT_LOGIN" ]] && print_validation_error "Missing AIRFLOW_LDAP_ROLES_SYNC_AT_LOGIN" - [[ -z "$AIRFLOW_LDAP_USER_REGISTRATION" ]] && print_validation_error "Missing AIRFLOW_LDAP_USER_REGISTRATION" - [[ -z "$AIRFLOW_LDAP_USER_REGISTRATION_ROLE" ]] && print_validation_error "Missing AIRFLOW_LDAP_USER_REGISTRATION_ROLE" - - # Chack boolean env vars contain valid values - for var in "AIRFLOW_LDAP_USER_REGISTRATION" "AIRFLOW_LDAP_ROLES_SYNC_AT_LOGIN" "AIRFLOW_LDAP_USE_TLS"; do - check_multi_value "$var" "True False" - done - - if [[ "$AIRFLOW_LDAP_USE_TLS" == "True" ]]; then - [[ -z "$AIRFLOW_LDAP_ALLOW_SELF_SIGNED" ]] && print_validation_error "Missing AIRFLOW_LDAP_ALLOW_SELF_SIGNED" - [[ -z "$AIRFLOW_LDAP_TLS_CA_CERTIFICATE" ]] && print_validation_error "Missing AIRFLOW_LDAP_TLS_CA_CERTIFICATE" - fi - - fi - - # Check pool parameters - if [[ -n "$AIRFLOW_POOL_NAME" ]]; then - [[ -z "$AIRFLOW_POOL_DESC" ]] && print_validation_error "Provided AIRFLOW_POOL_NAME but missing AIRFLOW_POOL_DESC" - [[ -z "$AIRFLOW_POOL_SIZE" ]] && print_validation_error "Provided AIRFLOW_POOL_NAME but missing AIRFLOW_POOL_SIZE" - fi - - return "$error_code" -} - -######################## -# Ensure Airflow is initialized -# 
Globals: -# AIRFLOW_* -# Arguments: -# None -# Returns: -# None -######################### -airflow_initialize() { - info "Initializing Airflow ..." - - # Change permissions if running as root - for dir in "$AIRFLOW_DATA_DIR" "$AIRFLOW_TMP_DIR" "$AIRFLOW_LOGS_DIR" "$AIRFLOW_DAGS_DIR"; do - ensure_dir_exists "$dir" - am_i_root && chown "$AIRFLOW_DAEMON_USER:$AIRFLOW_DAEMON_GROUP" "$dir" - done - - # The configuration file is not persisted. If it is not provided, generate it based on env vars - if [[ ! -f "$AIRFLOW_CONF_FILE" ]]; then - info "No injected configuration file found. Creating default config file" - airflow_generate_config - else - info "Configuration file found, loading configuration" - fi - - - # Check if Airflow has already been initialized and persisted in a previous run - local -r app_name="airflow" - if ! is_app_initialized "$app_name"; then - # Delete pid file - rm -f "$AIRFLOW_PID_FILE" - - airflow_wait_for_postgresql "$AIRFLOW_DATABASE_HOST" "$AIRFLOW_DATABASE_PORT_NUMBER" - - # Initialize database - airflow_execute_command "initdb" "db init" - - airflow_create_admin_user - - airflow_create_pool - - info "Persisting Airflow installation" - persist_app "$app_name" "$AIRFLOW_DATA_TO_PERSIST" - else - # Check database connection - airflow_wait_for_postgresql "$AIRFLOW_DATABASE_HOST" "$AIRFLOW_DATABASE_PORT_NUMBER" - - # Restore persisted data - info "Restoring persisted Airflow installation" - restore_persisted_app "$app_name" "$AIRFLOW_DATA_TO_PERSIST" - - # Upgrade database - airflow_execute_command "upgradedb" "db upgrade" - - # Change the permissions after restoring the persisted data in case we are root - for dir in "$AIRFLOW_DATA_DIR" "$AIRFLOW_TMP_DIR" "$AIRFLOW_LOGS_DIR"; do - ensure_dir_exists "$dir" - am_i_root && chown "$AIRFLOW_DAEMON_USER:$AIRFLOW_DAEMON_GROUP" "$dir" - done - true # Avoid return false when I am not root - fi -} - -######################## -# Executes airflow command -# Globals: -# AIRFLOW_* -# Arguments: -# None -# Returns: -# None -######################### -airflow_execute_command() { - local oldCommand="${1?Missing old command}" - local newCommand="${2?Missing new command}" - local flags="${3:-}" - - # The commands can contain more than one argument. 
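The command dispatcher defined here accepts both the Airflow 1 and Airflow 2 spellings of a CLI command and runs whichever matches the image generation. A hedged usage sketch, assuming libairflow.sh and its environment file have been sourced (the user details are placeholders):

```bash
# Sketch: the same call works on both image generations.
airflow_execute_command "initdb" "db init"   # 1.x runs "airflow initdb", 2.x runs "airflow db init"
airflow_execute_command "create_user" "users create" \
    "-r Admin -u user -e user@example.com -p pass -f First -l Last"
```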
Convert them to an array - IFS=' ' read -ra oldCommand <<< "$oldCommand" - IFS=' ' read -ra newCommand <<< "$newCommand" - - # Execute commands depending on the version - command=("${oldCommand[@]}") - [[ "${BITNAMI_IMAGE_VERSION:0:1}" == "2" ]] && command=("${newCommand[@]}") - - # Add flags if provided - [[ -n "$flags" ]] && IFS=' ' read -ra flags <<< "$flags" && command+=("${flags[@]}") - - debug "Executing ${AIRFLOW_BIN_DIR}/airflow ${command[*]}" - debug_execute "${AIRFLOW_BIN_DIR}/airflow" "${command[@]}" -} - -######################## -# Generate Airflow conf file -# Globals: -# AIRFLOW_* -# Arguments: -# None -# Returns: -# None -######################### -airflow_generate_config() { - # Generate Airflow default files - airflow_execute_command "version" "version" - - # Setup Airflow base URL - airflow_configure_base_url - # Configure Airflow Hostname - [[ -n "$AIRFLOW_HOSTNAME_CALLABLE" ]] && airflow_conf_set "core" "hostname_callable" "$AIRFLOW_HOSTNAME_CALLABLE" - # Configure Airflow webserver authentication - airflow_configure_webserver_authentication - # Configure Airflow to load examples - if is_boolean_yes "$AIRFLOW_LOAD_EXAMPLES"; then - airflow_conf_set "core" "load_examples" "True" - else - airflow_conf_set "core" "load_examples" "False" - fi - # Configure Airflow database - airflow_configure_database - - # Configure the Webserver port - airflow_conf_set "webserver" "web_server_port" "$AIRFLOW_WEBSERVER_PORT_NUMBER" - - # Setup the secret keys for database connection and flask application (fernet key and secret key) - # ref: https://airflow.apache.org/docs/apache-airflow/stable/configurations-ref.html#fernet-key - # ref: https://airflow.apache.org/docs/apache-airflow/stable/configurations-ref.html#secret-key - [[ -n "$AIRFLOW_FERNET_KEY" ]] && airflow_conf_set "core" "fernet_key" "$AIRFLOW_FERNET_KEY" - [[ -n "$AIRFLOW_SECRET_KEY" ]] && airflow_conf_set "webserver" "secret_key" "$AIRFLOW_SECRET_KEY" - - # Configure Airflow executor - airflow_conf_set "core" "executor" "$AIRFLOW_EXECUTOR" - [[ "$AIRFLOW_EXECUTOR" == "CeleryExecutor" || "$AIRFLOW_EXECUTOR" == "CeleryKubernetesExecutor" ]] && airflow_configure_celery_executor - true # Avoid the function to fail due to the check above -} - -######################## -# Set property on the Airflow configuration file -# Globals: -# AIRFLOW_* -# Arguments: -# None -# Returns: -# None -######################### -airflow_conf_set() { - local -r section="${1:?section is required}" - local -r key="${2:?key is required}" - local -r value="${3:?value is required}" - local -r file="${4:-${AIRFLOW_CONF_FILE}}" - - ini-file set --section "$section" --key "$key" --value "$value" "$file" -} - -######################## -# Configure Airflow base url -# Globals: -# AIRFLOW_* -# Arguments: -# None -# Returns: -# None -######################### -airflow_configure_base_url() { - if [[ -z "$AIRFLOW_BASE_URL" ]]; then - airflow_conf_set "webserver" "base_url" "http://${AIRFLOW_WEBSERVER_HOST}:${AIRFLOW_WEBSERVER_PORT_NUMBER}" - else - airflow_conf_set "webserver" "base_url" "$AIRFLOW_BASE_URL" - fi -} - -######################## -# Configure Airflow webserver authentication -# Globals: -# AIRFLOW_* -# Arguments: -# None -# Returns: -# None -######################### -airflow_configure_webserver_authentication() { - info "Configuring Airflow webserver authentication" - airflow_conf_set "webserver" "rbac" "true" - - if is_boolean_yes "$AIRFLOW_LDAP_ENABLE"; then - info "Enabling LDAP authentication" - replace_in_file "$AIRFLOW_WEBSERVER_CONF_FILE" "# 
from flask_appbuilder.security.manager import AUTH_LDAP" "from flask_appbuilder.security.manager import AUTH_LDAP" - replace_in_file "$AIRFLOW_WEBSERVER_CONF_FILE" "from flask_appbuilder.security.manager import AUTH_DB" "# from flask_appbuilder.security.manager import AUTH_DB" - - # webserver config - airflow_webserver_conf_set "AUTH_TYPE" "AUTH_LDAP" - airflow_webserver_conf_set "AUTH_LDAP_SERVER" "'$AIRFLOW_LDAP_URI'" - - # searches - airflow_webserver_conf_set "AUTH_LDAP_SEARCH" "'$AIRFLOW_LDAP_SEARCH'" - airflow_webserver_conf_set "AUTH_LDAP_UID_FIELD" "'$AIRFLOW_LDAP_UID_FIELD'" - - # Special account for searches - airflow_webserver_conf_set "AUTH_LDAP_BIND_USER" "'$AIRFLOW_LDAP_BIND_USER'" - airflow_webserver_conf_set "AUTH_LDAP_BIND_PASSWORD" "'$AIRFLOW_LDAP_BIND_PASSWORD'" - - # User self registration - airflow_webserver_conf_set "AUTH_USER_REGISTRATION" "$AIRFLOW_LDAP_USER_REGISTRATION" - airflow_webserver_conf_set "AUTH_USER_REGISTRATION_ROLE" "'$AIRFLOW_LDAP_USER_REGISTRATION_ROLE'" - - # Mapping from LDAP DN to list of FAB roles - airflow_webserver_conf_set "AUTH_ROLES_MAPPING" "$AIRFLOW_LDAP_ROLES_MAPPING" - - # Replace user's roles at login - airflow_webserver_conf_set "AUTH_ROLES_SYNC_AT_LOGIN" "$AIRFLOW_LDAP_ROLES_SYNC_AT_LOGIN" - - # Allowing/Denying of self signed certs for StartTLS OR SSL ldaps:// connections - airflow_webserver_conf_set "AUTH_LDAP_ALLOW_SELF_SIGNED" "$AIRFLOW_LDAP_ALLOW_SELF_SIGNED" - - # If StartTLS supply cert - if [[ "$AIRFLOW_LDAP_USE_TLS" == "True" ]]; then - airflow_webserver_conf_set "AUTH_LDAP_TLS_CACERTFILE" "$AIRFLOW_LDAP_TLS_CA_CERTIFICATE" - fi - fi -} - -######################## -# Set properties in Airflow's webserver_config.py -# Globals: -# AIRFLOW_* -# Arguments: -# None -# Returns: -# None -######################### -airflow_webserver_conf_set() { - local -r key="${1:?missing key}" - local -r value="${2:?missing key}" - shift 2 - - local -r file="$AIRFLOW_WEBSERVER_CONF_FILE" - # Check if the value was set before - if grep -q "^#*\\s*${key} =.*$" "$file"; then - # Update the existing key - replace_in_file "$file" "^#*\\s*${key} =.*$" "${key} = ${value}" false - else - # Add a new key - printf '\n%s = %s' "$key" "$value" >>"$file" - fi -} - -######################## -# Configure Airflow database -# Globals: -# AIRFLOW_* -# Arguments: -# None -# Returns: -# None -######################### -airflow_configure_database() { - local -r user=$(airflow_encode_url "$AIRFLOW_DATABASE_USERNAME") - local -r password=$(airflow_encode_url "$AIRFLOW_DATABASE_PASSWORD") - local extra_options - is_boolean_yes "$AIRFLOW_DATABASE_USE_SSL" && extra_options="?sslmode=require" - - info "Configuring Airflow database" - airflow_conf_set "core" "sql_alchemy_conn" "postgresql+psycopg2://${user}:${password}@${AIRFLOW_DATABASE_HOST}:${AIRFLOW_DATABASE_PORT_NUMBER}/${AIRFLOW_DATABASE_NAME}${extra_options:-}" -} - -######################## -# Return URL encoded string in the airflow conf format -# Globals: -# AIRFLOW_* -# Arguments: -# None -# Returns: -# None -######################### -airflow_encode_url() { - local -r url="${1?Missing url}" - - urlencode() { - old_lc_collate="${LC_COLLATE:-}" - LC_COLLATE=C - - local length="${#1}" - for (( i = 0; i < length; i++ )); do - local c="${1:$i:1}" - case $c in - [a-zA-Z0-9.~_-]) printf '%s' "$c" ;; - *) printf '%%%02X' "'$c" ;; - esac - done - - LC_COLLATE="$old_lc_collate" - } - - local -r url_encoded=$(urlencode "$url") - # Replace % by %% - echo "${url_encoded//\%/\%\%}" -} - -######################## -# Configure 
Airflow celery executor -# Globals: -# AIRFLOW_* -# Arguments: -# None -# Returns: -# None -######################### -airflow_configure_celery_executor() { - info "Configuring Celery Executor" - - # Configure celery Redis url - local -r redis_user=$(airflow_encode_url "$REDIS_USER") - local -r redis_password=$(airflow_encode_url "$REDIS_PASSWORD") - airflow_conf_set "celery" "broker_url" "redis://${redis_user}:${redis_password}@${REDIS_HOST}:${REDIS_PORT_NUMBER}/1" - is_boolean_yes "$AIRFLOW_REDIS_USE_SSL" && airflow_conf_set "celery" "redis_backend_use_ssl" "true" - - # Configure celery backend - local -r database_user=$(airflow_encode_url "$AIRFLOW_DATABASE_USERNAME") - local -r database_password=$(airflow_encode_url "$AIRFLOW_DATABASE_PASSWORD") - local database_extra_options - is_boolean_yes "$AIRFLOW_DATABASE_USE_SSL" && database_extra_options="?sslmode=require" - airflow_conf_set "celery" "result_backend" "db+postgresql://${database_user}:${database_password}@${AIRFLOW_DATABASE_HOST}:${AIRFLOW_DATABASE_PORT_NUMBER}/${AIRFLOW_DATABASE_NAME}${database_extra_options:-}" -} - -######################## -# Wait for PostgreSQL -# Arguments: -# None -# Returns: -# None -######################### -airflow_wait_for_postgresql() { - local -r postgresql_host="${1?Missing host}" - local -r postgresql_port="${2?Missing port}" - - info "Waiting for PostgreSQL to be available at ${postgresql_host}:${postgresql_port}..." - wait-for-port --host "$postgresql_host" "$postgresql_port" -} - -######################## -# Airflow create admin user -# Arguments: -# None -# Returns: -# None -######################### -airflow_create_admin_user() { - info "Creating Airflow admin user" - airflow_execute_command "create_user" "users create" "-r Admin -u ${AIRFLOW_USERNAME} -e ${AIRFLOW_EMAIL} -p ${AIRFLOW_PASSWORD} -f ${AIRFLOW_FIRSTNAME} -l ${AIRFLOW_LASTNAME}" -} - -######################## -# Airflow create pool -# Arguments: -# None -# Returns: -# None -######################### -airflow_create_pool() { - if [[ -n "$AIRFLOW_POOL_NAME" ]] && [[ -n "$AIRFLOW_POOL_SIZE" ]] && [[ -n "$AIRFLOW_POOL_DESC" ]]; then - info "Creating Airflow pool" - airflow_execute_command "pool" "pool" "-s ${AIRFLOW_POOL_NAME} ${AIRFLOW_POOL_SIZE} ${AIRFLOW_POOL_DESC}" - fi -} - -######################## -# Check if Airflow is running -# Globals: -# AIRFLOW_PID_FILE -# Arguments: -# None -# Returns: -# Whether Airflow is running -######################## -is_airflow_running() { - local pid - pid="$(get_pid_from_file "$AIRFLOW_PID_FILE")" - if [[ -n "$pid" ]]; then - is_service_running "$pid" - else - false - fi -} - -######################## -# Check if Airflow is running -# Globals: -# AIRFLOW_PID_FILE -# Arguments: -# None -# Returns: -# Whether Airflow is not running -######################## -is_airflow_not_running() { - ! is_airflow_running -} - -######################## -# Stop Airflow -# Globals: -# AIRFLOW* -# Arguments: -# None -# Returns: -# None -######################### -airflow_stop() { - info "Stopping Airflow..." 
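As a worked illustration of the Celery wiring above, with sample values only (REDIS_PASSWORD=secret, an empty REDIS_USER, and the default database settings from the environment file), the library would effectively apply configuration roughly equivalent to:

```bash
# Worked example with placeholder values; credentials pass through
# airflow_encode_url before being embedded in the connection strings.
airflow_conf_set "celery" "broker_url" \
    "redis://:secret@redis:6379/1"
airflow_conf_set "celery" "result_backend" \
    "db+postgresql://bn_airflow:@postgresql:5432/bitnami_airflow"
```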
- stop_service_using_pid "$AIRFLOW_PID_FILE" -} diff --git a/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/libairflowscheduler.sh b/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/libairflowscheduler.sh deleted file mode 100644 index 8902232e78ea..000000000000 --- a/bitnami/airflow-scheduler/1/debian-10/rootfs/opt/bitnami/scripts/libairflowscheduler.sh +++ /dev/null @@ -1,153 +0,0 @@ -#!/bin/bash - -# Bitnami Airflow library - -# shellcheck disable=SC1091 - -# Load Generic Libraries -. /opt/bitnami/scripts/libfile.sh -. /opt/bitnami/scripts/liblog.sh -. /opt/bitnami/scripts/libnet.sh -. /opt/bitnami/scripts/libos.sh -. /opt/bitnami/scripts/libservice.sh -. /opt/bitnami/scripts/libvalidations.sh -. /opt/bitnami/scripts/libpersistence.sh - -# Load airflow library -. /opt/bitnami/scripts/libairflow.sh - -######################## -# Validate Airflow Scheduler inputs -# Globals: -# AIRFLOW_* -# Arguments: -# None -# Returns: -# None -######################### -airflow_scheduler_validate() { - # Check postgresql host - [[ -z "$AIRFLOW_WEBSERVER_HOST" ]] && print_validation_error "Missing AIRFLOW_WEBSERVER_HOST" - [[ -z "$AIRFLOW_WEBSERVER_PORT_NUMBER" ]] && print_validation_error "Missing AIRFLOW_WEBSERVER_PORT_NUMBER" - # Check postgresql host - [[ -z "$AIRFLOW_DATABASE_HOST" ]] && print_validation_error "Missing AIRFLOW_DATABASE_HOST" - - # Avoid to fail because of the above check - true -} - -######################## -# Ensure Airflow Scheduler is initialized -# Globals: -# AIRFLOW_* -# Arguments: -# None -# Returns: -# None -######################### -airflow_scheduler_initialize() { - # Change permissions if running as root - for dir in "$AIRFLOW_TMP_DIR" "$AIRFLOW_LOGS_DIR" "$AIRFLOW_SCHEDULER_LOGS_DIR" "$AIRFLOW_DATA_DIR"; do - ensure_dir_exists "$dir" - am_i_root && chown "$AIRFLOW_DAEMON_USER:$AIRFLOW_DAEMON_GROUP" "$dir" - done - - # The configuration file is not persisted. If it is not provided, generate it based on env vars - if [[ ! -f "$AIRFLOW_CONF_FILE" ]]; then - info "No injected configuration file found. Creating default config file" - airflow_scheduler_generate_config - else - info "Configuration file found, loading configuration" - fi - - # Check if Airflow has already been initialized and persisted in a previous run - local -r app_name="airflow" - if ! 
is_app_initialized "$app_name"; then - airflow_wait_for_postgresql "$AIRFLOW_DATABASE_HOST" "$AIRFLOW_DATABASE_PORT_NUMBER" - - info "Persisting Airflow installation" - persist_app "$app_name" "$AIRFLOW_DATA_TO_PERSIST" - else - # Check database connection - airflow_wait_for_postgresql "$AIRFLOW_DATABASE_HOST" "$AIRFLOW_DATABASE_PORT_NUMBER" - - # Restore persisted data - info "Restoring persisted Airflow installation" - restore_persisted_app "$app_name" "$AIRFLOW_DATA_TO_PERSIST" - - # Change the permissions after restoring the persisted data in case we are root - for dir in "$AIRFLOW_DATA_DIR" "$AIRFLOW_TMP_DIR" "$AIRFLOW_LOGS_DIR"; do - ensure_dir_exists "$dir" - am_i_root && chown "$AIRFLOW_DAEMON_USER:$AIRFLOW_DAEMON_GROUP" "$dir" - done - true # Avoid return false when I am not root - fi - - # Wait for airflow webserver to be available - airflow_scheduler_wait_for_webserver "$AIRFLOW_WEBSERVER_HOST" "$AIRFLOW_WEBSERVER_PORT_NUMBER" - [[ "$AIRFLOW_EXECUTOR" == "CeleryExecutor" || "$AIRFLOW_EXECUTOR" == "CeleryKubernetesExecutor" ]] && wait-for-port --host "$REDIS_HOST" "$REDIS_PORT_NUMBER" - - # Avoid to fail when the executor is not celery - true -} - -######################## -# Generate Airflow Scheduler conf file -# Globals: -# AIRFLOW_* -# Arguments: -# None -# Returns: -# None -######################### -airflow_scheduler_generate_config() { - # Generate Airflow default files - airflow_execute_command "version" "version" - - # Configure Airflow Hostname - [[ -n "$AIRFLOW_HOSTNAME_CALLABLE" ]] && airflow_conf_set "core" "hostname_callable" "$AIRFLOW_HOSTNAME_CALLABLE" - # Configure Airflow to load examples - if is_boolean_yes "$AIRFLOW_LOAD_EXAMPLES"; then - airflow_conf_set "core" "load_examples" "True" - else - airflow_conf_set "core" "load_examples" "False" - fi - - # Configure Airflow database - airflow_configure_database - - # Configure the Webserver port - airflow_conf_set "webserver" "web_server_port" "$AIRFLOW_WEBSERVER_PORT_NUMBER" - - # Setup the secret keys for database connection and flask application (fernet key and secret key) - # ref: https://airflow.apache.org/docs/apache-airflow/stable/configurations-ref.html#fernet-key - # ref: https://airflow.apache.org/docs/apache-airflow/stable/configurations-ref.html#secret-key - [[ -n "$AIRFLOW_FERNET_KEY" ]] && airflow_conf_set "core" "fernet_key" "$AIRFLOW_FERNET_KEY" - [[ -n "$AIRFLOW_SECRET_KEY" ]] && airflow_conf_set "webserver" "secret_key" "$AIRFLOW_SECRET_KEY" - - # Configure Airflow executor - airflow_conf_set "core" "executor" "$AIRFLOW_EXECUTOR" - [[ "$AIRFLOW_EXECUTOR" == "CeleryExecutor" || "$AIRFLOW_EXECUTOR" == "CeleryKubernetesExecutor" ]] && airflow_configure_celery_executor - true # Avoid the function to fail due to the check above -} - -######################## -# Wait Ariflow webserver -# Globals: -# AIRFLOW_* -# Arguments: -# None -# Returns: -# None -######################### -airflow_scheduler_wait_for_webserver() { - local -r webserver_host="${1:?missing database host}" - local -r webserver_port="${2:?missing database port}" - check_webserver_connection() { - wait-for-port --host "$webserver_host" "$webserver_port" - } - if ! 
retry_while "check_webserver_connection"; then - error "Could not connect to the Airflow webserver" - return 1 - fi -} diff --git a/bitnami/airflow-scheduler/README.md b/bitnami/airflow-scheduler/README.md index fa7d3334909c..db77756b57b7 100644 --- a/bitnami/airflow-scheduler/README.md +++ b/bitnami/airflow-scheduler/README.md @@ -380,10 +380,6 @@ $ docker run -d --name airflow -p 8080:8080 \ - The size of the container image has been decreased. - The configuration logic is now based on Bash scripts in the *rootfs/* folder. -## Branch Deprecation Notice - -Apache Airflow Scheduler's branch 1 is no longer maintained by upstream and is now internally tagged as to be deprecated. This branch will no longer be released in our catalog a month after this notice is published, but already released container images will still persist in the registries. Valid to be removed starting on: 11-13-2021 - ## Contributing We'd love for you to contribute to this container. You can request new features by creating an [issue](https://github.com/bitnami/bitnami-docker-airflow-scheduler/issues), or submit a [pull request](https://github.com/bitnami/bitnami-docker-airflow-scheduler/pulls) with your contribution.