[bitnami/airflow-scheduler] Release 2.5.1-debian-11-r2 (#21814)

Signed-off-by: Bitnami Containers <bitnami-bot@vmware.com>

Signed-off-by: Bitnami Containers <bitnami-bot@vmware.com>
This commit is contained in:
Bitnami Bot 2023-01-26 13:56:58 +01:00 committed by GitHub
parent d01fdb567d
commit 932dd3020e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 101 additions and 17 deletions

View File

@ -5,7 +5,7 @@ ARG TARGETARCH
LABEL org.opencontainers.image.authors="https://bitnami.com/contact" \
org.opencontainers.image.description="Application packaged by Bitnami" \
org.opencontainers.image.licenses="Apache-2.0" \
org.opencontainers.image.ref.name="2.5.1-debian-11-r1" \
org.opencontainers.image.ref.name="2.5.1-debian-11-r2" \
org.opencontainers.image.source="https://github.com/bitnami/containers/tree/main/bitnami/airflow-scheduler" \
org.opencontainers.image.title="airflow-scheduler" \
org.opencontainers.image.vendor="VMware, Inc." \
@ -19,7 +19,7 @@ ENV HOME="/" \
COPY prebuildfs /
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# Install required system packages and dependencies
RUN install_packages ca-certificates curl libbsd0 libbz2-1.0 libcdt5 libcgraph6 libcom-err2 libcrypt1 libedit2 libexpat1 libffi7 libgcc-s1 libgmp10 libgnutls30 libgssapi-krb5-2 libgvc6 libhogweed6 libicu67 libidn2-0 libk5crypto3 libkeyutils1 libkrb5-3 libkrb5support0 libldap-2.4-2 libltdl7 liblzma5 libmariadb3 libmd0 libncursesw6 libnettle8 libnsl2 libp11-kit0 libpathplan4 libpq5 libreadline8 libsasl2-2 libsasl2-modules libsqlite3-0 libssl1.1 libstdc++6 libtasn1-6 libtinfo6 libtirpc3 libunistring2 libuuid1 libxml2 libxslt1.1 locales netbase procps zlib1g
RUN install_packages ca-certificates curl libbsd0 libbz2-1.0 libcdt5 libcgraph6 libcom-err2 libcrypt1 libedit2 libexpat1 libffi7 libgcc-s1 libgmp10 libgnutls30 libgssapi-krb5-2 libgvc6 libhogweed6 libicu67 libidn2-0 libk5crypto3 libkeyutils1 libkrb5-3 libkrb5support0 libldap-2.4-2 libltdl7 liblzma5 libmariadb3 libmd0 libncursesw6 libnettle8 libnsl2 libp11-kit0 libpathplan4 libreadline8 libsasl2-2 libsasl2-modules libsqlite3-0 libssl1.1 libstdc++6 libtasn1-6 libtinfo6 libtirpc3 libunistring2 libuuid1 libxml2 libxslt1.1 locales netbase procps zlib1g
RUN mkdir -p /tmp/bitnami/pkg/cache/ && cd /tmp/bitnami/pkg/cache/ && \
COMPONENTS=( \
"wait-for-port-1.0.6-0-linux-${OS_ARCH}-debian-11" \
@ -27,7 +27,7 @@ RUN mkdir -p /tmp/bitnami/pkg/cache/ && cd /tmp/bitnami/pkg/cache/ && \
"postgresql-client-15.1.0-1-linux-${OS_ARCH}-debian-11" \
"ini-file-1.4.5-0-linux-${OS_ARCH}-debian-11" \
"gosu-1.16.0-1-linux-${OS_ARCH}-debian-11" \
"airflow-scheduler-2.5.1-0-linux-${OS_ARCH}-debian-11" \
"airflow-scheduler-2.5.1-1-linux-${OS_ARCH}-debian-11" \
) && \
for COMPONENT in "${COMPONENTS[@]}"; do \
if [ ! -f "${COMPONENT}.tar.gz" ]; then \
@ -56,7 +56,7 @@ ENV AIRFLOW_HOME="/opt/bitnami/airflow" \
BITNAMI_APP_NAME="airflow-scheduler" \
LANG="en_US.UTF-8" \
LANGUAGE="en_US:en" \
LD_LIBRARY_PATH="/opt/bitnami/python/lib/:/opt/bitnami/airflow/venv/lib/python3.8/site-packages/numpy.libs/:$LD_LIBRARY_PATH" \
LD_LIBRARY_PATH="/opt/bitnami/airflow/venv/lib/python3.8/site-packages/numpy.libs:/opt/bitnami/postgresql/lib:/opt/bitnami/python/lib:$LD_LIBRARY_PATH" \
LIBNSS_WRAPPER_PATH="/opt/bitnami/common/lib/libnss_wrapper.so" \
LNAME="airflow" \
NSS_WRAPPER_GROUP="/opt/bitnami/airflow/nss_group" \

View File

@ -1,10 +1,10 @@
{
"airflow-scheduler": {
"arch": "amd64",
"digest": "00f8afd30e0b29d5ede1a05a348fc2cba423a7c6df113e0930fe8457cc34e4a0",
"digest": "061f73d9229e75f91c2d8730426dc00e45f73077817ce21aa895cba4524c479c",
"distro": "debian-11",
"type": "NAMI",
"version": "2.5.1-0"
"version": "2.5.1-1"
},
"gosu": {
"arch": "amd64",

View File

@ -15,6 +15,7 @@ set -o pipefail
. /opt/bitnami/scripts/libfs.sh
. /opt/bitnami/scripts/libairflowscheduler.sh
# Ensure Airflow environment variables settings are valid
airflow_scheduler_validate
# Ensure Airflow daemon user exists when running as root

View File

@ -111,24 +111,55 @@ airflow_initialize() {
info "Trying to connect to the database server"
airflow_wait_for_postgresql_connection
# Check if the Airflow database has been already initialized
if ! debug_execute airflow db check-migrations; then
if ! airflow_execute db check-migrations; then
# Delete pid file
rm -f "$AIRFLOW_PID_FILE"
# Initialize database
info "Populating database"
debug_execute airflow db init
airflow_execute db init
airflow_create_admin_user
airflow_create_pool
else
# Upgrade database
info "Upgrading database schema"
debug_execute airflow db upgrade
airflow_execute db upgrade
true # Avoid return false when I am not root
fi
}
########################
# Run the 'airflow' CLI, printing its output to stdout/stderr.
# When the script runs as root, the command is executed as the Airflow
# daemon user via gosu so generated files keep the expected ownership.
# Globals:
#   AIRFLOW_*
# Arguments:
#   $1..$n - Arguments to pass to the CLI call
# Returns:
#   None
#########################
airflow_execute_print_output() {
    if ! am_i_root; then
        airflow "$@"
    else
        gosu "$AIRFLOW_DAEMON_USER" airflow "$@"
    fi
}
########################
# Run the 'airflow' CLI with the given arguments, silencing its output
# unless debug logging is enabled (delegates to debug_execute).
# Globals:
#   AIRFLOW_*
# Arguments:
#   $1..$n - Arguments to pass to the CLI call
# Returns:
#   None
#########################
airflow_execute() {
    debug_execute airflow_execute_print_output "$@"
}
########################
# Generate Airflow conf file
# Globals:
@ -140,7 +171,7 @@ airflow_initialize() {
#########################
airflow_generate_config() {
# Generate Airflow default files
debug_execute airflow version
airflow_execute version
# Setup Airflow base URL
airflow_configure_base_url
@ -376,7 +407,7 @@ airflow_configure_celery_executor() {
# true if the database connection succeeded, false otherwise
#########################
airflow_wait_for_postgresql_connection() {
if ! retry_while "debug_execute airflow db check"; then
if ! retry_while "airflow_execute db check"; then
error "Could not connect to the database"
return 1
fi
@ -391,7 +422,7 @@ airflow_wait_for_postgresql_connection() {
#########################
airflow_create_admin_user() {
info "Creating Airflow admin user"
debug_execute airflow users create -r "Admin" -u "$AIRFLOW_USERNAME" -e "$AIRFLOW_EMAIL" -p "$AIRFLOW_PASSWORD" -f "$AIRFLOW_FIRSTNAME" -l "$AIRFLOW_LASTNAME"
airflow_execute users create -r "Admin" -u "$AIRFLOW_USERNAME" -e "$AIRFLOW_EMAIL" -p "$AIRFLOW_PASSWORD" -f "$AIRFLOW_FIRSTNAME" -l "$AIRFLOW_LASTNAME"
}
########################
@ -404,7 +435,7 @@ airflow_create_admin_user() {
airflow_create_pool() {
if [[ -n "$AIRFLOW_POOL_NAME" ]] && [[ -n "$AIRFLOW_POOL_SIZE" ]] && [[ -n "$AIRFLOW_POOL_DESC" ]]; then
info "Creating Airflow pool"
debug_execute airflow pool -s "$AIRFLOW_POOL_NAME" "$AIRFLOW_POOL_SIZE" "$AIRFLOW_POOL_DESC"
airflow_execute pool -s "$AIRFLOW_POOL_NAME" "$AIRFLOW_POOL_SIZE" "$AIRFLOW_POOL_DESC"
fi
}
@ -428,7 +459,7 @@ is_airflow_running() {
}
########################
# Check if Airflow is running
# Check if Airflow is not running
# Globals:
# AIRFLOW_PID_FILE
# Arguments:
@ -453,3 +484,53 @@ airflow_stop() {
info "Stopping Airflow..."
stop_service_using_pid "$AIRFLOW_PID_FILE"
}
########################
# Check whether the airflow-exporter process is alive
# Globals:
#   AIRFLOW_EXPORTER_PID_FILE
# Arguments:
#   None
# Returns:
#   Whether airflow-exporter is running
########################
is_airflow_exporter_running() {
    local pid
    # airflow-exporter never writes a PID file of its own, so refresh ours
    # from the process table on every query to avoid acting on stale data
    pgrep -f "airflow-prometheus-exporter" | head -n 1 > "$AIRFLOW_EXPORTER_PID_FILE"
    pid="$(get_pid_from_file "$AIRFLOW_EXPORTER_PID_FILE")"
    # No matching process found: report not running
    [[ -z "$pid" ]] && return 1
    is_service_running "$pid"
}
########################
# Check whether the airflow-exporter process is stopped
# Globals:
#   AIRFLOW_EXPORTER_PID_FILE
# Arguments:
#   None
# Returns:
#   Whether airflow-exporter is not running
########################
is_airflow_exporter_not_running() {
    if is_airflow_exporter_running; then
        return 1
    fi
    return 0
}
########################
# Stop the airflow-exporter process
# Globals:
#   AIRFLOW*
# Arguments:
#   None
# Returns:
#   None
#########################
airflow_exporter_stop() {
    local -r pid_file="$AIRFLOW_EXPORTER_PID_FILE"
    info "Stopping airflow-exporter..."
    stop_service_using_pid "$pid_file"
}

View File

@ -69,11 +69,13 @@ airflow_scheduler_initialize() {
done
# Wait for airflow webserver to be available
info "Waiting for Airflow Webserser to be up"
info "Waiting for Airflow Webserver to be up"
airflow_scheduler_wait_for_webserver "$AIRFLOW_WEBSERVER_HOST" "$AIRFLOW_WEBSERVER_PORT_NUMBER"
[[ "$AIRFLOW_EXECUTOR" == "CeleryExecutor" || "$AIRFLOW_EXECUTOR" == "CeleryKubernetesExecutor" ]] && wait-for-port --host "$REDIS_HOST" "$REDIS_PORT_NUMBER"
if [[ "$AIRFLOW_EXECUTOR" == "CeleryExecutor" || "$AIRFLOW_EXECUTOR" == "CeleryKubernetesExecutor" ]]; then
wait-for-port --host "$REDIS_HOST" "$REDIS_PORT_NUMBER"
fi
# Avoid to fail when the executor is not celery
# Avoid exit code of previous commands to affect the result of this function
true
}