[bitnami/spark] Release 3.3.1-debian-11-r20 (#17895)

Signed-off-by: Bitnami Containers <bitnami-bot@vmware.com>

Bitnami Bot 2022-12-23 13:48:50 +01:00 committed by GitHub
parent 4a30c969b2
commit 90e3cfe865
8 changed files with 106 additions and 68 deletions

@@ -5,7 +5,7 @@ ARG TARGETARCH
 LABEL org.opencontainers.image.authors="https://bitnami.com/contact" \
       org.opencontainers.image.description="Application packaged by Bitnami" \
-      org.opencontainers.image.ref.name="3.3.1-debian-11-r19" \
+      org.opencontainers.image.ref.name="3.3.1-debian-11-r20" \
       org.opencontainers.image.source="https://github.com/bitnami/containers/tree/main/bitnami/spark" \
       org.opencontainers.image.title="spark" \
       org.opencontainers.image.vendor="VMware, Inc." \
@@ -25,7 +25,7 @@ RUN mkdir -p /tmp/bitnami/pkg/cache/ && cd /tmp/bitnami/pkg/cache/ && \
     COMPONENTS=( \
       "python-3.8.16-0-linux-${OS_ARCH}-debian-11" \
       "java-1.8.352-2-linux-${OS_ARCH}-debian-11" \
-      "spark-3.3.1-1-linux-${OS_ARCH}-debian-11" \
+      "spark-3.3.1-2-linux-${OS_ARCH}-debian-11" \
       "gosu-1.16.0-0-linux-${OS_ARCH}-debian-11" \
     ) && \
     for COMPONENT in "${COMPONENTS[@]}"; do \
@@ -42,7 +42,6 @@ RUN apt-get autoremove --purge -y curl && \
     apt-get clean && rm -rf /var/lib/apt/lists /var/cache/apt/archives
 RUN chmod g+rwX /opt/bitnami
 RUN mkdir /.local && chmod g+rwX /.local
-RUN chown -R 1001:root /opt/bitnami/spark
 COPY rootfs /
 RUN /opt/bitnami/scripts/spark/postunpack.sh

@@ -25,6 +25,6 @@
     "digest": "77a1002ce641de9f9ff5141fa9dd43bec9d147353b54458c858f936219aa2493",
     "distro": "debian-11",
     "type": "NAMI",
-    "version": "3.3.1-1"
+    "version": "3.3.1-2"
   }
 }

@@ -12,62 +12,6 @@
 # Functions
 ########################
-# Load global variables used on Spark configuration
-# Globals:
-#   SPARK_*
-# Arguments:
-#   None
-# Returns:
-#   Series of exports to be used as 'eval' arguments
-#########################
-spark_env() {
-    cat <<"EOF"
-# Spark directories
-export SPARK_BASEDIR="/opt/bitnami/spark"
-export SPARK_CONFDIR="${SPARK_BASEDIR}/conf"
-export SPARK_WORKDIR="${SPARK_BASEDIR}/work"
-export SPARK_CONF_FILE="${SPARK_CONFDIR}/spark-defaults.conf"
-export SPARK_LOGDIR="${SPARK_BASEDIR}/logs"
-export SPARK_TMPDIR="${SPARK_BASEDIR}/tmp"
-export SPARK_JARSDIR="${SPARK_BASEDIR}/jars"
-# Spark basic cluster
-export SPARK_MODE="${SPARK_MODE:-master}"
-export SPARK_MASTER_URL="${SPARK_MASTER_URL:-spark://spark-master:7077}"
-export SPARK_NO_DAEMONIZE="${SPARK_NO_DAEMONIZE:-true}"
-# RPC Authentication and Encryption
-export SPARK_RPC_AUTHENTICATION_ENABLED="${SPARK_RPC_AUTHENTICATION_ENABLED:-no}"
-export SPARK_RPC_AUTHENTICATION_SECRET="${SPARK_RPC_AUTHENTICATION_SECRET:-}"
-export SPARK_RPC_ENCRYPTION_ENABLED="${SPARK_RPC_ENCRYPTION_ENABLED:-no}"
-# Local Storage Encryption
-export SPARK_LOCAL_STORAGE_ENCRYPTION_ENABLED="${SPARK_LOCAL_STORAGE_ENCRYPTION_ENABLED:-no}"
-# SSL
-export SPARK_SSL_ENABLED="${SPARK_SSL_ENABLED:-no}"
-export SPARK_SSL_KEY_PASSWORD="${SPARK_SSL_KEY_PASSWORD:-}"
-export SPARK_SSL_KEYSTORE_PASSWORD="${SPARK_SSL_KEYSTORE_PASSWORD:-}"
-export SPARK_SSL_KEYSTORE_FILE="${SPARK_SSL_KEYSTORE_FILE:-${SPARK_CONFDIR}/certs/spark-keystore.jks}"
-export SPARK_SSL_TRUSTSTORE_PASSWORD="${SPARK_SSL_TRUSTSTORE_PASSWORD:-}"
-export SPARK_SSL_TRUSTSTORE_FILE="${SPARK_SSL_TRUSTSTORE_FILE:-${SPARK_CONFDIR}/certs/spark-truststore.jks}"
-export SPARK_SSL_NEED_CLIENT_AUTH="${SPARK_SSL_NEED_CLIENT_AUTH:-yes}"
-export SPARK_SSL_PROTOCOL="${SPARK_SSL_PROTOCOL:-TLSv1.2}"
-export SPARK_WEBUI_SSL_PORT="${SPARK_WEBUI_SSL_PORT:-}"
-# Monitoring
-export SPARK_METRICS_ENABLED="${SPARK_METRICS_ENABLED:-false}"
-# System Users
-export SPARK_DAEMON_USER="spark"
-export SPARK_DAEMON_GROUP="spark"
-# Paths
-export SPARK_INITSCRIPTS_DIR="/docker-entrypoint-initdb.d"
-EOF
-}
 ########################
 # Validate settings in SPARK_* env vars
 # Globals:

@@ -0,0 +1,95 @@
+#!/bin/bash
+#
+# Environment configuration for spark
+
+# The values for all environment variables will be set in the below order of precedence
+# 1. Custom environment variables defined below after Bitnami defaults
+# 2. Constants defined in this file (environment variables with no default), i.e. BITNAMI_ROOT_DIR
+# 3. Environment variables overridden via external files using *_FILE variables (see below)
+# 4. Environment variables set externally (i.e. current Bash context/Dockerfile/userdata)
+
+# Load logging library
+# shellcheck disable=SC1090,SC1091
+. /opt/bitnami/scripts/liblog.sh
+
+export BITNAMI_ROOT_DIR="/opt/bitnami"
+export BITNAMI_VOLUME_DIR="/bitnami"
+
+# Logging configuration
+export MODULE="${MODULE:-spark}"
+export BITNAMI_DEBUG="${BITNAMI_DEBUG:-false}"
+
+# By setting an environment variable matching *_FILE to a file path, the prefixed environment
+# variable will be overridden with the value specified in that file
+spark_env_vars=(
+    SPARK_MODE
+    SPARK_MASTER_URL
+    SPARK_NO_DAEMONIZE
+    SPARK_RPC_AUTHENTICATION_ENABLED
+    SPARK_RPC_AUTHENTICATION_SECRET
+    SPARK_RPC_ENCRYPTION_ENABLED
+    SPARK_LOCAL_STORAGE_ENCRYPTION_ENABLED
+    SPARK_SSL_ENABLED
+    SPARK_SSL_KEY_PASSWORD
+    SPARK_SSL_KEYSTORE_PASSWORD
+    SPARK_SSL_KEYSTORE_FILE
+    SPARK_SSL_TRUSTSTORE_PASSWORD
+    SPARK_SSL_TRUSTSTORE_FILE
+    SPARK_SSL_NEED_CLIENT_AUTH
+    SPARK_SSL_PROTOCOL
+    SPARK_WEBUI_SSL_PORT
+    SPARK_METRICS_ENABLED
+)
+for env_var in "${spark_env_vars[@]}"; do
+    file_env_var="${env_var}_FILE"
+    if [[ -n "${!file_env_var:-}" ]]; then
+        if [[ -r "${!file_env_var:-}" ]]; then
+            export "${env_var}=$(< "${!file_env_var}")"
+            unset "${file_env_var}"
+        else
+            warn "Skipping export of '${env_var}'. '${!file_env_var:-}' is not readable."
+        fi
+    fi
+done
+unset spark_env_vars
+
+# Paths
+export SPARK_BASEDIR="${BITNAMI_ROOT_DIR}/spark"
+export SPARK_CONFDIR="${SPARK_BASEDIR}/conf"
+export SPARK_WORKDIR="${SPARK_BASEDIR}/work"
+export SPARK_CONF_FILE="${SPARK_CONFDIR}/spark-defaults.conf"
+export SPARK_LOGDIR="${SPARK_BASEDIR}/logs"
+export SPARK_TMPDIR="${SPARK_BASEDIR}/tmp"
+export SPARK_JARSDIR="${SPARK_BASEDIR}/jars"
+export SPARK_INITSCRIPTS_DIR="/docker-entrypoint-initdb.d"
+
+# Spark configuration
+export SPARK_MODE="${SPARK_MODE:-master}"
+export SPARK_MASTER_URL="${SPARK_MASTER_URL:-spark://spark-master:7077}"
+export SPARK_NO_DAEMONIZE="${SPARK_NO_DAEMONIZE:-true}"
+
+# RPC Authentication and Encryption
+export SPARK_RPC_AUTHENTICATION_ENABLED="${SPARK_RPC_AUTHENTICATION_ENABLED:-no}"
+export SPARK_RPC_AUTHENTICATION_SECRET="${SPARK_RPC_AUTHENTICATION_SECRET:-}"
+export SPARK_RPC_ENCRYPTION_ENABLED="${SPARK_RPC_ENCRYPTION_ENABLED:-no}"
+
+# Local Storage Encryption
+export SPARK_LOCAL_STORAGE_ENCRYPTION_ENABLED="${SPARK_LOCAL_STORAGE_ENCRYPTION_ENABLED:-no}"
+
+# SSL/TLS configuration
+export SPARK_SSL_ENABLED="${SPARK_SSL_ENABLED:-no}"
+export SPARK_SSL_KEY_PASSWORD="${SPARK_SSL_KEY_PASSWORD:-}"
+export SPARK_SSL_KEYSTORE_PASSWORD="${SPARK_SSL_KEYSTORE_PASSWORD:-}"
+export SPARK_SSL_KEYSTORE_FILE="${SPARK_SSL_KEYSTORE_FILE:-${SPARK_CONFDIR}/certs/spark-keystore.jks}"
+export SPARK_SSL_TRUSTSTORE_PASSWORD="${SPARK_SSL_TRUSTSTORE_PASSWORD:-}"
+export SPARK_SSL_TRUSTSTORE_FILE="${SPARK_SSL_TRUSTSTORE_FILE:-${SPARK_CONFDIR}/certs/spark-truststore.jks}"
+export SPARK_SSL_NEED_CLIENT_AUTH="${SPARK_SSL_NEED_CLIENT_AUTH:-yes}"
+export SPARK_SSL_PROTOCOL="${SPARK_SSL_PROTOCOL:-TLSv1.2}"
+export SPARK_WEBUI_SSL_PORT="${SPARK_WEBUI_SSL_PORT:-}"
+export SPARK_METRICS_ENABLED="${SPARK_METRICS_ENABLED:-false}"
+
+# Spark system parameters
+export SPARK_DAEMON_USER="spark"
+export SPARK_DAEMON_GROUP="spark"
+
+# Custom environment variables may be defined below
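
As context for the loop above: pointing a *_FILE variable at a mounted file makes spark-env.sh export that file's contents under the unprefixed name. A hypothetical invocation (the secret path and value are illustrative, not part of this commit):

    # Illustrative only: supply the RPC secret via a mounted file
    printf 's3cr3t' > ./rpc-secret.txt
    docker run --rm \
      -v "$(pwd)/rpc-secret.txt:/run/secrets/spark-rpc-secret:ro" \
      -e SPARK_RPC_AUTHENTICATION_ENABLED=yes \
      -e SPARK_RPC_AUTHENTICATION_SECRET_FILE=/run/secrets/spark-rpc-secret \
      bitnami/spark:3.3.1-debian-11-r20
    # On startup, spark-env.sh reads the mounted file, exports its contents as
    # SPARK_RPC_AUTHENTICATION_SECRET, and unsets the *_FILE variable.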

@@ -11,8 +11,8 @@ set -o pipefail
 . /opt/bitnami/scripts/libbitnami.sh
 . /opt/bitnami/scripts/libspark.sh
-# Load Spark environment variables
-eval "$(spark_env)"
+# Load Spark environment settings
+. /opt/bitnami/scripts/spark-env.sh
 print_welcome_page

@@ -6,8 +6,8 @@
 . /opt/bitnami/scripts/libfs.sh
 . /opt/bitnami/scripts/libspark.sh
-# Load Spark environment variables
-eval "$(spark_env)"
+# Load Spark environment settings
+. /opt/bitnami/scripts/spark-env.sh
 for dir in "$SPARK_TMPDIR" "$SPARK_LOGDIR" "$SPARK_CONFDIR" "$SPARK_WORKDIR" "$SPARK_JARSDIR"; do
     ensure_dir_exists "$dir"

@@ -11,8 +11,8 @@ set -o pipefail
 . /opt/bitnami/scripts/libspark.sh
 . /opt/bitnami/scripts/libos.sh
-# Load Spark environment variables
-eval "$(spark_env)"
+# Load Spark environment settings
+. /opt/bitnami/scripts/spark-env.sh
 if [ "$SPARK_MODE" == "master" ]; then
     # Master constants

@@ -12,8 +12,8 @@ set -o pipefail
 . /opt/bitnami/scripts/libfs.sh
 . /opt/bitnami/scripts/libspark.sh
-# Load Spark environment variables
-eval "$(spark_env)"
+# Load Spark environment settings
+. /opt/bitnami/scripts/spark-env.sh
 # Ensure Spark environment variables settings are valid
 spark_validate
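
Taken together, the four scripts above all swap the same two lines: the runtime-generated environment (a here-document printed by spark_env and applied with eval) is replaced by sourcing the static spark-env.sh, which also applies the *_FILE overrides each time it is loaded. A minimal before/after sketch:

    # Before (3.3.1-debian-11-r19): defaults generated at runtime and eval'd
    eval "$(spark_env)"

    # After (3.3.1-debian-11-r20): defaults sourced from a plain file
    . /opt/bitnami/scripts/spark-env.sh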