[bitnami/spark] Release 3.2.4-debian-11-r173 (#62100)

Signed-off-by: Bitnami Containers <bitnami-bot@vmware.com>
This commit is contained in:
Bitnami Bot 2024-02-19 15:38:56 +01:00 committed by GitHub
parent bc11e97398
commit 97e7826e34
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 28 additions and 16 deletions

View File

@@ -8,10 +8,10 @@ ARG TARGETARCH
LABEL com.vmware.cp.artifact.flavor="sha256:1e1b4657a77f0d47e9220f0c37b9bf7802581b93214fff7d1bd2364c8bf22e8e" \
org.opencontainers.image.base.name="docker.io/bitnami/minideb:bullseye" \
-org.opencontainers.image.created="2024-02-16T15:41:12Z" \
+org.opencontainers.image.created="2024-02-19T12:31:13Z" \
org.opencontainers.image.description="Application packaged by VMware, Inc" \
org.opencontainers.image.licenses="Apache-2.0" \
-org.opencontainers.image.ref.name="3.2.4-debian-11-r172" \
+org.opencontainers.image.ref.name="3.2.4-debian-11-r173" \
org.opencontainers.image.title="spark" \
org.opencontainers.image.vendor="VMware, Inc." \
org.opencontainers.image.version="3.2.4"

View File

@@ -58,6 +58,7 @@ unset spark_env_vars
# Paths
export SPARK_BASE_DIR="${BITNAMI_ROOT_DIR}/spark"
export SPARK_CONF_DIR="${SPARK_BASE_DIR}/conf"
+export SPARK_DEFAULT_CONF_DIR="${SPARK_BASE_DIR}/conf.default"
export SPARK_WORK_DIR="${SPARK_BASE_DIR}/work"
export SPARK_CONF_FILE="${SPARK_CONF_DIR}/spark-defaults.conf"
export SPARK_LOG_DIR="${SPARK_BASE_DIR}/logs"

View File

@@ -18,6 +18,12 @@ set -o pipefail
print_welcome_page
+# We add the copy from default config in the entrypoint to not break users
+# bypassing the setup.sh logic. If the file already exists do not overwrite (in
+# case someone mounts a configuration file in /opt/bitnami/spark/conf)
+debug "Copying files from $SPARK_DEFAULT_CONF_DIR to $SPARK_CONF_DIR"
+cp -nr "$SPARK_DEFAULT_CONF_DIR"/. "$SPARK_CONF_DIR"
if [ ! $EUID -eq 0 ] && [ -e "$LIBNSS_WRAPPER_PATH" ]; then
echo "spark:x:$(id -u):$(id -g):Spark:$SPARK_HOME:/bin/false" > "$NSS_WRAPPER_PASSWD"
echo "spark:x:$(id -g):" > "$NSS_WRAPPER_GROUP"

View File

@@ -11,10 +11,14 @@
# Load Spark environment settings
. /opt/bitnami/scripts/spark-env.sh
-for dir in "$SPARK_TMP_DIR" "$SPARK_LOG_DIR" "$SPARK_CONF_DIR" "$SPARK_WORK_DIR" "$SPARK_JARS_DIR"; do
+for dir in "$SPARK_TMP_DIR" "$SPARK_LOG_DIR" "$SPARK_CONF_DIR" "$SPARK_DEFAULT_CONF_DIR" "$SPARK_WORK_DIR" "$SPARK_JARS_DIR"; do
ensure_dir_exists "$dir"
configure_permissions_ownership "$dir" -d "775" -f "664" -g "root"
done
# Set correct owner in installation directory
chown -R "1001:root" "$SPARK_BASE_DIR"
+# Copy all initially generated configuration files to the default directory
+# (this is to avoid breaking when entrypoint is being overridden)
+cp -r "${SPARK_CONF_DIR}/"* "$SPARK_DEFAULT_CONF_DIR"

View File

@@ -93,19 +93,20 @@ docker build -t bitnami/APP:latest .
#### Read-only environment variables
-| Name                    | Description                    | Value                                   |
-|-------------------------|--------------------------------|-----------------------------------------|
-| `SPARK_BASE_DIR`        | Spark installation directory.  | `${BITNAMI_ROOT_DIR}/spark`             |
-| `SPARK_CONF_DIR`        | Spark configuration directory. | `${SPARK_BASE_DIR}/conf`                |
-| `SPARK_WORK_DIR`        | Spark workspace directory.     | `${SPARK_BASE_DIR}/work`                |
-| `SPARK_CONF_FILE`       | Spark configuration file path. | `${SPARK_CONF_DIR}/spark-defaults.conf` |
-| `SPARK_LOG_DIR`         | Spark logs directory.          | `${SPARK_BASE_DIR}/logs`                |
-| `SPARK_TMP_DIR`         | Spark tmp directory.           | `${SPARK_BASE_DIR}/tmp`                 |
-| `SPARK_JARS_DIR`        | Spark jar directory.           | `${SPARK_BASE_DIR}/jars`                |
-| `SPARK_INITSCRIPTS_DIR` | Spark init scripts directory.  | `/docker-entrypoint-initdb.d`           |
-| `SPARK_USER`            | Spark user.                    | `spark`                                 |
-| `SPARK_DAEMON_USER`     | Spark system user.             | `spark`                                 |
-| `SPARK_DAEMON_GROUP`    | Spark system group.            | `spark`                                 |
+| Name                     | Description                            | Value                                   |
+|--------------------------|----------------------------------------|-----------------------------------------|
+| `SPARK_BASE_DIR`         | Spark installation directory.          | `${BITNAMI_ROOT_DIR}/spark`             |
+| `SPARK_CONF_DIR`         | Spark configuration directory.         | `${SPARK_BASE_DIR}/conf`                |
+| `SPARK_DEFAULT_CONF_DIR` | Spark default configuration directory. | `${SPARK_BASE_DIR}/conf.default`        |
+| `SPARK_WORK_DIR`         | Spark workspace directory.             | `${SPARK_BASE_DIR}/work`                |
+| `SPARK_CONF_FILE`        | Spark configuration file path.         | `${SPARK_CONF_DIR}/spark-defaults.conf` |
+| `SPARK_LOG_DIR`          | Spark logs directory.                  | `${SPARK_BASE_DIR}/logs`                |
+| `SPARK_TMP_DIR`          | Spark tmp directory.                   | `${SPARK_BASE_DIR}/tmp`                 |
+| `SPARK_JARS_DIR`         | Spark jar directory.                   | `${SPARK_BASE_DIR}/jars`                |
+| `SPARK_INITSCRIPTS_DIR`  | Spark init scripts directory.          | `/docker-entrypoint-initdb.d`           |
+| `SPARK_USER`             | Spark user.                            | `spark`                                 |
+| `SPARK_DAEMON_USER`      | Spark system user.                     | `spark`                                 |
+| `SPARK_DAEMON_GROUP`     | Spark system group.                    | `spark`                                 |
Additionally, more environment variables natively supported by Apache Spark can be found [at the official documentation](https://spark.apache.org/docs/latest/spark-standalone.html#cluster-launch-scripts).