diff --git a/bitnami/spark/3.2/debian-11/Dockerfile b/bitnami/spark/3.2/debian-11/Dockerfile index 36c00796ed9e..c08838b7fdc2 100644 --- a/bitnami/spark/3.2/debian-11/Dockerfile +++ b/bitnami/spark/3.2/debian-11/Dockerfile @@ -8,10 +8,10 @@ ARG TARGETARCH LABEL com.vmware.cp.artifact.flavor="sha256:1e1b4657a77f0d47e9220f0c37b9bf7802581b93214fff7d1bd2364c8bf22e8e" \ org.opencontainers.image.base.name="docker.io/bitnami/minideb:bullseye" \ - org.opencontainers.image.created="2024-02-16T15:41:12Z" \ + org.opencontainers.image.created="2024-02-19T12:31:13Z" \ org.opencontainers.image.description="Application packaged by VMware, Inc" \ org.opencontainers.image.licenses="Apache-2.0" \ - org.opencontainers.image.ref.name="3.2.4-debian-11-r172" \ + org.opencontainers.image.ref.name="3.2.4-debian-11-r173" \ org.opencontainers.image.title="spark" \ org.opencontainers.image.vendor="VMware, Inc." \ org.opencontainers.image.version="3.2.4" diff --git a/bitnami/spark/3.2/debian-11/rootfs/opt/bitnami/scripts/spark-env.sh b/bitnami/spark/3.2/debian-11/rootfs/opt/bitnami/scripts/spark-env.sh index 45530bb3923a..15bde9710e2d 100644 --- a/bitnami/spark/3.2/debian-11/rootfs/opt/bitnami/scripts/spark-env.sh +++ b/bitnami/spark/3.2/debian-11/rootfs/opt/bitnami/scripts/spark-env.sh @@ -58,6 +58,7 @@ unset spark_env_vars # Paths export SPARK_BASE_DIR="${BITNAMI_ROOT_DIR}/spark" export SPARK_CONF_DIR="${SPARK_BASE_DIR}/conf" +export SPARK_DEFAULT_CONF_DIR="${SPARK_BASE_DIR}/conf.default" export SPARK_WORK_DIR="${SPARK_BASE_DIR}/work" export SPARK_CONF_FILE="${SPARK_CONF_DIR}/spark-defaults.conf" export SPARK_LOG_DIR="${SPARK_BASE_DIR}/logs" diff --git a/bitnami/spark/3.2/debian-11/rootfs/opt/bitnami/scripts/spark/entrypoint.sh b/bitnami/spark/3.2/debian-11/rootfs/opt/bitnami/scripts/spark/entrypoint.sh index ec07b0552d2c..153bf9a545ef 100755 --- a/bitnami/spark/3.2/debian-11/rootfs/opt/bitnami/scripts/spark/entrypoint.sh +++ 
b/bitnami/spark/3.2/debian-11/rootfs/opt/bitnami/scripts/spark/entrypoint.sh @@ -18,6 +18,12 @@ set -o pipefail print_welcome_page +# We add the copy from default config in the entrypoint to not break users +# bypassing the setup.sh logic. If the file already exists do not overwrite (in +# case someone mounts a configuration file in /opt/bitnami/spark/conf) +debug "Copying files from $SPARK_DEFAULT_CONF_DIR to $SPARK_CONF_DIR" +cp -nr "$SPARK_DEFAULT_CONF_DIR"/. "$SPARK_CONF_DIR" + if [ ! $EUID -eq 0 ] && [ -e "$LIBNSS_WRAPPER_PATH" ]; then echo "spark:x:$(id -u):$(id -g):Spark:$SPARK_HOME:/bin/false" > "$NSS_WRAPPER_PASSWD" echo "spark:x:$(id -g):" > "$NSS_WRAPPER_GROUP" diff --git a/bitnami/spark/3.2/debian-11/rootfs/opt/bitnami/scripts/spark/postunpack.sh b/bitnami/spark/3.2/debian-11/rootfs/opt/bitnami/scripts/spark/postunpack.sh index f330738021fe..3528fd707644 100755 --- a/bitnami/spark/3.2/debian-11/rootfs/opt/bitnami/scripts/spark/postunpack.sh +++ b/bitnami/spark/3.2/debian-11/rootfs/opt/bitnami/scripts/spark/postunpack.sh @@ -11,10 +11,14 @@ # Load Spark environment settings . 
/opt/bitnami/scripts/spark-env.sh -for dir in "$SPARK_TMP_DIR" "$SPARK_LOG_DIR" "$SPARK_CONF_DIR" "$SPARK_WORK_DIR" "$SPARK_JARS_DIR"; do +for dir in "$SPARK_TMP_DIR" "$SPARK_LOG_DIR" "$SPARK_CONF_DIR" "$SPARK_DEFAULT_CONF_DIR" "$SPARK_WORK_DIR" "$SPARK_JARS_DIR"; do ensure_dir_exists "$dir" configure_permissions_ownership "$dir" -d "775" -f "664" -g "root" done # Set correct owner in installation directory chown -R "1001:root" "$SPARK_BASE_DIR" + +# Copy all initially generated configuration files to the default directory +# (this is to avoid breaking when entrypoint is being overridden) +cp -r "${SPARK_CONF_DIR}/." "$SPARK_DEFAULT_CONF_DIR" \ No newline at end of file diff --git a/bitnami/spark/README.md b/bitnami/spark/README.md index d7437199ac78..8030a17fb98e 100644 --- a/bitnami/spark/README.md +++ b/bitnami/spark/README.md @@ -93,19 +93,20 @@ docker build -t bitnami/APP:latest . #### Read-only environment variables -| Name | Description | Value | -|-------------------------|--------------------------------|-----------------------------------------| -| `SPARK_BASE_DIR` | Spark installation directory. | `${BITNAMI_ROOT_DIR}/spark` | -| `SPARK_CONF_DIR` | Spark configuration directory. | `${SPARK_BASE_DIR}/conf` | -| `SPARK_WORK_DIR` | Spark workspace directory. | `${SPARK_BASE_DIR}/work` | -| `SPARK_CONF_FILE` | Spark configuration file path. | `${SPARK_CONF_DIR}/spark-defaults.conf` | -| `SPARK_LOG_DIR` | Spark logs directory. | `${SPARK_BASE_DIR}/logs` | -| `SPARK_TMP_DIR` | Spark tmp directory. | `${SPARK_BASE_DIR}/tmp` | -| `SPARK_JARS_DIR` | Spark jar directory. | `${SPARK_BASE_DIR}/jars` | -| `SPARK_INITSCRIPTS_DIR` | Spark init scripts directory. | `/docker-entrypoint-initdb.d` | -| `SPARK_USER` | Spark user. | `spark` | -| `SPARK_DAEMON_USER` | Spark system user. | `spark` | -| `SPARK_DAEMON_GROUP` | Spark system group.
| `spark` | +| Name | Description | Value | +|--------------------------|----------------------------------------|-----------------------------------------| +| `SPARK_BASE_DIR` | Spark installation directory. | `${BITNAMI_ROOT_DIR}/spark` | +| `SPARK_CONF_DIR` | Spark configuration directory. | `${SPARK_BASE_DIR}/conf` | +| `SPARK_DEFAULT_CONF_DIR` | Spark default configuration directory. | `${SPARK_BASE_DIR}/conf.default` | +| `SPARK_WORK_DIR` | Spark workspace directory. | `${SPARK_BASE_DIR}/work` | +| `SPARK_CONF_FILE` | Spark configuration file path. | `${SPARK_CONF_DIR}/spark-defaults.conf` | +| `SPARK_LOG_DIR` | Spark logs directory. | `${SPARK_BASE_DIR}/logs` | +| `SPARK_TMP_DIR` | Spark tmp directory. | `${SPARK_BASE_DIR}/tmp` | +| `SPARK_JARS_DIR` | Spark jar directory. | `${SPARK_BASE_DIR}/jars` | +| `SPARK_INITSCRIPTS_DIR` | Spark init scripts directory. | `/docker-entrypoint-initdb.d` | +| `SPARK_USER` | Spark user. | `spark` | +| `SPARK_DAEMON_USER` | Spark system user. | `spark` | +| `SPARK_DAEMON_GROUP` | Spark system group. | `spark` | Additionally, more environment variables natively supported by Apache Spark can be found [at the official documentation](https://spark.apache.org/docs/latest/spark-standalone.html#cluster-launch-scripts).