[bitnami/spark] Release 3.2.3-debian-11-r7 (#17736)
Signed-off-by: Bitnami Containers <bitnami-bot@vmware.com>
parent fa2f5c90c7
commit 366d4b0308
Dockerfile:

@@ -5,7 +5,7 @@ ARG TARGETARCH
 LABEL org.opencontainers.image.authors="https://bitnami.com/contact" \
       org.opencontainers.image.description="Application packaged by Bitnami" \
-      org.opencontainers.image.ref.name="3.2.3-debian-11-r6" \
+      org.opencontainers.image.ref.name="3.2.3-debian-11-r7" \
       org.opencontainers.image.source="https://github.com/bitnami/containers/tree/main/bitnami/spark" \
       org.opencontainers.image.title="spark" \
       org.opencontainers.image.vendor="VMware, Inc." \
@@ -26,7 +26,7 @@ RUN mkdir -p /tmp/bitnami/pkg/cache/ && cd /tmp/bitnami/pkg/cache/ && \
       "python-3.8.16-0-linux-${OS_ARCH}-debian-11" \
       "java-1.8.352-2-linux-${OS_ARCH}-debian-11" \
       "spark-3.2.3-0-linux-${OS_ARCH}-debian-11" \
-      "gosu-1.14.0-156-linux-${OS_ARCH}-debian-11" \
+      "gosu-1.16.0-0-linux-${OS_ARCH}-debian-11" \
     ) && \
     for COMPONENT in "${COMPONENTS[@]}"; do \
       if [ ! -f "${COMPONENT}.tar.gz" ]; then \
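For context, the hunk above sits inside the Dockerfile's component-fetch loop: a COMPONENTS array lists pinned tarballs, and anything not already in the layer cache is downloaded before extraction. A minimal shell sketch of that pattern follows; the downloads endpoint and the .sha256 sidecar convention are assumptions drawn from other published Bitnami Dockerfiles, not part of this diff, and OS_ARCH is hardcoded here only for illustration.

#!/bin/bash
# Sketch of the fetch-and-verify loop the hunk above belongs to (assumptions
# noted in comments; the endpoint is not confirmed by this diff).
set -euo pipefail

OS_ARCH="amd64"  # in the real Dockerfile this comes from TARGETARCH
COMPONENTS=(
  "python-3.8.16-0-linux-${OS_ARCH}-debian-11"
  "java-1.8.352-2-linux-${OS_ARCH}-debian-11"
  "spark-3.2.3-0-linux-${OS_ARCH}-debian-11"
  "gosu-1.16.0-0-linux-${OS_ARCH}-debian-11"
)

mkdir -p /tmp/bitnami/pkg/cache/ && cd /tmp/bitnami/pkg/cache/
for COMPONENT in "${COMPONENTS[@]}"; do
  if [ ! -f "${COMPONENT}.tar.gz" ]; then
    # Fetch the tarball and its checksum sidecar (assumed URL layout).
    curl -SsLf "https://downloads.bitnami.com/files/stacksmith/${COMPONENT}.tar.gz" -O
    curl -SsLf "https://downloads.bitnami.com/files/stacksmith/${COMPONENT}.tar.gz.sha256" -O
  fi
  # Refuse to unpack anything whose checksum does not match.
  sha256sum -c "${COMPONENT}.tar.gz.sha256"
  tar -zxf "${COMPONENT}.tar.gz" -C /opt/bitnami --strip-components=2 --no-same-owner
done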
Component manifest (JSON):

@@ -1,10 +1,10 @@
 {
   "gosu": {
     "arch": "amd64",
-    "digest": "7969f4cc8ad0a260f891cccf5694686a513f4706b48771d605645be1f3f072e2",
+    "digest": "c42abbc5d57ba4c33c89e4daf46c33b0173565fbf533ef7a60281cf3283f611f",
     "distro": "debian-11",
     "type": "NAMI",
-    "version": "1.14.0-156"
+    "version": "1.16.0-0"
   },
   "java": {
     "arch": "amd64",
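The manifest pins a sha256 digest per component, so bumping gosu to 1.16.0-0 also means recording the new tarball's digest. A minimal check against the pinned value looks like the sketch below; the filename is illustrative, while the expected digest is taken directly from this diff.

# Verify a downloaded gosu tarball against the digest pinned above.
EXPECTED="c42abbc5d57ba4c33c89e4daf46c33b0173565fbf533ef7a60281cf3283f611f"
ACTUAL="$(sha256sum gosu-1.16.0-0-linux-amd64-debian-11.tar.gz | awk '{print $1}')"
[ "${ACTUAL}" = "${EXPECTED}" ] && echo "digest OK" || echo "digest mismatch" >&2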
README.md:

@@ -151,7 +151,7 @@ Additionally, SSL configuration can be easily activated following the next steps
 ### Setting up an Apache Spark Cluster
 
-An Apache Spark cluster can easily be setup with the default docker-compose.yml file from the root of this repo. The docker-compose includes two different services, `spark-master` and `spark-worker.`
+A Apache Spark cluster can easily be setup with the default docker-compose.yml file from the root of this repo. The docker-compose includes two different services, `spark-master` and `spark-worker.`
 
 By default, when you deploy the docker-compose file you will get an Apache Spark cluster with 1 master and 1 worker.
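The README section this hunk touches describes bringing up a one-master, one-worker cluster from the repo's docker-compose.yml. A sketch of that workflow, assuming the file is fetched from the repository's spark directory (the raw URL is an assumption, not stated in the diff):

# Fetch the compose file and start the default 1-master/1-worker cluster.
curl -LO https://raw.githubusercontent.com/bitnami/containers/main/bitnami/spark/docker-compose.yml
docker-compose up -d

# Add workers by scaling the spark-worker service.
docker-compose up -d --scale spark-worker=3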