2.5.1-debian-10-r2 release

This commit is contained in:
Bitnami Bot 2020-06-18 15:47:34 +00:00
parent 0cb65b41da
commit 358eafae8b
10 changed files with 93 additions and 250 deletions

View File

@ -19,8 +19,7 @@ RUN apt-get update && apt-get upgrade -y && \
COPY rootfs /
RUN /opt/bitnami/scripts/spring-cloud-dataflow/postunpack.sh
ENV BITNAMI_APP_NAME="spring-cloud-dataflow" \
BITNAMI_IMAGE_VERSION="2.5.1-debian-10-r1" \
JAVA_TOOL_OPTIONS="-Duser.home=/bitnami/spring-cloud-dataflow" \
BITNAMI_IMAGE_VERSION="2.5.1-debian-10-r2" \
PATH="/opt/bitnami/java/bin:/opt/bitnami/common/bin:$PATH"
USER 1001

View File

@ -6,21 +6,24 @@ services:
restart: always
environment:
- SERVER_PORT=9393
- SPRING_CLOUD_DATAFLOW_DATABASE_URL=jdbc:mariadb://mariadb-dataflow:3306/dataflow?useMysqlMetadata=true
- SPRING_CLOUD_DATAFLOW_DATABASE_USERNAME=bn_dataflow
- SPRING_CLOUD_DATAFLOW_DATABASE_PASSWORD=bn_dataflow
# enable advances features
# configuring database
- SPRING_DATASOURCE_URL=jdbc:mariadb://mariadb-dataflow:3306/dataflow?useMysqlMetadata=true
- SPRING_DATASOURCE_USERNAME=bn_dataflow
- SPRING_DATASOURCE_PASSWORD=bn_dataflow
- SPRING_DATASOURCE_DRIVER_CLASS_NAME=org.mariadb.jdbc.Driver
# we use mariadb 10.2+ so we need to set hibernate dialect.
- spring.jpa.properties.hibernate.dialect=org.hibernate.dialect.MariaDB102Dialect
# enable advanced features
- SPRING_CLOUD_DATAFLOW_FEATURES_STREAMS_ENABLED=true
- SPRING_CLOUD_DATAFLOW_FEATURES_TASKS_ENABLED=true
# configure dataflow stream
- SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI=http://spring-cloud-skipper:7577/api
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_HOST=rabbitmq
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PORT=5672
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_USERNAME=user
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PASSWORD=bitnami
- spring.cloud.dataflow.applicationProperties.stream.spring.rabbitmq.host=rabbitmq
- spring.cloud.dataflow.applicationProperties.stream.spring.rabbitmq.port=5672
- spring.cloud.dataflow.applicationProperties.stream.spring.rabbitmq.username=user
- spring.cloud.dataflow.applicationProperties.stream.spring.rabbitmq.password=bitnami
ports:
- '9393:9393'
- '9000-9099:9000-9099'
depends_on:
- mariadb-dataflow
- spring-cloud-skipper
@ -30,11 +33,15 @@ services:
restart: always
environment:
- SERVER_PORT=7577
- SPRING_CLOUD_SKIPPER_DATABASE_URL=jdbc:mariadb://mariadb-skipper:3306/skipper?useMysqlMetadata=true
- SPRING_CLOUD_SKIPPER_DATABASE_USERNAME=bn_skipper
- SPRING_CLOUD_SKIPPER_DATABASE_PASSWORD=bn_skipper
- SPRING_CLOUD_SKIPPER_SERVER_PLATFORM_LOCAL_ACCOUNTS_DEFAULT_PORTRANGE_LOW=20000
- SPRING_CLOUD_SKIPPER_SERVER_PLATFORM_LOCAL_ACCOUNTS_DEFAULT_PORTRANGE_HIGH=20100
- SPRING_DATASOURCE_URL=jdbc:mariadb://mariadb-skipper:3306/skipper?useMysqlMetadata=true
- SPRING_DATASOURCE_USERNAME=bn_skipper
- SPRING_DATASOURCE_PASSWORD=bn_skipper
- SPRING_DATASOURCE_DRIVER_CLASS_NAME=org.mariadb.jdbc.Driver
- spring.jpa.properties.hibernate.dialect=org.hibernate.dialect.MariaDB102Dialect
ports:
- '9100-9199:9100-9199'
- "20000-20100:20000-20100"
depends_on:
- mariadb-skipper
- rabbitmq

View File

@ -1,162 +0,0 @@
#!/bin/bash
#
# Bitnami Spring Cloud Data Flow library
# shellcheck disable=SC1091
# Load Generic Libraries
. /opt/bitnami/scripts/libfile.sh
. /opt/bitnami/scripts/libfs.sh
. /opt/bitnami/scripts/liblog.sh
. /opt/bitnami/scripts/libnet.sh
. /opt/bitnami/scripts/libservice.sh
. /opt/bitnami/scripts/libvalidations.sh
########################
# Validate settings in SPRING_CLOUD_DATAFLOW_* environment variables
# Globals:
#   SPRING_CLOUD_DATAFLOW_*
# Arguments:
#   None
# Returns:
#   None
#########################
dataflow_validate() {
    info "Validating settings in SPRING_CLOUD_DATAFLOW_* env vars"
    local error_code=0

    # Record the failure but keep validating, so every problem is reported in one pass
    print_validation_error() {
        error "$1"
        error_code=1
    }

    if [[ "$SPRING_CLOUD_KUBERNETES_SECRETS_ENABLE_API" = "true" ]]; then
        if is_empty_value "$SPRING_CLOUD_KUBERNETES_SECRETS_PATHS"; then
            # Fixed typo in message: "secrect" -> "secret"
            print_validation_error "You set the environment variable SPRING_CLOUD_KUBERNETES_SECRETS_ENABLE_API=true. A Kubernetes secret is expected to be mounted in SPRING_CLOUD_KUBERNETES_SECRETS_PATHS."
        else
            warn "Using Kubernetes Secrets."
        fi
        is_empty_value "$SPRING_CLOUD_KUBERNETES_CONFIG_NAME" && print_validation_error "If SPRING_CLOUD_KUBERNETES_SECRETS_ENABLE_API=true, you must set a ConfigMap name in SPRING_CLOUD_KUBERNETES_CONFIG_NAME."
    fi
    if [[ "$SPRING_CLOUD_DATAFLOW_FEATURES_STREAMS_ENABLED" = "true" ]]; then
        is_empty_value "$SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI" && print_validation_error "If SPRING_CLOUD_DATAFLOW_FEATURES_STREAMS_ENABLED=true then you must set a skipper server URI in SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI"
    fi
    # SERVER_PORT, when provided, must be a valid unprivileged port
    ! is_empty_value "$SERVER_PORT" && ! validate_port -unprivileged "$SERVER_PORT" && print_validation_error "SERVER_PORT with value = ${SERVER_PORT} is not a valid port."
    [[ "$error_code" -eq 0 ]] || return "$error_code"
}
########################
# Creates Spring Cloud Data Flow default configuration file
# Globals:
#   SPRING_CLOUD_DATAFLOW_*
# Arguments:
#   None
# Returns:
#   None
#########################
dataflow_create_default_config() {
    info "Creating '${SPRING_CLOUD_DATAFLOW_CONF_FILE}' as the main configuration file with default values"
    # Unquoted heredoc delimiter: the ${...} references below are expanded now,
    # baking the build-time defaults into the generated application.yml.
    # NOTE(review): YAML nesting reconstructed from a rendering that stripped
    # indentation — confirm 'maven' is top-level and 'datasource' sits under 'spring'.
    cat > "$SPRING_CLOUD_DATAFLOW_CONF_FILE" <<EOF
spring:
  cloud:
    config:
      enabled: ${SPRING_CLOUD_CONFIG_ENABLED_DEFAULT}
  datasource:
    testOnBorrow: true
    validationQuery: SELECT 1
maven:
  localRepository: ${SPRING_CLOUD_DATAFLOW_VOLUME_DIR}/.m2/repository/
EOF
}
########################
# Update Spring Cloud Data Flow configuration file with user custom inputs
# Globals:
#   SPRING_CLOUD_DATAFLOW_*
# Arguments:
#   None
# Returns:
#   None
#########################
dataflow_update_custom_config() {
    if ! is_empty_value "$SPRING_CLOUD_DATAFLOW_CLOUD_CONFIG_ENABLED"; then
        dataflow_conf_set "spring.cloud.config.enabled" "$SPRING_CLOUD_DATAFLOW_CLOUD_CONFIG_ENABLED"
    fi
    # When the Kubernetes Secrets API is enabled, these settings are expected to
    # come from the mounted secret instead of environment variables.
    if [[ "$SPRING_CLOUD_KUBERNETES_SECRETS_ENABLE_API" = "false" ]]; then
        # Database settings
        if ! is_empty_value "$SPRING_CLOUD_DATAFLOW_DATABASE_URL"; then
            dataflow_conf_set "spring.datasource.url" "$SPRING_CLOUD_DATAFLOW_DATABASE_URL"
        fi
        if ! is_empty_value "$SPRING_CLOUD_DATAFLOW_DATABASE_USERNAME"; then
            dataflow_conf_set "spring.datasource.username" "$SPRING_CLOUD_DATAFLOW_DATABASE_USERNAME"
        fi
        if ! is_empty_value "$SPRING_CLOUD_DATAFLOW_DATABASE_PASSWORD"; then
            dataflow_conf_set "spring.datasource.password" "$SPRING_CLOUD_DATAFLOW_DATABASE_PASSWORD"
        fi
        if ! is_empty_value "$SPRING_CLOUD_DATAFLOW_DATABASE_DRIVER"; then
            dataflow_conf_set "spring.datasource.driver-class-name" "$SPRING_CLOUD_DATAFLOW_DATABASE_DRIVER"
        fi
        if ! is_empty_value "$SPRING_CLOUD_DATAFLOW_DATABASE_URL"; then
            # A URL without an explicit driver defaults to the MariaDB driver
            if is_empty_value "$SPRING_CLOUD_DATAFLOW_DATABASE_DRIVER"; then
                dataflow_conf_set "spring.datasource.driver-class-name" "org.mariadb.jdbc.Driver"
            fi
            if [[ "$SPRING_CLOUD_DATAFLOW_DATABASE_DRIVER" = "org.mariadb.jdbc.Driver" ]] || is_empty_value "$SPRING_CLOUD_DATAFLOW_DATABASE_DRIVER"; then
                dataflow_conf_set "spring.jpa.properties.hibernate.dialect" "org.hibernate.dialect.MariaDB102Dialect"
            fi
        fi
        local -r stream_prefix="spring.cloud.dataflow.applicationProperties.stream"
        # Kafka settings
        local -r kafka_prefix="${stream_prefix}.spring.cloud.stream.kafka"
        if ! is_empty_value "$SPRING_CLOUD_DATAFLOW_STREAM_KAFKA_URI"; then
            dataflow_conf_set "${kafka_prefix}.binder.brokers" "$SPRING_CLOUD_DATAFLOW_STREAM_KAFKA_URI" \
                && dataflow_conf_set "${kafka_prefix}.streams.binder.brokers" "$SPRING_CLOUD_DATAFLOW_STREAM_KAFKA_URI"
        fi
        if ! is_empty_value "$SPRING_CLOUD_DATAFLOW_STREAM_ZOOKEEPER_URI"; then
            dataflow_conf_set "${kafka_prefix}.binder.zkNodes" "$SPRING_CLOUD_DATAFLOW_STREAM_ZOOKEEPER_URI" \
                && dataflow_conf_set "${kafka_prefix}.streams.binder.zkNodes" "$SPRING_CLOUD_DATAFLOW_STREAM_ZOOKEEPER_URI"
        fi
        # RabbitMQ settings
        local -r rabbitmq_prefix="${stream_prefix}.spring.rabbitmq"
        if ! is_empty_value "$SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_HOST"; then
            dataflow_conf_set "${rabbitmq_prefix}.host" "$SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_HOST"
        fi
        if ! is_empty_value "$SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PORT"; then
            dataflow_conf_set "${rabbitmq_prefix}.port" "$SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PORT"
        fi
        if ! is_empty_value "$SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_USERNAME"; then
            dataflow_conf_set "${rabbitmq_prefix}.username" "$SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_USERNAME"
        fi
        if ! is_empty_value "$SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PASSWORD"; then
            dataflow_conf_set "${rabbitmq_prefix}.password" "$SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PASSWORD"
        fi
    fi
    # Avoid exit code of previous commands to affect the result of this function
    true
}
########################
# Add or modify an entry in the Spring Cloud Data Flow configuration file ("$SPRING_CLOUD_DATAFLOW_CONF_FILE")
# Globals:
#   SPRING_CLOUD_DATAFLOW_CONF_FILE
# Arguments:
#   $1 - Configuration key (dotted YAML path)
#   $2 - Value to assign to the key
# Returns:
#   None
#########################
dataflow_conf_set() {
    local -r property="${1:?key missing}"
    local -r setting="${2:?value missing}"
    info "Setting ${property} option"
    debug "Setting ${property} to '${setting}' in dataflow configuration"
    # 'yq w -i' rewrites the value in place at the given YAML path
    yq w -i "$SPRING_CLOUD_DATAFLOW_CONF_FILE" "${property}" "${setting}"
}
########################
# Ensure Spring Cloud Data Flow is initialized
# Globals:
#   SPRING_CLOUD_DATAFLOW_*
# Arguments:
#   None
# Returns:
#   None
#########################
dataflow_initialize() {
    # Only apply env-var based configuration when the file can actually be written
    if ! is_file_writable "$SPRING_CLOUD_DATAFLOW_CONF_FILE"; then
        warn "The Spring Cloud Data Flow configuration file '${SPRING_CLOUD_DATAFLOW_CONF_FILE}' is not writable. Configurations based on environment variables will not be applied for this file."
    else
        info "Updating '${SPRING_CLOUD_DATAFLOW_CONF_FILE}' with custom configuration"
        dataflow_update_custom_config
    fi
}

View File

@ -19,7 +19,7 @@ export BITNAMI_DEBUG="${BITNAMI_DEBUG:-false}"
# variable will be overridden with the value specified in that file
spring_cloud_dataflow_env_vars=(
SERVER_PORT
SPRING_CLOUD_DATAFLOW_CLOUD_CONFIG_ENABLED
SPRING_CLOUD_CONFIG_ENABLED
SPRING_CLOUD_KUBERNETES_SECRETS_ENABLE_API
SPRING_CLOUD_KUBERNETES_CONFIG_NAME
SPRING_CLOUD_KUBERNETES_SECRETS_PATHS
@ -27,16 +27,6 @@ spring_cloud_dataflow_env_vars=(
SPRING_CLOUD_DATAFLOW_FEATURES_TASKS_ENABLED
SPRING_CLOUD_DATAFLOW_FEATURES_SCHEDULES_ENABLED
SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI
SPRING_CLOUD_DATAFLOW_DATABASE_URL
SPRING_CLOUD_DATAFLOW_DATABASE_USERNAME
SPRING_CLOUD_DATAFLOW_DATABASE_PASSWORD
SPRING_CLOUD_DATAFLOW_DATABASE_DRIVER
SPRING_CLOUD_DATAFLOW_STREAM_KAFKA_URI
SPRING_CLOUD_DATAFLOW_STREAM_ZOOKEEPER_URI
SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_HOST
SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PORT
SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_USERNAME
SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PASSWORD
)
for env_var in "${spring_cloud_dataflow_env_vars[@]}"; do
@ -52,22 +42,16 @@ unset spring_cloud_dataflow_env_vars
export SPRING_CLOUD_DATAFLOW_BASE_DIR="${BITNAMI_ROOT_DIR}/spring-cloud-dataflow"
export SPRING_CLOUD_DATAFLOW_VOLUME_DIR="${BITNAMI_VOLUME_DIR}/spring-cloud-dataflow"
export SPRING_CLOUD_DATAFLOW_CONF_DIR="${SPRING_CLOUD_DATAFLOW_BASE_DIR}/conf"
export SPRING_CLOUD_DATAFLOW_LOGS_DIR="${SPRING_CLOUD_DATAFLOW_BASE_DIR}/logs"
export SPRING_CLOUD_DATAFLOW_TMP_DIR="${SPRING_CLOUD_DATAFLOW_BASE_DIR}/tmp"
export SPRING_CLOUD_DATAFLOW_CONF_FILE="${SPRING_CLOUD_DATAFLOW_CONF_DIR}/application.yml"
export SPRING_CLOUD_DATAFLOW_M2_DIR="/.m2"
# System users (when running with a privileged user)
export SPRING_CLOUD_DATAFLOW_DAEMON_USER="dataflow"
export SPRING_CLOUD_DATAFLOW_DAEMON_GROUP="dataflow"
# SPRING CLOUD DATAFLOW Build-time defaults conf, these variables are used to create the default config file at build time.
export SPRING_CLOUD_CONFIG_ENABLED_DEFAULT="false"
# SPRING CLOUD DATAFLOW authentication.
# Dataflow settings
export SERVER_PORT="${SERVER_PORT:-}"
export SPRING_CLOUD_DATAFLOW_CLOUD_CONFIG_ENABLED="${SPRING_CLOUD_DATAFLOW_CLOUD_CONFIG_ENABLED:-}"
export SPRING_CLOUD_CONFIG_ENABLED="${SPRING_CLOUD_CONFIG_ENABLED:-false}"
export SPRING_CLOUD_KUBERNETES_SECRETS_ENABLE_API="${SPRING_CLOUD_KUBERNETES_SECRETS_ENABLE_API:-false}"
export SPRING_CLOUD_KUBERNETES_CONFIG_NAME="${SPRING_CLOUD_KUBERNETES_CONFIG_NAME:-}"
export SPRING_CLOUD_KUBERNETES_SECRETS_PATHS="${SPRING_CLOUD_KUBERNETES_SECRETS_PATHS:-}"
@ -76,18 +60,4 @@ export SPRING_CLOUD_DATAFLOW_FEATURES_TASKS_ENABLED="${SPRING_CLOUD_DATAFLOW_FEA
export SPRING_CLOUD_DATAFLOW_FEATURES_SCHEDULES_ENABLED="${SPRING_CLOUD_DATAFLOW_FEATURES_SCHEDULES_ENABLED:-false}"
export SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI="${SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI:-}"
# Database settings
export SPRING_CLOUD_DATAFLOW_DATABASE_URL="${SPRING_CLOUD_DATAFLOW_DATABASE_URL:-}"
export SPRING_CLOUD_DATAFLOW_DATABASE_USERNAME="${SPRING_CLOUD_DATAFLOW_DATABASE_USERNAME:-}"
export SPRING_CLOUD_DATAFLOW_DATABASE_PASSWORD="${SPRING_CLOUD_DATAFLOW_DATABASE_PASSWORD:-}"
export SPRING_CLOUD_DATAFLOW_DATABASE_DRIVER="${SPRING_CLOUD_DATAFLOW_DATABASE_DRIVER:-}"
# Messaging settings
export SPRING_CLOUD_DATAFLOW_STREAM_KAFKA_URI="${SPRING_CLOUD_DATAFLOW_STREAM_KAFKA_URI:-}"
export SPRING_CLOUD_DATAFLOW_STREAM_ZOOKEEPER_URI="${SPRING_CLOUD_DATAFLOW_STREAM_ZOOKEEPER_URI:-}"
export SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_HOST="${SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_HOST:-}"
export SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PORT="${SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PORT:-}"
export SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_USERNAME="${SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_USERNAME:-}"
export SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PASSWORD="${SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PASSWORD:-}"
# Custom environment variables may be defined below

View File

@ -11,7 +11,6 @@ set -o pipefail
# Load libraries
. /opt/bitnami/scripts/libbitnami.sh
. /opt/bitnami/scripts/libspringclouddataflow.sh
# Load Spring Cloud Data Flow environment variables
. /opt/bitnami/scripts/spring-cloud-dataflow-env.sh

View File

@ -11,17 +11,11 @@ set -o pipefail
# Load libraries
. /opt/bitnami/scripts/libfs.sh
. /opt/bitnami/scripts/libspringclouddataflow.sh
# Load Spring Cloud Data Flow environment variables
. /opt/bitnami/scripts/spring-cloud-dataflow-env.sh
# Configure Spring Cloud Data Flow options based on build-time defaults
info "Configuring default Spring Cloud Data Flow options"
ensure_dir_exists "$SPRING_CLOUD_DATAFLOW_CONF_DIR"
dataflow_create_default_config
for dir in "${SPRING_CLOUD_DATAFLOW_VOLUME_DIR}" "${SPRING_CLOUD_DATAFLOW_CONF_DIR}" "${SPRING_CLOUD_DATAFLOW_LOGS_DIR}" "${SPRING_CLOUD_DATAFLOW_TMP_DIR}"; do
for dir in "${SPRING_CLOUD_DATAFLOW_VOLUME_DIR}" "${SPRING_CLOUD_DATAFLOW_CONF_DIR}" "${SPRING_CLOUD_DATAFLOW_M2_DIR}"; do
ensure_dir_exists "$dir"
chmod -R g+rwX "$dir"
done

View File

@ -11,7 +11,6 @@ set -o pipefail
# Load libraries
. /opt/bitnami/scripts/liblog.sh
. /opt/bitnami/scripts/libspringclouddataflow.sh
. /opt/bitnami/scripts/libos.sh
# Load Spring Cloud Data Flow environment variables
@ -20,7 +19,7 @@ set -o pipefail
info "** Starting Spring Cloud Data Flow **"
__run_cmd="java"
__run_flags=("-jar" "${SPRING_CLOUD_DATAFLOW_BASE_DIR}/spring-cloud-dataflow.jar" "--spring.config.additional-location=${SPRING_CLOUD_DATAFLOW_CONF_FILE}" "$@")
__run_flags=("-jar" "-Duser.home=${HOME}" "${SPRING_CLOUD_DATAFLOW_BASE_DIR}/spring-cloud-dataflow.jar" "--spring.config.additional-location=${SPRING_CLOUD_DATAFLOW_CONF_FILE}" "$@")
if am_i_root; then
exec gosu "$SPRING_CLOUD_DATAFLOW_DAEMON_USER" "${__run_cmd}" "${__run_flags[@]}"

View File

@ -12,14 +12,37 @@ set -o pipefail
# Load Generic Libraries
. /opt/bitnami/scripts/libvalidations.sh
. /opt/bitnami/scripts/libos.sh
. /opt/bitnami/scripts/libspringclouddataflow.sh
# Load Spring Cloud Data Flow environment variables
. /opt/bitnami/scripts/spring-cloud-dataflow-env.sh
# Ensure Spring Cloud Data Flow environment variables settings are valid
dataflow_validate
# Ensure 'daemon' user exists when running as 'root'
am_i_root && ensure_user_exists "$SPRING_CLOUD_DATAFLOW_DAEMON_USER" "$SPRING_CLOUD_DATAFLOW_DAEMON_GROUP"
# Ensure Spring Cloud Data Flow is initialized
dataflow_initialize
# Validations
# Ensure Spring Cloud Data Flow environment variables settings are valid
info "Validating settings in SPRING_CLOUD_DATAFLOW_* env vars"
error_code=0
print_validation_error() {
error "$1"
error_code=1
}
if [[ "$SPRING_CLOUD_KUBERNETES_SECRETS_ENABLE_API" = "true" ]]; then
if is_empty_value "$SPRING_CLOUD_KUBERNETES_SECRETS_PATHS"; then
print_validation_error "You set the environment variable SPRING_CLOUD_KUBERNETES_SECRETS_ENABLE_API=true. A Kubernetes secrect is expected to be mounted in SPRING_CLOUD_KUBERNETES_SECRETS_PATHS."
else
warn "Using Kubernetes Secrets."
fi
is_empty_value "$SPRING_CLOUD_KUBERNETES_CONFIG_NAME" && print_validation_error "If SPRING_CLOUD_KUBERNETES_SECRETS_ENABLE_API=true. You must set a ConfigMap name in SPRING_CLOUD_KUBERNETES_CONFIG_NAME."
fi
if [[ "$SPRING_CLOUD_DATAFLOW_FEATURES_STREAMS_ENABLED" = "true" ]]; then
is_empty_value "$SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI" && print_validation_error "If SPRING_CLOUD_DATAFLOW_FEATURES_STREAMS_ENABLED=true then you must set a skipper server URI in SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI"
fi
! is_empty_value "$SERVER_PORT" && ! validate_port -unprivileged "$SERVER_PORT" && print_validation_error "SERVER_PORT with value = ${SERVER_PORT} is not a valid port."
exit "$error_code"

View File

@ -26,7 +26,7 @@ $ docker-compose up -d
> This [CVE scan report](https://quay.io/repository/bitnami/spring-cloud-dataflow?tab=tags) contains a security report with all open CVEs. To get the list of actionable security issues, find the "latest" tag, click the vulnerability report link under the corresponding "Security scan" field and then select the "Only show fixable" filter on the next page.
# How to deploy Thanos in Kubernetes?
# How to deploy Data Flow in Kubernetes?
Deploying Bitnami applications as Helm Charts is the easiest way to get started with our applications on Kubernetes. Read more about the installation in the [Bitnami Spring Cloud Data Flow Chart GitHub repository](https://github.com/bitnami/charts/tree/master/bitnami/spring-cloud-dataflow).
@ -39,7 +39,7 @@ Non-root container images add an extra layer of security and are generally recom
Learn more about the Bitnami tagging policy and the difference between rolling tags and immutable tags [in our documentation page](https://docs.bitnami.com/tutorials/understand-rolling-tags-containers/).
* [`2-debian-10`, `2.5.1-debian-10-r1`, `2`, `2.5.1`, `latest` (2/debian-10/Dockerfile)](https://github.com/bitnami/bitnami-docker-spring-cloud-dataflow/blob/2.5.1-debian-10-r1/2/debian-10/Dockerfile)
* [`2-debian-10`, `2.5.1-debian-10-r2`, `2`, `2.5.1`, `latest` (2/debian-10/Dockerfile)](https://github.com/bitnami/bitnami-docker-spring-cloud-dataflow/blob/2.5.1-debian-10-r2/2/debian-10/Dockerfile)
Subscribe to project updates by watching the [bitnami/spring-cloud-dataflow GitHub repo](https://github.com/bitnami/bitnami-docker-spring-cloud-dataflow).
@ -71,11 +71,16 @@ You can use some environment variable in order to configure the deployment of sp
A relational database is used to store stream and task definitions as well as the state of executed tasks. Spring Cloud Data Flow provides schemas for H2, MySQL, Oracle, PostgreSQL, Db2, and SQL Server. Use the following environment variables to configure the connection.
- SPRING_CLOUD_DATAFLOW_DATABASE_URL=jdbc:mariadb://mariadb-dataflow:3306/dataflow?useMysqlMetadata=true
- SPRING_CLOUD_DATAFLOW_DATABASE_USERNAME=bn_dataflow
- SPRING_CLOUD_DATAFLOW_DATABASE_PASSWORD=bn_dataflow
- SPRING_DATASOURCE_URL=jdbc:mariadb://mariadb-dataflow:3306/dataflow?useMysqlMetadata=true
- SPRING_DATASOURCE_USERNAME=bn_dataflow
- SPRING_DATASOURCE_PASSWORD=bn_dataflow
- SPRING_DATASOURCE_DRIVER_CLASS_NAME=org.mariadb.jdbc.Driver
## Configuring advances features
If you are using MariaDB 10.2 or greater, you must also set the following environment variable:
- spring.jpa.properties.hibernate.dialect=org.hibernate.dialect.MariaDB102Dialect
## Configuring additional features
Spring Cloud Data Flow Server offers specific set of features that can be enabled/disabled when launching.
@ -90,15 +95,17 @@ In order to deploy streams using data flow you will require [Spring Cloud Skippe
### Using RabbitMQ
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_HOST=rabbitmq
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PORT=5672
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_USERNAME=user
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PASSWORD=bitnami
- spring.cloud.dataflow.applicationProperties.stream.spring.rabbitmq.host=rabbitmq
- spring.cloud.dataflow.applicationProperties.stream.spring.rabbitmq.port=5672
- spring.cloud.dataflow.applicationProperties.stream.spring.rabbitmq.username=user
- spring.cloud.dataflow.applicationProperties.stream.spring.rabbitmq.password=bitnami
### Using Kafka
- SPRING_CLOUD_DATAFLOW_STREAM_KAFKA_URI=PLAINTEXT://kafka-broker:9092
- SPRING_CLOUD_DATAFLOW_STREAM_ZOOKEEPER_URI=zookeeper:2181
- spring.cloud.dataflow.applicationProperties.stream.spring.cloud.stream.kafka.binder.brokers=PLAINTEXT://kafka-broker:9092
- spring.cloud.dataflow.applicationProperties.stream.spring.cloud.stream.kafka.streams.binder.brokers=PLAINTEXT://kafka-broker:9092
- spring.cloud.dataflow.applicationProperties.stream.spring.cloud.stream.kafka.binder.zkNodes=zookeeper:2181
- spring.cloud.dataflow.applicationProperties.stream.spring.cloud.stream.kafka.streams.binder.zkNodes=zookeeper:2181
Consult the [spring-cloud-dataflow Reference Documentation](https://docs.spring.io/spring-cloud-dataflow/docs/current/reference/htmlsingle/#configuration-local) to find the complete list of configuration options.

View File

@ -6,21 +6,24 @@ services:
restart: always
environment:
- SERVER_PORT=9393
- SPRING_CLOUD_DATAFLOW_DATABASE_URL=jdbc:mariadb://mariadb-dataflow:3306/dataflow?useMysqlMetadata=true
- SPRING_CLOUD_DATAFLOW_DATABASE_USERNAME=bn_dataflow
- SPRING_CLOUD_DATAFLOW_DATABASE_PASSWORD=bn_dataflow
# enable advances features
# configuring database
- SPRING_DATASOURCE_URL=jdbc:mariadb://mariadb-dataflow:3306/dataflow?useMysqlMetadata=true
- SPRING_DATASOURCE_USERNAME=bn_dataflow
- SPRING_DATASOURCE_PASSWORD=bn_dataflow
- SPRING_DATASOURCE_DRIVER_CLASS_NAME=org.mariadb.jdbc.Driver
# we use mariadb 10.2+ so we need to set hibernate dialect.
- spring.jpa.properties.hibernate.dialect=org.hibernate.dialect.MariaDB102Dialect
# enable advanced features
- SPRING_CLOUD_DATAFLOW_FEATURES_STREAMS_ENABLED=true
- SPRING_CLOUD_DATAFLOW_FEATURES_TASKS_ENABLED=true
# configure dataflow stream
- SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI=http://spring-cloud-skipper:7577/api
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_HOST=rabbitmq
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PORT=5672
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_USERNAME=user
- SPRING_CLOUD_DATAFLOW_STREAM_RABBITMQ_PASSWORD=bitnami
- spring.cloud.dataflow.applicationProperties.stream.spring.rabbitmq.host=rabbitmq
- spring.cloud.dataflow.applicationProperties.stream.spring.rabbitmq.port=5672
- spring.cloud.dataflow.applicationProperties.stream.spring.rabbitmq.username=user
- spring.cloud.dataflow.applicationProperties.stream.spring.rabbitmq.password=bitnami
ports:
- '9393:9393'
- '9000-9099:9000-9099'
depends_on:
- mariadb-dataflow
- spring-cloud-skipper
@ -30,11 +33,15 @@ services:
restart: always
environment:
- SERVER_PORT=7577
- SPRING_CLOUD_SKIPPER_DATABASE_URL=jdbc:mariadb://mariadb-skipper:3306/skipper?useMysqlMetadata=true
- SPRING_CLOUD_SKIPPER_DATABASE_USERNAME=bn_skipper
- SPRING_CLOUD_SKIPPER_DATABASE_PASSWORD=bn_skipper
- SPRING_CLOUD_SKIPPER_SERVER_PLATFORM_LOCAL_ACCOUNTS_DEFAULT_PORTRANGE_LOW=20000
- SPRING_CLOUD_SKIPPER_SERVER_PLATFORM_LOCAL_ACCOUNTS_DEFAULT_PORTRANGE_HIGH=20100
- SPRING_DATASOURCE_URL=jdbc:mariadb://mariadb-skipper:3306/skipper?useMysqlMetadata=true
- SPRING_DATASOURCE_USERNAME=bn_skipper
- SPRING_DATASOURCE_PASSWORD=bn_skipper
- SPRING_DATASOURCE_DRIVER_CLASS_NAME=org.mariadb.jdbc.Driver
- spring.jpa.properties.hibernate.dialect=org.hibernate.dialect.MariaDB102Dialect
ports:
- '9100-9199:9100-9199'
- "20000-20100:20000-20100"
depends_on:
- mariadb-skipper
- rabbitmq