add LOGICAL_BACKUP_FILENAME_DATE_FORMAT env var to the logical backup cronjob
This commit is contained in:
		
							parent
							
								
									68c4b49636
								
							
						
					
					
						commit
						73f4b6a3f1
					
				|  | @ -551,6 +551,9 @@ spec: | |||
|                     default: "30 00 * * *" | ||||
|                   logical_backup_cronjob_environment_secret: | ||||
|                     type: string | ||||
|                   logical_backup_filename_date_format: | ||||
|                     type: string | ||||
|                     default: "+%s" | ||||
|               debug: | ||||
|                 type: object | ||||
|                 properties: | ||||
|  |  | |||
|  | @ -392,6 +392,8 @@ configLogicalBackup: | |||
|   logical_backup_schedule: "30 00 * * *" | ||||
|   # secret to be used as reference for env variables in cronjob | ||||
|   logical_backup_cronjob_environment_secret: "" | ||||
|   # backup filename date format (passed to `date`, e.g. "+%s"); keep in sync with the CRD default | ||||
|   logical_backup_filename_date_format: "+%s" | ||||
| 
 | ||||
| # automate creation of human users with teams API service | ||||
| configTeamsApi: | ||||
|  |  | |||
|  | @ -10,6 +10,7 @@ ALL_DB_SIZE_QUERY="select sum(pg_database_size(datname)::numeric) from pg_databa | |||
| PG_BIN=$PG_DIR/$PG_VERSION/bin | ||||
| DUMP_SIZE_COEFF=5 | ||||
| ERRORCOUNT=0 | ||||
| TIMESTAMP=$(date $LOGICAL_BACKUP_FILENAME_DATE_FORMAT) | ||||
| 
 | ||||
| TOKEN=$(cat /var/run/secrets/kubernetes.io/serviceaccount/token) | ||||
| KUBERNETES_SERVICE_PORT=${KUBERNETES_SERVICE_PORT:-443} | ||||
|  | @ -45,7 +46,7 @@ function compress { | |||
| } | ||||
| 
 | ||||
| function az_upload { | ||||
|     PATH_TO_BACKUP=$LOGICAL_BACKUP_S3_BUCKET"/"$LOGICAL_BACKUP_S3_BUCKET_PREFIX"/"$SCOPE$LOGICAL_BACKUP_S3_BUCKET_SCOPE_SUFFIX"/logical_backups/"$(date +%s).sql.gz | ||||
|     PATH_TO_BACKUP=$LOGICAL_BACKUP_S3_BUCKET"/"$LOGICAL_BACKUP_S3_BUCKET_PREFIX"/"$SCOPE$LOGICAL_BACKUP_S3_BUCKET_SCOPE_SUFFIX"/logical_backups/"$TIMESTAMP.sql.gz | ||||
| 
 | ||||
|     az storage blob upload --file "$1" --account-name "$LOGICAL_BACKUP_AZURE_STORAGE_ACCOUNT_NAME" --account-key "$LOGICAL_BACKUP_AZURE_STORAGE_ACCOUNT_KEY" -c "$LOGICAL_BACKUP_AZURE_STORAGE_CONTAINER" -n "$PATH_TO_BACKUP" | ||||
| } | ||||
|  | @ -107,7 +108,7 @@ function aws_upload { | |||
|     # mimic bucket setup from Spilo | ||||
|     # to keep logical backups at the same path as WAL | ||||
|     # NB: $LOGICAL_BACKUP_S3_BUCKET_SCOPE_SUFFIX already contains the leading "/" when set by the Postgres Operator | ||||
|     PATH_TO_BACKUP=s3://$LOGICAL_BACKUP_S3_BUCKET"/"$LOGICAL_BACKUP_S3_BUCKET_PREFIX"/"$SCOPE$LOGICAL_BACKUP_S3_BUCKET_SCOPE_SUFFIX"/logical_backups/"$(date +%s).sql.gz | ||||
|     PATH_TO_BACKUP=s3://$LOGICAL_BACKUP_S3_BUCKET"/"$LOGICAL_BACKUP_S3_BUCKET_PREFIX"/"$SCOPE$LOGICAL_BACKUP_S3_BUCKET_SCOPE_SUFFIX"/logical_backups/"$TIMESTAMP.sql.gz | ||||
| 
 | ||||
|     args=() | ||||
| 
 | ||||
|  | @ -120,7 +121,7 @@ function aws_upload { | |||
| } | ||||
| 
 | ||||
| function gcs_upload { | ||||
|     PATH_TO_BACKUP=gs://$LOGICAL_BACKUP_S3_BUCKET"/"$LOGICAL_BACKUP_S3_BUCKET_PREFIX"/"$SCOPE$LOGICAL_BACKUP_S3_BUCKET_SCOPE_SUFFIX"/logical_backups/"$(date +%s).sql.gz | ||||
|     PATH_TO_BACKUP=gs://$LOGICAL_BACKUP_S3_BUCKET"/"$LOGICAL_BACKUP_S3_BUCKET_PREFIX"/"$SCOPE$LOGICAL_BACKUP_S3_BUCKET_SCOPE_SUFFIX"/logical_backups/"$TIMESTAMP.sql.gz | ||||
| 
 | ||||
|     gsutil -o Credentials:gs_service_key_file=$LOGICAL_BACKUP_GOOGLE_APPLICATION_CREDENTIALS cp - "$PATH_TO_BACKUP" | ||||
| } | ||||
|  |  | |||
|  | @ -101,6 +101,7 @@ data: | |||
|   logical_backup_s3_sse: "AES256" | ||||
|   logical_backup_s3_retention_time: "" | ||||
|   logical_backup_schedule: "30 00 * * *" | ||||
|   logical_backup_filename_date_format: "+%s" | ||||
|   major_version_upgrade_mode: "manual" | ||||
|   # major_version_upgrade_team_allow_list: "" | ||||
|   master_dns_name_format: "{cluster}.{namespace}.{hostedzone}" | ||||
|  |  | |||
|  | @ -1800,6 +1800,9 @@ var OperatorConfigCRDResourceValidation = apiextv1.CustomResourceValidation{ | |||
| 							"logical_backup_cronjob_environment_secret": { | ||||
| 								Type: "string", | ||||
| 							}, | ||||
| 							"logical_backup_filename_date_format": { | ||||
| 								Type: "string", | ||||
| 							}, | ||||
| 						}, | ||||
| 					}, | ||||
| 					"debug": { | ||||
|  |  | |||
|  | @ -244,6 +244,7 @@ type OperatorLogicalBackupConfiguration struct { | |||
| 	MemoryRequest                string `json:"logical_backup_memory_request,omitempty"` | ||||
| 	CPULimit                     string `json:"logical_backup_cpu_limit,omitempty"` | ||||
| 	MemoryLimit                  string `json:"logical_backup_memory_limit,omitempty"` | ||||
| 	FilenameDateFormat           string `json:"logical_backup_filename_date_format,omitempty"` | ||||
| } | ||||
| 
 | ||||
| // PatroniConfiguration defines configuration for Patroni
 | ||||
|  |  | |||
|  | @ -1611,6 +1611,7 @@ func TestCompareLogicalBackupJob(t *testing.T) { | |||
| 					LogicalBackupS3SSE:                    "aws:kms", | ||||
| 					LogicalBackupS3RetentionTime:          "3 months", | ||||
| 					LogicalBackupCronjobEnvironmentSecret: "", | ||||
| 					LogicalBackupFilenameDateFormat:       "+%s", | ||||
| 				}, | ||||
| 			}, | ||||
| 		}, client, pg, logger, eventRecorder) | ||||
|  |  | |||
|  | @ -2486,6 +2486,10 @@ func (c *Cluster) generateLogicalBackupPodEnvVars() []v1.EnvVar { | |||
| 			Name:  "LOGICAL_BACKUP_S3_BUCKET_SCOPE_SUFFIX", | ||||
| 			Value: getBucketScopeSuffix(string(c.Postgresql.GetUID())), | ||||
| 		}, | ||||
| 		{ | ||||
| 			Name:  "LOGICAL_BACKUP_FILENAME_DATE_FORMAT", | ||||
| 			Value: c.OpConfig.LogicalBackup.LogicalBackupFilenameDateFormat, | ||||
| 		}, | ||||
| 	} | ||||
| 
 | ||||
| 	switch backupProvider { | ||||
|  |  | |||
|  | @ -3784,21 +3784,26 @@ func TestGenerateLogicalBackupPodEnvVars(t *testing.T) { | |||
| 		}, | ||||
| 		{ | ||||
| 			envIndex:       13, | ||||
| 			envVarConstant: "LOGICAL_BACKUP_FILENAME_DATE_FORMAT", | ||||
| 			envVarValue:    "+%s", | ||||
| 		}, | ||||
| 		{ | ||||
| 			envIndex:       14, | ||||
| 			envVarConstant: "LOGICAL_BACKUP_S3_REGION", | ||||
| 			envVarValue:    "eu-central-1", | ||||
| 		}, | ||||
| 		{ | ||||
| 			envIndex:       14, | ||||
| 			envIndex:       15, | ||||
| 			envVarConstant: "LOGICAL_BACKUP_S3_ENDPOINT", | ||||
| 			envVarValue:    "", | ||||
| 		}, | ||||
| 		{ | ||||
| 			envIndex:       15, | ||||
| 			envIndex:       16, | ||||
| 			envVarConstant: "LOGICAL_BACKUP_S3_SSE", | ||||
| 			envVarValue:    "", | ||||
| 		}, | ||||
| 		{ | ||||
| 			envIndex:       16, | ||||
| 			envIndex:       17, | ||||
| 			envVarConstant: "LOGICAL_BACKUP_S3_RETENTION_TIME", | ||||
| 			envVarValue:    "1 month", | ||||
| 		}, | ||||
|  | @ -3811,7 +3816,7 @@ func TestGenerateLogicalBackupPodEnvVars(t *testing.T) { | |||
| 			envVarValue:    "gcs", | ||||
| 		}, | ||||
| 		{ | ||||
| 			envIndex:       13, | ||||
| 			envIndex:       14, | ||||
| 			envVarConstant: "LOGICAL_BACKUP_GOOGLE_APPLICATION_CREDENTIALS", | ||||
| 			envVarValue:    "some-path-to-credentials", | ||||
| 		}, | ||||
|  | @ -3824,17 +3829,17 @@ func TestGenerateLogicalBackupPodEnvVars(t *testing.T) { | |||
| 			envVarValue:    "az", | ||||
| 		}, | ||||
| 		{ | ||||
| 			envIndex:       13, | ||||
| 			envIndex:       14, | ||||
| 			envVarConstant: "LOGICAL_BACKUP_AZURE_STORAGE_ACCOUNT_NAME", | ||||
| 			envVarValue:    "some-azure-storage-account-name", | ||||
| 		}, | ||||
| 		{ | ||||
| 			envIndex:       14, | ||||
| 			envIndex:       15, | ||||
| 			envVarConstant: "LOGICAL_BACKUP_AZURE_STORAGE_CONTAINER", | ||||
| 			envVarValue:    "some-azure-storage-container", | ||||
| 		}, | ||||
| 		{ | ||||
| 			envIndex:       15, | ||||
| 			envIndex:       16, | ||||
| 			envVarConstant: "LOGICAL_BACKUP_AZURE_STORAGE_ACCOUNT_KEY", | ||||
| 			envVarValue:    "some-azure-storage-account-key", | ||||
| 		}, | ||||
|  | @ -3842,7 +3847,7 @@ func TestGenerateLogicalBackupPodEnvVars(t *testing.T) { | |||
| 
 | ||||
| 	expectedLogicalBackupRetentionTime := []ExpectedValue{ | ||||
| 		{ | ||||
| 			envIndex:       16, | ||||
| 			envIndex:       17, | ||||
| 			envVarConstant: "LOGICAL_BACKUP_S3_RETENTION_TIME", | ||||
| 			envVarValue:    "3 months", | ||||
| 		}, | ||||
|  | @ -3861,6 +3866,7 @@ func TestGenerateLogicalBackupPodEnvVars(t *testing.T) { | |||
| 					LogicalBackupProvider:        "s3", | ||||
| 					LogicalBackupS3Bucket:        dummyBucket, | ||||
| 					LogicalBackupS3BucketPrefix:  "spilo", | ||||
| 					LogicalBackupFilenameDateFormat: "+%s", | ||||
| 					LogicalBackupS3Region:        "eu-central-1", | ||||
| 					LogicalBackupS3RetentionTime: "1 month", | ||||
| 				}, | ||||
|  |  | |||
|  | @ -200,6 +200,7 @@ func (c *Controller) importConfigurationFromCRD(fromCRD *acidv1.OperatorConfigur | |||
| 	result.LogicalBackupMemoryRequest = fromCRD.LogicalBackup.MemoryRequest | ||||
| 	result.LogicalBackupCPULimit = fromCRD.LogicalBackup.CPULimit | ||||
| 	result.LogicalBackupMemoryLimit = fromCRD.LogicalBackup.MemoryLimit | ||||
| 	result.LogicalBackupFilenameDateFormat = fromCRD.LogicalBackup.FilenameDateFormat | ||||
| 
 | ||||
| 	// debug config
 | ||||
| 	result.DebugLogging = fromCRD.OperatorDebug.DebugLogging | ||||
|  |  | |||
|  | @ -129,6 +129,7 @@ type LogicalBackup struct { | |||
| 	LogicalBackupSchedule                     string `name:"logical_backup_schedule" default:"30 00 * * *"` | ||||
| 	LogicalBackupDockerImage                  string `name:"logical_backup_docker_image" default:"ghcr.io/zalando/postgres-operator/logical-backup:v1.14.0"` | ||||
| 	LogicalBackupProvider                     string `name:"logical_backup_provider" default:"s3"` | ||||
| 	LogicalBackupFilenameDateFormat           string `name:"logical_backup_filename_date_format" default:"+%s"` | ||||
| 	LogicalBackupAzureStorageAccountName      string `name:"logical_backup_azure_storage_account_name" default:""` | ||||
| 	LogicalBackupAzureStorageContainer        string `name:"logical_backup_azure_storage_container" default:""` | ||||
| 	LogicalBackupAzureStorageAccountKey       string `name:"logical_backup_azure_storage_account_key" default:""` | ||||
|  |  | |||
		Loading…
	
		Reference in New Issue