github.com/argoproj-labs/argocd-operator@v0.10.0/build/util/util.sh

#!/bin/bash -e

# Copyright 2020 ArgoCD Operator Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

BACKUP_SCRIPT=$0
BACKUP_ACTION=$1
BACKUP_LOCATION=$2
BACKUP_FILENAME=argocd-backup.yaml
BACKUP_EXPORT_LOCATION=/tmp/${BACKUP_FILENAME}
BACKUP_ENCRYPT_LOCATION=/backups/${BACKUP_FILENAME}
BACKUP_KEY_LOCATION=/secrets/backup.key
DEFAULT_BACKUP_BUCKET_REGION="us-east-1"

export_argocd () {
    echo "exporting argo-cd"
    create_backup
    encrypt_backup
    push_backup
    echo "argo-cd export complete"
}

create_backup () {
    echo "creating argo-cd backup"
    argocd admin export > ${BACKUP_EXPORT_LOCATION}
}

encrypt_backup () {
    echo "encrypting argo-cd backup"
    openssl enc -aes-256-cbc -pbkdf2 -pass file:${BACKUP_KEY_LOCATION} -in ${BACKUP_EXPORT_LOCATION} -out ${BACKUP_ENCRYPT_LOCATION}
    rm ${BACKUP_EXPORT_LOCATION}
}

push_backup () {
    case ${BACKUP_LOCATION} in
        "aws")
            push_aws
            ;;
        "azure")
            push_azure
            ;;
        "gcp")
            push_gcp
            ;;
        *)
            # local and unsupported backends
    esac
}
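
# NOTE: the push/pull helpers below read their configuration from plain files
# mounted under /secrets (the encryption key used above lives at
# /secrets/backup.key). How those files get mounted is outside this script;
# the names listed here simply mirror the paths read by the functions that follow:
#   aws:   aws.bucket.name, aws.bucket.region (optional, defaults to us-east-1)
#   azure: azure.storage.account, azure.service.id, azure.service.cert,
#          azure.tenant.id, azure.container.name
#   gcp:   gcp.key.file, gcp.project.id, gcp.bucket.name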

push_aws () {
    echo "pushing argo-cd backup to aws"
    BACKUP_BUCKET_NAME=`cat /secrets/aws.bucket.name`
    BACKUP_BUCKET_REGION=${DEFAULT_BACKUP_BUCKET_REGION}
    # Set BACKUP_BUCKET_REGION to us-east-1 (DEFAULT_BACKUP_BUCKET_REGION) if a user does not
    # provide aws.bucket.region in aws-backup-secret
    BACKUP_BUCKET_REGION_FILE=/secrets/aws.bucket.region
    if [[ -f "$BACKUP_BUCKET_REGION_FILE" ]]; then
        BACKUP_BUCKET_REGION=`cat /secrets/aws.bucket.region`
    fi
    BACKUP_BUCKET_URI="s3://${BACKUP_BUCKET_NAME}"
    # Create bucket only if it does not exist
    if aws s3 ls $BACKUP_BUCKET_URI 2>&1 | grep -q 'An error occurred'
    then
        aws s3 mb ${BACKUP_BUCKET_URI} --region ${BACKUP_BUCKET_REGION}
        aws s3api put-public-access-block --bucket ${BACKUP_BUCKET_NAME} --public-access-block-configuration "BlockPublicAcls=true,IgnorePublicAcls=true,BlockPublicPolicy=true,RestrictPublicBuckets=true"
    fi
    aws s3 cp ${BACKUP_ENCRYPT_LOCATION} ${BACKUP_BUCKET_URI}/${BACKUP_FILENAME}
}

push_azure () {
    echo "pushing argo-cd backup to azure"
    BACKUP_STORAGE_ACCOUNT=`cat /secrets/azure.storage.account`
    BACKUP_SERVICE_ID=`cat /secrets/azure.service.id`
    BACKUP_CERT_PATH="/secrets/azure.service.cert"
    BACKUP_TENANT_ID=`cat /secrets/azure.tenant.id`
    BACKUP_CONTAINER_NAME=`cat /secrets/azure.container.name`
    az login --service-principal -u ${BACKUP_SERVICE_ID} -p ${BACKUP_CERT_PATH} --tenant ${BACKUP_TENANT_ID}
    az storage container create --auth-mode login --account-name ${BACKUP_STORAGE_ACCOUNT} --name ${BACKUP_CONTAINER_NAME}
    az storage blob upload --auth-mode login --account-name ${BACKUP_STORAGE_ACCOUNT} --container-name ${BACKUP_CONTAINER_NAME} --file ${BACKUP_ENCRYPT_LOCATION} --name ${BACKUP_FILENAME}
}

push_gcp () {
    echo "pushing argo-cd backup to gcp"
    BACKUP_BUCKET_KEY="/secrets/gcp.key.file"
    BACKUP_PROJECT_ID=`cat /secrets/gcp.project.id`
    BACKUP_BUCKET_NAME=`cat /secrets/gcp.bucket.name`
    BACKUP_BUCKET_URI="gs://${BACKUP_BUCKET_NAME}"
    gcloud auth activate-service-account --key-file=${BACKUP_BUCKET_KEY}
    gsutil mb -b on -p ${BACKUP_PROJECT_ID} ${BACKUP_BUCKET_URI} || true
    gsutil cp ${BACKUP_ENCRYPT_LOCATION} ${BACKUP_BUCKET_URI}/${BACKUP_FILENAME}
}

import_argocd () {
    echo "importing argo-cd"
    pull_backup
    decrypt_backup
    load_backup
    echo "argo-cd import complete"
}

pull_backup () {
    case ${BACKUP_LOCATION} in
        "aws")
            pull_aws
            ;;
        "azure")
            pull_azure
            ;;
        "gcp")
            pull_gcp
            ;;
        *)
            # local and unsupported backends
    esac
}

pull_aws () {
    echo "pulling argo-cd backup from aws"
    BACKUP_BUCKET_NAME=`cat /secrets/aws.bucket.name`
    BACKUP_BUCKET_URI="s3://${BACKUP_BUCKET_NAME}"
    aws s3 cp ${BACKUP_BUCKET_URI}/${BACKUP_FILENAME} ${BACKUP_ENCRYPT_LOCATION}
}

pull_azure () {
    echo "pulling argo-cd backup from azure"
    BACKUP_STORAGE_ACCOUNT=`cat /secrets/azure.storage.account`
    BACKUP_SERVICE_ID=`cat /secrets/azure.service.id`
    BACKUP_CERT_PATH="/secrets/azure.service.cert"
    BACKUP_TENANT_ID=`cat /secrets/azure.tenant.id`
    BACKUP_CONTAINER_NAME=`cat /secrets/azure.container.name`
    az login --service-principal -u ${BACKUP_SERVICE_ID} -p ${BACKUP_CERT_PATH} --tenant ${BACKUP_TENANT_ID}
    az storage blob download --auth-mode login --account-name ${BACKUP_STORAGE_ACCOUNT} --container-name ${BACKUP_CONTAINER_NAME} --file ${BACKUP_ENCRYPT_LOCATION} --name ${BACKUP_FILENAME}
}

pull_gcp () {
    echo "pulling argo-cd backup from gcp"
    BACKUP_BUCKET_KEY="/secrets/gcp.key.file"
    BACKUP_PROJECT_ID=`cat /secrets/gcp.project.id`
    BACKUP_BUCKET_NAME=`cat /secrets/gcp.bucket.name`
    BACKUP_BUCKET_URI="gs://${BACKUP_BUCKET_NAME}"
    gcloud auth activate-service-account --key-file=${BACKUP_BUCKET_KEY}
    gsutil cp ${BACKUP_BUCKET_URI}/${BACKUP_FILENAME} ${BACKUP_ENCRYPT_LOCATION}
}

decrypt_backup () {
    echo "decrypting argo-cd backup"
    openssl enc -aes-256-cbc -d -pbkdf2 -pass file:${BACKUP_KEY_LOCATION} -in ${BACKUP_ENCRYPT_LOCATION} -out ${BACKUP_EXPORT_LOCATION}
}

load_backup () {
    echo "loading argo-cd backup"
    argocd admin import - < ${BACKUP_EXPORT_LOCATION}
}

usage () {
    echo "usage: ${BACKUP_SCRIPT} export|import"
}

case ${BACKUP_ACTION} in
    "export")
        export_argocd
        ;;
    "import")
        import_argocd
        ;;
    # TODO: Implement finalize action to clean up cloud resources!
    *)
        usage
esac
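
# Example invocations (illustrative only; they assume the argocd, openssl, and
# relevant cloud CLIs are on PATH and that the files described above are
# mounted under /secrets):
#   ./util.sh export aws     # export Argo CD, encrypt the backup, push it to S3
#   ./util.sh import aws     # pull the backup from S3, decrypt it, import it
#   ./util.sh export         # no backend given: the encrypted backup is simply
#                            # left at /backups/argocd-backup.yaml (local case)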