github.com/treeverse/lakefs@v1.24.1-0.20240520134607-95648127bfb0/test/rclone_export/run-test.sh

#!/bin/bash

REPOSITORY=${REPOSITORY:-example}

# Run a command, print its output, and fail the test if it exits with a non-zero status.
run_cmd_and_validate() {
  echo "$1"
  echo "Running: $2"
  output=$(eval "$2")
  if [[ $? -ne 0 ]]
  then
    echo "FAILED!" && exit 1
  fi
  echo "Output:"
  echo "$output"
}

# Run Export without previous commit
echo "Current working directory: ${WORKING_DIRECTORY}"
run_cmd_and_validate "upload file_one" "docker compose exec -T lakefs lakectl fs upload lakefs://${REPOSITORY}/main/a/file_one.txt --source /local/file_one.txt"

run_cmd_and_validate "export no previous commit" "docker compose --project-directory ${WORKING_DIRECTORY} run --rm lakefs-export ${REPOSITORY} ${EXPORT_LOCATION} --branch=main"

# Validate export
lakectl_out=$(mktemp)
s3_out=$(mktemp)
trap 'rm -f -- $s3_out $lakectl_out' INT TERM EXIT

docker compose exec -T lakefs lakectl fs ls --recursive --no-color lakefs://${REPOSITORY}/main/ | awk '{print $8}' | sort > ${lakectl_out}

# Compute the cut(1) field offset used to strip the bucket and export prefix from
# the S3 listing, so keys can be compared against the lakeFS object paths.
n=$(grep -o "/" <<< ${EXPORT_LOCATION} | wc -l)

if [[ "${EXPORT_LOCATION}" == */ ]]
then
  n=$((n-2))
else
  n=$((n-1))
fi

# List the exported objects, excluding EXPORT_* marker files.
aws s3 ls --recursive ${EXPORT_LOCATION} | awk '{print $4}' | cut -d/ -f ${n}- | grep -v EXPORT_ | sort > ${s3_out}

# Print both listings for debugging
echo $(cat $lakectl_out)
echo $(cat $s3_out)

if ! diff ${lakectl_out} ${s3_out}; then
  echo "export location and lakefs should contain same objects"
  exit 1
fi

# Run Export with previous commit - add a file and delete an existing file
run_cmd_and_validate "upload file_two" "docker compose exec -T lakefs lakectl fs upload lakefs://${REPOSITORY}/main/a/file_two.txt --source /local/file_two.txt"
run_cmd_and_validate "delete file_one" "docker compose exec -T lakefs lakectl fs rm lakefs://${REPOSITORY}/main/a/file_one.txt"
run_cmd_and_validate "commit changes" "docker compose exec -T lakefs lakectl commit lakefs://${REPOSITORY}/main --message='removed file_one and added file_two'"
run_cmd_and_validate "export previous commit" "docker compose --project-directory ${WORKING_DIRECTORY} run --rm lakefs-export ${REPOSITORY} ${EXPORT_LOCATION} --branch=main --prev_commit_id='some_commit'"

# Validate sync
lakectl_out=$(mktemp)
s3_out=$(mktemp)
trap 'rm -f -- $s3_out $lakectl_out' INT TERM EXIT

docker compose exec -T lakefs lakectl fs ls --recursive --no-color lakefs://${REPOSITORY}/main/ | awk '{print $8}' | sort > ${lakectl_out}
aws s3 ls --recursive ${EXPORT_LOCATION} | awk '{print $4}' | cut -d/ -f ${n}- | grep -v EXPORT_ | sort > ${s3_out}

if ! diff ${lakectl_out} ${s3_out}; then
  echo "export location and lakefs should contain same objects"
  exit 1
fi

# Run Export with commit_id reference
run_cmd_and_validate "upload file_three" "docker compose exec -T lakefs lakectl fs upload lakefs://${REPOSITORY}/main/a/file_three.txt --source /local/file_three.txt"

# Extract the commit ID from the lakectl commit output (4th line, 2nd field).
commit_id=$(docker compose exec -T lakefs lakectl commit lakefs://${REPOSITORY}/main --message="added file_three" | sed -n 4p | awk '{print $2}')

# We should not validate the exit code, since it is erroneous
echo "run export"
docker compose --project-directory ${WORKING_DIRECTORY} run --rm lakefs-export ${REPOSITORY} ${EXPORT_LOCATION} --commit_id=$commit_id

echo "commit_id $commit_id"

# Validate sync
lakectl_out=$(mktemp)
s3_out=$(mktemp)
trap 'rm -f -- $s3_out $lakectl_out' INT TERM EXIT

echo "ls"
docker compose exec -T lakefs lakectl fs ls --recursive --no-color lakefs://${REPOSITORY}/main/ | awk '{print $8}' | sort > ${lakectl_out}
echo "res $?"

echo "aws ls"
aws s3 ls --recursive ${EXPORT_LOCATION} | awk '{print $4}' | cut -d/ -f ${n}- | grep -v EXPORT_ | sort > ${s3_out}
echo "res $?"

if ! diff ${lakectl_out} ${s3_out}; then
  echo "export location and lakefs should contain same objects"
  exit 1
fi

# Delete files at the destination in case of multiple runs; each run produces output under a different folder
aws s3 rm ${EXPORT_LOCATION} --recursive
echo "res $?"
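
# Usage sketch: a minimal, hypothetical invocation, assuming REPOSITORY,
# WORKING_DIRECTORY, and EXPORT_LOCATION are supplied by the caller and that the
# docker compose services referenced above (lakefs, lakefs-export) are already
# running; the bucket name below is a placeholder, not taken from the test setup.
#
#   REPOSITORY=example \
#   WORKING_DIRECTORY="$(pwd)" \
#   EXPORT_LOCATION="s3://example-bucket/export/" \
#   ./run-test.sh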