github.com/greenplum-db/gpbackup@v0.0.0-20240517212602-89daab1885b3/ci/scripts/s3-plugin-perf.bash

#!/bin/bash

set -ex

. gpbackup/ci/scripts/setup-perf.bash
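# setup-perf.bash (together with the CI environment) is expected to provide
# REGION, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, BUCKET, and SCALE_FACTOR,
# which get baked into the generated script below.
# The SCRIPT heredoc delimiter is unquoted, so unescaped variables expand here
# at generation time, while \$-escaped references are resolved later on the
# remote host.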

cat <<SCRIPT > /tmp/run_perf.bash
#!/bin/bash

set -e
source env.sh

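# Make the bash time builtin report only real (wall-clock) seconds.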
TIMEFORMAT=%R

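# print_header: frame the given text between rows of '#' sized to its width.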
function print_header() {
    header="### \$1 ###"
    len=\$(echo "\$header" | awk '{print length}')
    printf "%0.s#" \$(seq 1 \$len) && echo
    echo -e "\$header"
    printf "%0.s#" \$(seq 1 \$len) && echo
}

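# print_time_exec: echo the command string, then eval it under the time builtin.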
function print_time_exec() {
  echo "\$1"
  time eval "\$1"
}

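# Write the gpbackup_s3_plugin config on the remote host. The CONFIG delimiter
# is also unquoted, so the region, credentials, and bucket values were already
# substituted when this script was generated.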
cat << CONFIG > \${HOME}/s3_config.yaml
executablepath: \${GPHOME}/bin/gpbackup_s3_plugin
options:
  region: ${REGION}
  aws_access_key_id: ${AWS_ACCESS_KEY_ID}
  aws_secret_access_key: ${AWS_SECRET_ACCESS_KEY}
  bucket: ${BUCKET}
  folder: test/backup
CONFIG

# ----------------------------------------------
# Run S3 Plugin scale tests
# ----------------------------------------------
pushd \${GOPATH}/src/github.com/greenplum-db/gpbackup/plugins
print_time_exec "./plugin_test_scale.sh \${GPHOME}/bin/gpbackup_s3_plugin ~/s3_config.yaml"
popd

# ----------------------------------------------
# Run S3 Plugin RESTORE_DIRECTORY and BACKUP_DIRECTORY serial
# ----------------------------------------------
mkdir -p /data/gpdata/stage1 /data/gpdata/stage2
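# stage1 holds the working copy for the serial run, stage2 for the parallel run.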
pushd /data/gpdata/stage1
# Copy data from S3 to local using restore_directory
print_header "RESTORE_DIRECTORY (SERIAL) with ${SCALE_FACTOR} GB of data"
print_time_exec "\${GPHOME}/bin/gpbackup_s3_plugin restore_directory \
    ~/s3_config.yaml benchmark/tpch/lineitem/${SCALE_FACTOR}/lineitem_data"
echo
mkdir -p tmp/\$timestamp && mv benchmark tmp/\$timestamp
# Copy data from local to S3 using backup_directory
print_header "BACKUP_DIRECTORY (SERIAL) with ${SCALE_FACTOR} GB of data"
print_time_exec "\${GPHOME}/bin/gpbackup_s3_plugin backup_directory \
    ~/s3_config.yaml tmp/\$timestamp/benchmark/tpch/lineitem"
echo
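# Clean up any per-timestamp staging data left under ~/tpch_data.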
rm -rf ~/tpch_data/tmp/\$timestamp

# ----------------------------------------------
# Run S3 Plugin RESTORE_DIRECTORY and BACKUP_DIRECTORY parallel
# ----------------------------------------------
popd && pushd /data/gpdata/stage2
# Copy data from S3 to local using restore_directory_parallel
print_header "RESTORE_DIRECTORY (PARALLEL=5) with ${SCALE_FACTOR} GB of data"
print_time_exec "\${GPHOME}/bin/gpbackup_s3_plugin restore_directory_parallel \
    ~/s3_config.yaml benchmark/tpch/lineitem/${SCALE_FACTOR}/lineitem_data"
echo
mkdir -p tmp/\$timestamp && mv benchmark tmp/\$timestamp
# Copy data from local to S3 using backup_directory_parallel
print_header "BACKUP_DIRECTORY (PARALLEL=5) with ${SCALE_FACTOR} GB of data"
print_time_exec "\${GPHOME}/bin/gpbackup_s3_plugin backup_directory_parallel \
    ~/s3_config.yaml tmp/\$timestamp/benchmark/tpch/lineitem"
echo
rm -rf ~/tpch_data/tmp/\$timestamp
popd

SCRIPT

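# Ship the generated script to cdw and run it from gpadmin's home directory.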
chmod +x /tmp/run_perf.bash
scp /tmp/run_perf.bash cdw:
ssh -t cdw "/home/gpadmin/run_perf.bash"