#!/usr/bin/env bash

# Copyright 2020 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Pulls the last 14 days of build metadata and test failures from BigQuery,
# runs the /triage clustering binary over them, and publishes the resulting
# failure summaries (plus per-prefix slices and a dated history snapshot)
# to GCS for the triage dashboard.
#
# Optional env overrides: BUILD_DATASET_TABLE, TRIAGE_DATASET_TABLE,
# TRIAGE_TEMP_GCS_PATH, TRIAGE_GCS_PATH, TRIAGE_BQ_USAGE_PROJECT,
# GOOGLE_APPLICATION_CREDENTIALS, NUM_WORKERS.

# -e: exit on error, -x: trace commands, -u: error on unset variables,
# -o pipefail: a pipeline fails if ANY stage fails (not just the last).
set -exuo pipefail

# dataset table to query for build info
readonly BUILD_DATASET_TABLE="${BUILD_DATASET_TABLE:-"kubernetes-public:k8s_infra_kettle.all"}"

# dataset to write temp results to for triage
readonly TRIAGE_DATASET_TABLE="${TRIAGE_DATASET_TABLE:-"k8s-gubernator:temp.triage"}"

# gcs bucket to write temporary results to
readonly TRIAGE_TEMP_GCS_PATH="${TRIAGE_TEMP_GCS_PATH:-"gs://k8s-gubernator/triage_tests"}"

# gcs uri to write final triage results to
readonly TRIAGE_GCS_PATH="${TRIAGE_GCS_PATH:-"gs://k8s-gubernator/triage"}"

# the gcp project against which to bill bq usage
readonly TRIAGE_BQ_USAGE_PROJECT="${TRIAGE_BQ_USAGE_PROJECT:-"k8s-gubernator"}"

# Run from the script's own directory so relative output paths land here.
cd "$(dirname "$0")"

start=$(date +%s)

# If a service-account key file is provided, authenticate with it; otherwise
# rely on whatever credentials gcloud already has.
if [[ -e ${GOOGLE_APPLICATION_CREDENTIALS-} ]]; then
  echo "activating service account with credentials at: ${GOOGLE_APPLICATION_CREDENTIALS}"
  gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}"
fi

gcloud config set project "${TRIAGE_BQ_USAGE_PROJECT}"

# First invocation of bq writes ~/.bigqueryrc and prompts; feed it a newline
# so the prompt is answered non-interactively before the real queries run.
bq show <<< $'\n'

# Log the wall-clock start time for the job output.
date

# populate triage_builds.json with build metadata
# (legacy BigQuery SQL: [table] syntax and timestamp_to_sec)
bq --project_id="${TRIAGE_BQ_USAGE_PROJECT}" --headless --format=json query --max_rows 1000000 \
  "select
    path,
    timestamp_to_sec(started) started,
    elapsed,
    tests_run,
    tests_failed,
    result,
    executor,
    job,
    number
  from
    [${BUILD_DATASET_TABLE}]
  where
    timestamp_to_sec(started) > TIMESTAMP_TO_SEC(DATE_ADD(CURRENT_DATE(), -14, 'DAY'))
    and job != 'ci-kubernetes-coverage-unit'" \
  > triage_builds.json

# populate ${TRIAGE_DATASET_TABLE} with test failures; results are too large
# to fetch directly, so they are written to a destination table and extracted
# to GCS below.
bq --project_id="${TRIAGE_BQ_USAGE_PROJECT}" query --allow_large_results --headless --max_rows 0 --replace --destination_table "${TRIAGE_DATASET_TABLE}" \
  "select
    timestamp_to_sec(started) started,
    path build,
    test.name name,
    test.failure_text failure_text
  from
    [${BUILD_DATASET_TABLE}]
  where
    test.failed
    and timestamp_to_sec(started) > TIMESTAMP_TO_SEC(DATE_ADD(CURRENT_DATE(), -14, 'DAY'))
    and job != 'ci-kubernetes-coverage-unit'"

# Clear any shards left over from a previous run; '|| true' is deliberate
# since the path may legitimately be empty on a fresh bucket.
gsutil rm "${TRIAGE_TEMP_GCS_PATH}/shard_*.json.gz" || true
bq extract --compression GZIP --destination_format NEWLINE_DELIMITED_JSON "${TRIAGE_DATASET_TABLE}" "${TRIAGE_TEMP_GCS_PATH}/shard_*.json.gz"
mkdir -p triage_tests
gsutil cp -r "${TRIAGE_TEMP_GCS_PATH}/*" triage_tests/
gzip -df triage_tests/*.gz

# gsutil cp "${TRIAGE_BUCKET}/failure_data.json" failure_data_previous.json

mkdir -p slices

# Cluster the failures; NUM_WORKERS is forwarded only when set.
/triage \
  --builds triage_builds.json \
  --output failure_data.json \
  --output_slices slices/failure_data_PREFIX.json \
  ${NUM_WORKERS:+"--num_workers=${NUM_WORKERS}"} \
  triage_tests/*.json

# Upload helper: gzip-compress in transit (-Z) and forbid stale caching so
# the dashboard always sees fresh data.
gsutil_cp() {
  gsutil -h 'Cache-Control: no-store, must-revalidate' -m cp -Z "$@"
}

gsutil_cp failure_data.json "${TRIAGE_GCS_PATH}/"
gsutil_cp slices/*.json "${TRIAGE_GCS_PATH}/slices/"
gsutil_cp failure_data.json "${TRIAGE_GCS_PATH}/history/$(date -u +%Y%m%d).json"

stop=$(date +%s)
elapsed=$(( stop - start ))
echo "Finished in $(( elapsed / 60))m$(( elapsed % 60))s"