#!/bin/bash

# Collect load-test results and cluster diagnostics after a load-test run and
# store everything under $ARTIFACT_DIR for CI archival: logs, pprof profiles,
# per-resource timestamp CSVs, monitoring data and Tekton performance analysis.
#
# Arguments:
#   $1 - load-test scenario name (default: "concurrent"); selects which CI
#        secret scenario file is sourced below
#   $2 - directory to run from (default: current directory)
#
# Required environment:
#   ARTIFACT_DIR - CI artifact directory (must exist)
#   OUTPUT_DIR   - optional; where the load test wrote its output
#   USER_PREFIX  - provided by the sourced user-prefix.sh

set -o nounset
set -o errexit
set -o pipefail

# Scenario file provides scenario-specific env vars; path is CI-managed.
# shellcheck disable=SC1090
source "/usr/local/ci-secrets/redhat-appstudio-load-test/load-test-scenario.${1:-concurrent}"

pushd "${2:-.}"

output_dir="${OUTPUT_DIR:-./tests/load-tests}"

# Defines USER_PREFIX, used below to identify per-user test resources.
source "./tests/load-tests/ci-scripts/user-prefix.sh"

echo "Collecting load test results"
load_test_log=$ARTIFACT_DIR/load-tests.log
find "$output_dir" -type f -name '*.log' -exec cp -vf {} "${ARTIFACT_DIR}" \;
find "$output_dir" -type f -name 'load-tests.json' -exec cp -vf {} "${ARTIFACT_DIR}" \;
find "$output_dir" -type f -name 'gh-rate-limits-remaining.csv' -exec cp -vf {} "${ARTIFACT_DIR}" \;
find "$output_dir" -type f -name '*.pprof' -exec cp -vf {} "${ARTIFACT_DIR}" \;

pipelineruns_json=$ARTIFACT_DIR/pipelineruns.json
taskruns_json=$ARTIFACT_DIR/taskruns.json
pods_json=$ARTIFACT_DIR/pods.json

application_timestamps=$ARTIFACT_DIR/applications.appstudio.redhat.com_timestamps
application_timestamps_csv=${application_timestamps}.csv
application_timestamps_txt=${application_timestamps}.txt
componentdetectionquery_timestamps=$ARTIFACT_DIR/componentdetectionqueries.appstudio.redhat.com_timestamps.csv
component_timestamps=$ARTIFACT_DIR/components.appstudio.redhat.com_timestamps.csv
pipelinerun_timestamps=$ARTIFACT_DIR/pipelineruns.tekton.dev_timestamps.csv
application_service_log=$ARTIFACT_DIR/application-service.log
application_service_log_segments=$ARTIFACT_DIR/application-service-log-segments
monitoring_collection_log=$ARTIFACT_DIR/monitoring-collection.log
monitoring_collection_data=$ARTIFACT_DIR/load-tests.json
monitoring_collection_dir=$ARTIFACT_DIR/monitoring-collection-raw-data-dir
mkdir -p "$monitoring_collection_dir"
csv_delim=";"
# The delimiter as a quoted jq string literal, interpolated into jq programs.
csv_delim_quoted="\"$csv_delim\""
# jq strptime format (as a jq string literal) for Kubernetes RFC3339 timestamps.
dt_format='"%Y-%m-%dT%H:%M:%SZ"'

## Application timestamps
echo "Collecting Application timestamps..."
echo "Application${csv_delim}StatusSucceeded${csv_delim}StatusMessage${csv_delim}CreatedTimestamp${csv_delim}SucceededTimestamp${csv_delim}Duration" >"$application_timestamps_csv"
# One CSV row per Application; Duration = lastTransitionTime - creationTimestamp (seconds).
jq_cmd=".items[] | (.metadata.name) \
    + $csv_delim_quoted + (.status.conditions[0].status) \
    + $csv_delim_quoted + (.status.conditions[0].message) \
    + $csv_delim_quoted + (.metadata.creationTimestamp) \
    + $csv_delim_quoted + (.status.conditions[0].lastTransitionTime) \
    + $csv_delim_quoted + ((.status.conditions[0].lastTransitionTime | strptime($dt_format) | mktime) - (.metadata.creationTimestamp | strptime($dt_format) | mktime) | tostring)"
oc get applications.appstudio.redhat.com -A -o json | jq -rc "$jq_cmd" | sed -e 's,Z,,g' >>"$application_timestamps_csv"
oc get applications.appstudio.redhat.com -A -o 'custom-columns=NAME:.metadata.name,CREATED:.metadata.creationTimestamp,LAST_UPDATED:.status.conditions[0].lastTransitionTime,STATUS:.status.conditions[0].reason,MESSAGE:.status.conditions[0].message' >"$application_timestamps_txt"

## ComponentDetectionQuery timestamps
echo "Collecting ComponentDetectionQuery timestamps..."
# NOTE(review): "Mesasge" typo kept in the header — downstream CSV consumers
# may reference the column by name; confirm before fixing.
echo "ComponentDetectionQuery${csv_delim}Namespace${csv_delim}CreationTimestamp${csv_delim}Completed${csv_delim}Completed.Reason${csv_delim}Completed.Mesasge${csv_delim}Duration" >"$componentdetectionquery_timestamps"
# Emits empty delimiter-only fields when the "Completed" condition is absent.
jq_cmd=".items[] | (.metadata.name) \
    + $csv_delim_quoted + (.metadata.namespace) \
    + $csv_delim_quoted + (.metadata.creationTimestamp) \
    + $csv_delim_quoted + (if ((.status.conditions[] | select(.type == \"Completed\")) // false) then (.status.conditions[] | select(.type == \"Completed\") | .lastTransitionTime + $csv_delim_quoted + .reason + $csv_delim_quoted + .message) else \"$csv_delim$csv_delim\" end)\
    + $csv_delim_quoted + (if ((.status.conditions[] | select(.type == \"Completed\")) // false) then ((.status.conditions[] | select(.type == \"Completed\") | .lastTransitionTime | strptime($dt_format) | mktime) - (.metadata.creationTimestamp | strptime($dt_format) | mktime) | tostring) else \"\" end)"
oc get componentdetectionqueries.appstudio.redhat.com -A -o json | jq -rc "$jq_cmd" | sed -e 's,Z,,g' >>"$componentdetectionquery_timestamps"

## Component timestamps
echo "Collecting Component timestamps..."
# NOTE(review): "Mesasge" typo kept in the header for the same reason as above.
echo "Component${csv_delim}Namespace${csv_delim}CreationTimestamp${csv_delim}Created${csv_delim}Created.Reason${csv_delim}Create.Mesasge${csv_delim}GitOpsResourcesGenerated${csv_delim}GitOpsResourcesGenerated.Reason${csv_delim}GitOpsResourcesGenerated.Message${csv_delim}Updated${csv_delim}Updated.Reason${csv_delim}Updated.Message${csv_delim}CreationTimestamp->Created${csv_delim}Created->GitOpsResourcesGenerated${csv_delim}GitOpsResourcesGenerated->Updated${csv_delim}Duration" >"$component_timestamps"
# Condition messages may contain the delimiter, so they are split on it and
# re-joined with "_" to keep the CSV well-formed.
jq_cmd=".items[] | (.metadata.name) \
    + $csv_delim_quoted + (.metadata.namespace) \
    + $csv_delim_quoted + (.metadata.creationTimestamp) \
    + $csv_delim_quoted + (if ((.status.conditions[] | select(.type == \"Created\")) // false) then (.status.conditions[] | select(.type == \"Created\") | .lastTransitionTime + $csv_delim_quoted + .reason + $csv_delim_quoted + (.message|split($csv_delim_quoted)|join(\"_\"))) else \"$csv_delim$csv_delim\" end) \
    + $csv_delim_quoted + (if ((.status.conditions[] | select(.type == \"GitOpsResourcesGenerated\")) // false) then (.status.conditions[] | select(.type == \"GitOpsResourcesGenerated\") | .lastTransitionTime + $csv_delim_quoted + .reason + $csv_delim_quoted + (.message|split($csv_delim_quoted)|join(\"_\"))) else \"$csv_delim$csv_delim\" end) \
    + $csv_delim_quoted + (if ((.status.conditions[] | select(.type == \"Updated\")) // false) then (.status.conditions[] | select(.type == \"Updated\") | .lastTransitionTime + $csv_delim_quoted + .reason + $csv_delim_quoted + (.message|split($csv_delim_quoted)|join(\"_\"))) else \"$csv_delim$csv_delim\" end) \
    + $csv_delim_quoted + (if ((.status.conditions[] | select(.type == \"Created\")) // false) then ((.status.conditions[] | select(.type == \"Created\") | .lastTransitionTime | strptime($dt_format) | mktime) - (.metadata.creationTimestamp | strptime($dt_format) | mktime) | tostring) else \"\" end)\
    + $csv_delim_quoted + (if ((.status.conditions[] | select(.type == \"GitOpsResourcesGenerated\")) // false) and ((.status.conditions[] | select(.type == \"Created\")) // false) then ((.status.conditions[] | select(.type == \"GitOpsResourcesGenerated\") | .lastTransitionTime | strptime($dt_format) | mktime) - (.status.conditions[] | select(.type == \"Created\") | .lastTransitionTime | strptime($dt_format) | mktime) | tostring) else \"\" end) \
    + $csv_delim_quoted + (if ((.status.conditions[] | select(.type == \"Updated\")) // false) and ((.status.conditions[] | select(.type == \"GitOpsResourcesGenerated\")) // false) then ((.status.conditions[] | select(.type == \"Updated\") | .lastTransitionTime | strptime($dt_format) | mktime) - (.status.conditions[] | select(.type == \"GitOpsResourcesGenerated\") | .lastTransitionTime | strptime($dt_format) | mktime) | tostring) else \"\" end) \
    + $csv_delim_quoted + (if ((.status.conditions[] | select(.type == \"Updated\")) // false) then ((.status.conditions[] | select(.type == \"Updated\") | .lastTransitionTime | strptime($dt_format) | mktime) - (.metadata.creationTimestamp | strptime($dt_format) | mktime) | tostring) else \"\" end)"
oc get components.appstudio.redhat.com -A -o json | jq -rc "$jq_cmd" | sed -e 's,Z,,g' >>"$component_timestamps"

## PipelineRun timestamps
echo "Collecting PipelineRun timestamps..."
echo "PipelineRun${csv_delim}Namespace${csv_delim}Succeeded${csv_delim}Reason${csv_delim}Message${csv_delim}Created${csv_delim}Started${csv_delim}FinallyStarted${csv_delim}Completed${csv_delim}Created->Started${csv_delim}Started->FinallyStarted${csv_delim}FinallyStarted->Completed${csv_delim}SucceededDuration${csv_delim}FailedDuration" >"$pipelinerun_timestamps"
# Phase-to-phase deltas in seconds; the last two columns split total duration
# by whether the run's Succeeded condition is True or False.
jq_cmd=".items[] | (.metadata.name) \
    + $csv_delim_quoted + (.metadata.namespace) \
    + $csv_delim_quoted + (.status.conditions[0].status) \
    + $csv_delim_quoted + (.status.conditions[0].reason) \
    + $csv_delim_quoted + (.status.conditions[0].message|split($csv_delim_quoted)|join(\"_\")) \
    + $csv_delim_quoted + (.metadata.creationTimestamp) \
    + $csv_delim_quoted + (.status.startTime) \
    + $csv_delim_quoted + (.status.finallyStartTime) \
    + $csv_delim_quoted + (.status.completionTime) \
    + $csv_delim_quoted + (if .status.startTime != null and .metadata.creationTimestamp != null then ((.status.startTime | strptime($dt_format) | mktime) - (.metadata.creationTimestamp | strptime($dt_format) | mktime) | tostring) else \"\" end) \
    + $csv_delim_quoted + (if .status.finallyStartTime != null and .status.startTime != null then ((.status.finallyStartTime | strptime($dt_format) | mktime) - (.status.startTime | strptime($dt_format) | mktime) | tostring) else \"\" end) \
    + $csv_delim_quoted + (if .status.completionTime != null and .status.finallyStartTime != null then ((.status.completionTime | strptime($dt_format) | mktime) - (.status.finallyStartTime | strptime($dt_format) | mktime) | tostring) else \"\" end) \
    + $csv_delim_quoted + (if .status.conditions[0].status == \"True\" and .status.completionTime != null and .metadata.creationTimestamp != null then ((.status.completionTime | strptime($dt_format) | mktime) - (.metadata.creationTimestamp | strptime($dt_format) | mktime) | tostring) else \"\" end) \
    + $csv_delim_quoted + (if .status.conditions[0].status == \"False\" and .status.completionTime != null and .metadata.creationTimestamp != null then ((.status.completionTime | strptime($dt_format) | mktime) - (.metadata.creationTimestamp | strptime($dt_format) | mktime) | tostring) else \"\" end)"
oc get pipelineruns.tekton.dev -A -o json >"$pipelineruns_json"
# jq run without -r here: the sed strips the surrounding quotes and trailing
# Z before the delimiter; rows are sorted by SucceededDuration (column 13).
jq "$jq_cmd" "$pipelineruns_json" | sed -e "s/\n//g" -e "s/^\"//g" -e "s/\"$//g" -e "s/Z;/;/g" | sort -t ";" -k 13 -r -n >>"$pipelinerun_timestamps"

## Application service log segments per user app
echo "Collecting application service log segments per user app..."
oc logs -l "control-plane=controller-manager" --tail=-1 -n application-service >"$application_service_log"
mkdir -p "$application_service_log_segments"
# Split the controller log into one file per user application. `sort -u`
# replaces `sort | uniq`; the while-read pipeline avoids for-in-$(...)
# word-splitting (app names are a safe charset, but this is the robust form).
grep -Eo "${USER_PREFIX}-....-app" "$application_service_log" | sort -u | while read -r i; do
    grep "$i" "$application_service_log" >"$application_service_log_segments/$i.log"
done

## Error summary
echo "Error summary:"
if [ -f "$load_test_log" ]; then
    grep -Eo "Error #[0-9]+" "$load_test_log" | sort -u | while read -r i; do
        printf ' - %s: ' "$i"
        grep -c "$i" "$load_test_log"
    done | sort -V || :
else
    echo "WARNING: File $load_test_log not found!"
fi

## Monitoring data
echo "Setting up tool to collect monitoring data..."
python3 -m venv venv
# venv's activate/deactivate scripts reference unset vars; relax nounset.
set +u
source venv/bin/activate
set -u
python3 -m pip install -U pip
python3 -m pip install -e "git+https://github.com/redhat-performance/opl.git#egg=opl-rhcloud-perf-team-core&subdirectory=core"

echo "Collecting monitoring data..."
if [ -f "$monitoring_collection_data" ]; then
    # Monitoring window is taken from the load-test status data file itself.
    mstart=$(date --utc --date "$(status_data.py --status-data-file "$monitoring_collection_data" --get timestamp)" --iso-8601=seconds)
    mend=$(date --utc --date "$(status_data.py --status-data-file "$monitoring_collection_data" --get endTimestamp)" --iso-8601=seconds)
    mhost=$(oc -n openshift-monitoring get route -l app.kubernetes.io/name=thanos-query -o json | jq --raw-output '.items[0].spec.host')
    status_data.py \
        --status-data-file "$monitoring_collection_data" \
        --additional ./tests/load-tests/cluster_read_config.yaml \
        --monitoring-start "$mstart" \
        --monitoring-end "$mend" \
        --monitoring-raw-data-dir "$monitoring_collection_dir" \
        --prometheus-host "https://$mhost" \
        --prometheus-port 443 \
        --prometheus-token "$(oc whoami -t)" \
        -d &>"$monitoring_collection_log"
    set +u
    deactivate
    set -u
else
    echo "WARNING: File $monitoring_collection_data not found!"
fi

if [ "${TEKTON_PERF_ENABLE_CPU_PROFILING:-}" == "true" ] || [ "${TEKTON_PERF_ENABLE_MEMORY_PROFILING:-}" == "true" ]; then
    echo "Collecting profiling data from Tekton"
    # NUL-delimited find/read instead of for-in-$(find ...): safe for any path.
    find "$output_dir" -name "*.pprof" -print0 | while IFS= read -r -d '' pprof_profile; do
        file=$(basename "$pprof_profile")
        # Profile rendering is best-effort; do not fail collection on errors.
        go tool pprof -text "$pprof_profile" >"$ARTIFACT_DIR/$file.txt" || true
        go tool pprof -svg -output="$ARTIFACT_DIR/$file.svg" "$pprof_profile" || true
    done
fi

## Pods on Nodes distribution
node_info_csv=$ARTIFACT_DIR/node-info.csv
echo "Collecting node specs"
echo "Node;CPUs;Memory;InstanceType;NodeType;Zone" >"$node_info_csv"
# NodeType: presence of the worker role label decides worker vs master.
jq_cmd=".items[] | .metadata.name \
    + $csv_delim_quoted + .status.capacity.cpu \
    + $csv_delim_quoted + .status.capacity.memory \
    + $csv_delim_quoted + .metadata.labels.\"node.kubernetes.io/instance-type\" \
    + $csv_delim_quoted + (if .metadata.labels.\"node-role.kubernetes.io/worker\" != null then \"worker\" else \"master\" end) \
    + $csv_delim_quoted + .metadata.labels.\"topology.kubernetes.io/zone\""
oc get nodes -o json | jq -r "$jq_cmd" >>"$node_info_csv"

oc get pods -A -o json >"$pods_json"
pods_on_nodes_csv=$ARTIFACT_DIR/pods-on-nodes.csv
all_pods_distribution_csv=$ARTIFACT_DIR/all-pods-distribution.csv
task_pods_distribution_csv=$ARTIFACT_DIR/task-pods-distribution.csv
echo "Collecting pod distribution over nodes"
echo "Node;Namespace;Pod" >"$pods_on_nodes_csv"
# Only pods belonging to this run's applications (USER_PREFIX match).
jq_cmd=".items[] | select(.metadata.labels.\"appstudio.openshift.io/application\" != null) \
    | select(.metadata.labels.\"appstudio.openshift.io/application\" | startswith(\"$USER_PREFIX\")) \
    | .spec.nodeName \
    + $csv_delim_quoted + .metadata.namespace \
    + $csv_delim_quoted + .metadata.name"
jq -r "$jq_cmd" "$pods_json" | sort -V >>"$pods_on_nodes_csv"
echo "Node;Pods" >"$all_pods_distribution_csv"
# `uniq -c` output is "  COUNT NODE"; the sed reshapes it to "NODE;COUNT".
jq -r ".items[] | .spec.nodeName" "$pods_json" | sort | uniq -c | sed -e 's,\s\+\([0-9]\+\)\s\+\(.*\),\2;\1,g' >>"$all_pods_distribution_csv"
echo "Node;Pods" >"$task_pods_distribution_csv"
jq -r '.items[] | select(.metadata.labels."appstudio.openshift.io/application" != null).spec.nodeName' "$pods_json" | sort | uniq -c | sed -e 's,\s\+\([0-9]\+\)\s\+\(.*\),\2;\1,g' >>"$task_pods_distribution_csv"

## Tekton Artifact Performance Analysis
tapa_dir=./tapa.git

echo "Installing Tekton Artifact Performance Analysis (tapa)"
rm -rf "$tapa_dir"
git clone https://github.com/gabemontero/tekton-artifact-performance-analysis "$tapa_dir"
pushd "$tapa_dir"
go mod tidy
go mod vendor
go build -o tapa . && chmod +x ./tapa
popd
export PATH="$PATH:$tapa_dir"

tapa="tapa -t csv"

echo "Running Tekton Artifact Performance Analysis"
oc get taskruns.tekton.dev -A -o json >"$taskruns_json"
tapa_prlist_csv=$ARTIFACT_DIR/tapa.prlist.csv
tapa_trlist_csv=$ARTIFACT_DIR/tapa.trlist.csv
tapa_podlist_csv=$ARTIFACT_DIR/tapa.podlist.csv
tapa_podlist_containers_csv=$ARTIFACT_DIR/tapa.podlist.containers.csv
tapa_all_csv=$ARTIFACT_DIR/tapa.all.csv
tapa_tmp=tapa.tmp

#######################################
# Copy a tapa CSV keeping the header row first and sorting the data rows
# numerically (descending) by the second ';'-separated column.
# Arguments: $1 - input CSV; $2 - output CSV
# Outputs:   warning to stdout when $1 does not exist
#######################################
sort_csv() {
    if [ -f "$1" ]; then
        head -n1 "$1" >"$2"
        tail -n+2 "$1" | sort -t ";" -k 2 -r -n >>"$2"
    else
        echo "WARNING: File $1 not found!"
    fi
}

$tapa prlist "$pipelineruns_json" >"$tapa_tmp"
sort_csv "$tapa_tmp" "$tapa_prlist_csv"

$tapa trlist "$taskruns_json" >"$tapa_tmp"
sort_csv "$tapa_tmp" "$tapa_trlist_csv"

$tapa podlist "$pods_json" >"$tapa_tmp"
sort_csv "$tapa_tmp" "$tapa_podlist_csv"

$tapa podlist --containers-only "$pods_json" >"$tapa_tmp"
sort_csv "$tapa_tmp" "$tapa_podlist_containers_csv"

$tapa all "$pipelineruns_json" "$taskruns_json" "$pods_json" >"$tapa_tmp"
sort_csv "$tapa_tmp" "$tapa_all_csv"

popd