k8s.io/test-infra@v0.0.0-20240520184403-27c6b4c223d8/experiment/flakedetector.py

#!/usr/bin/env python3

# Copyright 2017 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


"""Counts the number of flakes in PRs using data from prow.

A flake is counted if a job passes and fails for the same pull commit. This
isn't a perfect signal, since something might have happened on master that
makes it flake, but it's good enough in practice. There will also be false
negatives when flakes don't show up because the PR author changed the PR.
Still, this is a useful signal.

Only serial jobs are considered for the flake calculation; batch jobs are
ignored.
"""

import operator

import requests


def main():  # pylint: disable=too-many-branches
    """Run flake detector."""
    res = requests.get(
        'https://prow.k8s.io/prowjobs.js?omit=annotations,labels,decoration_config,pod_spec'
    )
    job_results = res.json()

    jobs = {}     # job -> {sha -> [results...]}
    commits = {}  # sha -> {job -> [results...]}
    for item in job_results['items']:
        spec = item['spec']
        status = item['status']

        if spec['type'] != 'presubmit':
            continue
        # Keep only jobs where the org or the repo is 'kubernetes'.
        if spec['refs']['org'] != 'kubernetes' and spec['refs']['repo'] != 'kubernetes':
            continue
        # Only completed runs count; pending/aborted/error states are skipped.
        if status['state'] != 'success' and status['state'] != 'failure':
            continue
        # populate jobs
        if spec['job'] not in jobs:
            jobs[spec['job']] = {}
        if spec['refs']['base_sha'] not in jobs[spec['job']]:
            jobs[spec['job']][spec['refs']['base_sha']] = []
        jobs[spec['job']][spec['refs']['base_sha']].append(status['state'])
        # populate commits
        if spec['refs']['base_sha'] not in commits:
            commits[spec['refs']['base_sha']] = {}
        if spec['job'] not in commits[spec['refs']['base_sha']]:
            commits[spec['refs']['base_sha']][spec['job']] = []
        commits[spec['refs']['base_sha']][spec['job']].append(status['state'])

    job_commits = {}
    job_flakes = {}
    for job, shas in jobs.items():
        job_commits[job] = len(shas)
        job_flakes[job] = 0
        for results in shas.values():
            # A job flaked on a commit if it both passed and failed there.
            if 'success' in results and 'failure' in results:
                job_flakes[job] += 1

    print('Certain flakes:')
    for job, flakes in sorted(job_flakes.items(), key=operator.itemgetter(1), reverse=True):
        # Skip jobs with too few sampled commits to give a meaningful rate.
        if job_commits[job] < 10:
            continue
        fail_chance = flakes / job_commits[job]
        print('{}/{}\t({:.0f}%)\t{}'.format(flakes, job_commits[job], 100*fail_chance, job))

    # A commit flaked iff there exists at least one job that flaked on it.
    flaked = 0
    for sha_results in commits.values():
        for results in sha_results.values():
            if 'success' in results and 'failure' in results:
                flaked += 1
                break
    print('Commits that flaked (passed and failed some job): %d/%d %.2f%%' %
          (flaked, len(commits), (flaked*100.0)/len(commits)))


if __name__ == '__main__':
    main()
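
# --- Illustrative sketch (not part of the upstream script) ---
# The flake heuristic above is small enough to demonstrate on synthetic data:
# a job "flakes" on a commit when the same commit has both a success and a
# failure recorded for that job. The payload below is hypothetical and only
# mimics the handful of prowjobs.js fields this script reads
# (spec.type, spec.job, spec.refs, status.state).

def _demo_flake_heuristic():
    """Count flakes in a tiny, hand-written set of fake presubmit results."""
    fake_items = [
        {'spec': {'type': 'presubmit', 'job': 'pull-kubernetes-unit',
                  'refs': {'org': 'kubernetes', 'repo': 'kubernetes', 'base_sha': 'abc'}},
         'status': {'state': state}}
        for state in ('success', 'failure', 'success')
    ]
    # Group results by base_sha, mirroring the jobs[job][sha] structure above.
    results_by_sha = {}
    for item in fake_items:
        sha = item['spec']['refs']['base_sha']
        results_by_sha.setdefault(sha, []).append(item['status']['state'])
    # Commit 'abc' saw both outcomes, so the job is counted as one flake.
    flakes = sum(1 for results in results_by_sha.values()
                 if 'success' in results and 'failure' in results)
    assert flakes == 1
    print('flakes on fake data:', flakes)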