github.com/verrazzano/verrazzano@v1.7.0/ci/psr/Jenkinsfile (about) 1 // Copyright (c) 2023, Oracle and/or its affiliates. 2 // Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. 3 4 def DOCKER_IMAGE_TAG 5 def SUSPECT_LIST = "" 6 def VERRAZZANO_DEV_VERSION = "" 7 def EFFECTIVE_DUMP_K8S_CLUSTER_ON_SUCCESS = false 8 def tarfilePrefix="" 9 def storeLocation="" 10 11 def agentLabel = env.JOB_NAME.contains('master') ? "2.0-large-phx" : "2.0-large" 12 13 pipeline { 14 options { 15 skipDefaultCheckout true 16 copyArtifactPermission('*'); 17 timestamps () 18 } 19 20 agent { 21 docker { 22 image "${RUNNER_DOCKER_IMAGE}" 23 args "${RUNNER_DOCKER_ARGS}" 24 registryUrl "${RUNNER_DOCKER_REGISTRY_URL}" 25 registryCredentialsId 'ocir-pull-and-push-account' 26 label "${agentLabel}" 27 } 28 } 29 30 parameters { 31 choice (name: 'KUBERNETES_CLUSTER_VERSION', description: 'Kubernetes Version for KinD Cluster', choices: [ "1.27", "1.26", "1.25", "1.24" ]) 32 string (name: 'VZ_BRANCH_TO_USE', defaultValue: 'master', trim: true, description: 'This is the name of the Verrazzano product branch to use for testing the PSR tooling; CURRENT means using the working branch') 33 string (name: 'VERRAZZANO_OPERATOR_IMAGE', defaultValue: 'NONE', trim: true, description: 'Uses a specific Verrazzano platform operator image name (in ghcr.io repo). If not specified, the latest operator.yaml from related Verrazzano repo branch will be used to create Verrazzano platform operator manifest') 34 string (name: 'KIND_NODE_COUNT', defaultValue: '3', trim: true, description: 'Number of nodes for the KIND cluster for smoke testing') 35 booleanParam (description: 'Whether to create the cluster with Calico for AT testing (defaults to true)', name: 'CREATE_CLUSTER_USE_CALICO', defaultValue: true) 36 booleanParam (description: 'Whether to capture full cluster snapshot on test failure', name: 'CAPTURE_FULL_CLUSTER', defaultValue: false) 37 booleanParam (description: 'Whether to dump k8s cluster on success (off by default can be useful to capture for comparing to failed cluster)', name: 'DUMP_K8S_CLUSTER_ON_SUCCESS', defaultValue: false) 38 39 booleanParam (name: 'PERFORM_SCAN', defaultValue: false, description: 'Whether to perform a scan of the built images') 40 booleanParam (name: 'FAIL_IF_COVERAGE_DECREASED', defaultValue: false, description: 'Whether to fail build if UT coverage number decreases lower than its release-* coverage from object storage. This defaults to true, meaning Any non release-*/master branch will fail if its coverage is lower. This can be disabled so that jobs only WARN when coverage drops but not fail.') 41 booleanParam (name: 'UPLOAD_UNIT_TEST_COVERAGE', defaultValue: false, description: 'Whether to write the UT coverage number to object storage. This always occurs for release-*/master branches. Defaults to true, but it can be disabled to not always upload.') 42 booleanParam (name: 'SKIP_BUILD', defaultValue: false, description: "Skip the PSR tool and Worker builds") 43 booleanParam (name: 'SKIP_TEST', defaultValue: false, description: "Skip the psrctl scenario validation tests") 44 booleanParam (name: 'VZ_TEST_DEBUG', defaultValue: true, description: "Enables debug of Verrazzano installation scripts") 45 string (name: 'TAGGED_TESTS', defaultValue: '', trim: true, description: 'A comma separated list of build tags for tests that should be executed (e.g. unstable_test). Default:') 46 string (name: 'INCLUDED_TESTS', defaultValue: '.*', description: 'A regex matching any fully qualified test file that should be executed (e.g. examples/helidon/). Default: .*', trim: true) 47 string (name: 'EXCLUDED_TESTS', defaultValue: '_excluded_test', description: 'A regex matching any fully qualified test file that should not be executed (e.g. multicluster/|_excluded_test). Default: _excluded_test', trim: true) 48 } 49 50 environment { 51 TEST_ENV = "PSR" 52 53 DOCKER_PSR_BACKEND_PUBLISH_IMAGE_NAME = 'psr-backend' 54 DOCKER_PSR_BRANCH_BACKEND_IMAGE_NAME = '${DOCKER_PSR_BACKEND_PUBLISH_IMAGE_NAME}-jenkins' 55 DOCKER_PSR_BACKEND_IMAGE_NAME = "${env.BRANCH_NAME ==~ /^release-.*/ || env.BRANCH_NAME == 'master' ? env.DOCKER_PSR_BACKEND_PUBLISH_IMAGE_NAME : env.DOCKER_PSR_BRANCH_BACKEND_IMAGE_NAME}" 56 57 GOPATH = '/home/opc/go' 58 GO_REPO_PATH = "${GOPATH}/src/github.com/verrazzano" 59 VZ_ROOT="${GO_REPO_PATH}/verrazzano" 60 PSR_PATH = "${VZ_ROOT}/tools/psr" 61 62 DOCKER_CREDS = credentials('github-packages-credentials-rw') 63 DOCKER_EMAIL = credentials('github-packages-email') 64 DOCKER_REGISTRY = 'ghcr.io' 65 DOCKER_REPO = "verrazzano" 66 OCR_CREDS = credentials('ocr-pull-and-push-account') 67 OCR_REPO = 'container-registry.oracle.com' 68 69 NETRC_FILE = credentials('netrc') 70 GITHUB_PKGS_CREDS = credentials('github-packages-credentials-rw') 71 SERVICE_KEY = credentials('PAGERDUTY_SERVICE_KEY') 72 73 // used for console artifact capture on failure 74 JENKINS_READ = credentials('jenkins-auditor') 75 76 OCI_CLI_AUTH="instance_principal" 77 OCI_OS_NAMESPACE = credentials('oci-os-namespace') 78 OCI_OS_ARTIFACT_BUCKET="build-failure-artifacts" 79 OCI_OS_BUCKET="verrazzano-builds" 80 OCI_OS_COMMIT_BUCKET="verrazzano-builds-by-commit" 81 OCI_OS_REGION="us-phoenix-1" 82 83 // used to emit metrics 84 PROMETHEUS_CREDENTIALS = credentials('prometheus-credentials') 85 86 OCIR_SCAN_COMPARTMENT = credentials('ocir-scan-compartment') 87 OCIR_SCAN_TARGET = credentials('ocir-scan-target') 88 OCIR_SCAN_REGISTRY = credentials('ocir-scan-registry') 89 OCIR_SCAN_REPOSITORY_PATH = credentials('ocir-scan-repository-path') 90 DOCKER_SCAN_CREDS = credentials('v8odev-ocir') 91 92 // Environment variable for Verrazzano CLI executable 93 VZ_COMMAND="${WORKSPACE}/vz" 94 95 // used to generate Ginkgo test reports 96 TEST_REPORT = "test-report.xml" 97 TESTS_EXECUTED_FILE = "${WORKSPACE}/tests_executed_file.tmp" 98 99 KUBECONFIG = "${WORKSPACE}/test_kubeconfig" 100 VERRAZZANO_KUBECONFIG = "${KUBECONFIG}" 101 WILDCARD_DNS_DOMAIN = "nip.io" 102 103 PSR_COMMAND = "${VZ_ROOT}/psrctl" 104 } 105 106 stages { 107 stage('Clean workspace and checkout') { 108 steps { 109 sh """ 110 echo "${NODE_LABELS}" 111 """ 112 113 script { 114 def scmInfo = checkout scm 115 env.GIT_COMMIT = scmInfo.GIT_COMMIT 116 env.GIT_BRANCH = scmInfo.GIT_BRANCH 117 echo "SCM checkout of ${env.GIT_BRANCH} at ${env.GIT_COMMIT}" 118 } 119 sh """ 120 cp -f "${NETRC_FILE}" $HOME/.netrc 121 chmod 600 $HOME/.netrc 122 """ 123 124 script { 125 try { 126 sh """ 127 echo "${DOCKER_CREDS_PSW}" | docker login ${env.DOCKER_REGISTRY} -u ${DOCKER_CREDS_USR} --password-stdin 128 """ 129 } catch(error) { 130 echo "docker login failed, retrying after sleep" 131 retry(4) { 132 sleep(30) 133 sh """ 134 echo "${DOCKER_CREDS_PSW}" | docker login ${env.DOCKER_REGISTRY} -u ${DOCKER_CREDS_USR} --password-stdin 135 """ 136 } 137 } 138 } 139 moveContentToGoRepoPath() 140 141 script { 142 EFFECTIVE_DUMP_K8S_CLUSTER_ON_SUCCESS = getEffectiveDumpOnSuccess() 143 // Create the image tag 144 def props = readProperties file: '.verrazzano-development-version' 145 VERRAZZANO_DEV_VERSION = props['verrazzano-development-version'] 146 TIMESTAMP = sh(returnStdout: true, script: "date +%Y%m%d%H%M%S").trim() 147 SHORT_COMMIT_HASH = sh(returnStdout: true, script: "echo $env.GIT_COMMIT | head -c 8") 148 env.VERRAZZANO_VERSION = "${VERRAZZANO_DEV_VERSION}" 149 if (!"${env.GIT_BRANCH}".startsWith("release-")) { 150 env.VERRAZZANO_VERSION = "${env.VERRAZZANO_VERSION}-${env.BUILD_NUMBER}+${SHORT_COMMIT_HASH}" 151 } 152 DOCKER_IMAGE_TAG = "v${VERRAZZANO_DEV_VERSION}-${TIMESTAMP}-${SHORT_COMMIT_HASH}" 153 154 env.VZ_TEST_BRANCH = params.VZ_BRANCH_TO_USE 155 if (params.VZ_BRANCH_TO_USE == "CURRENT") { 156 env.VZ_TEST_BRANCH = env.BRANCH_NAME 157 } 158 echo "VZ_TEST_BRANCH: ${env.VZ_TEST_BRANCH}" 159 160 // update the description with some meaningful info 161 currentBuild.description = SHORT_COMMIT_HASH + " : " + env.GIT_COMMIT 162 def currentCommitHash = env.GIT_COMMIT 163 def commitList = getCommitList() 164 withCredentials([file(credentialsId: 'jenkins-to-slack-users', variable: 'JENKINS_TO_SLACK_JSON')]) { 165 def userMappings = readJSON file: JENKINS_TO_SLACK_JSON 166 SUSPECT_LIST = getSuspectList(commitList, userMappings) 167 echo "Suspect list: ${SUSPECT_LIST}" 168 } 169 } 170 } 171 } 172 173 stage('Parallel Build, Test, and Compliance') { 174 parallel { 175 stage('Build PSR CLI and Save Binaries') { 176 when { 177 expression {params.SKIP_BUILD == false} 178 } 179 environment { 180 DOCKER_REPO="${DOCKER_REPO}" 181 DOCKER_REGISTRY="${DOCKER_REGISTRY}" 182 DOCKER_IMAGE_NAME="${DOCKER_PSR_BACKEND_IMAGE_NAME}" 183 DOCKER_IMAGE_TAG="${DOCKER_IMAGE_TAG}" 184 } 185 steps { 186 buildCLI() 187 } 188 post { 189 success { 190 script { 191 archiveArtifacts artifacts: '**/*.tar.gz*', allowEmptyArchive: true 192 echo "Saving CLI Binary" 193 saveCLIExecutables() 194 } 195 196 } 197 } 198 } 199 200 stage('Build PSR Images and Save Generated Files') { 201 when { 202 allOf { 203 not { buildingTag() } 204 expression {params.SKIP_BUILD == false} 205 } 206 } 207 environment { 208 DOCKER_REPO="${DOCKER_REPO}" 209 DOCKER_REGISTRY="${DOCKER_REGISTRY}" 210 DOCKER_IMAGE_NAME="${DOCKER_PSR_BACKEND_IMAGE_NAME}" 211 DOCKER_IMAGE_TAG="${DOCKER_IMAGE_TAG}" 212 } 213 steps { 214 script { 215 buildImages("${DOCKER_IMAGE_TAG}") 216 } 217 } 218 } 219 220 stage('Quality, Compliance Checks, and Unit Tests') { 221 when { 222 allOf { 223 not { buildingTag() } 224 expression {params.SKIP_BUILD == false} 225 } 226 } 227 steps { 228 sh """ 229 cd ${PSR_PATH} 230 make psr-quality 231 """ 232 } 233 post { 234 always { 235 sh """ 236 cd ${PSR_PATH} 237 cp coverage.html ${WORKSPACE} 238 cp coverage.xml ${WORKSPACE} 239 ${VZ_ROOT}/build/copy-junit-output.sh ${WORKSPACE} 240 """ 241 cobertura(coberturaReportFile: 'coverage.xml', 242 enableNewApi: true, 243 autoUpdateHealth: false, 244 autoUpdateStability: false, 245 failUnstable: true, 246 failUnhealthy: true, 247 failNoReports: true, 248 onlyStable: false, 249 fileCoverageTargets: '100, 0, 0', 250 lineCoverageTargets: '68, 68, 68', 251 packageCoverageTargets: '100, 0, 0', 252 ) 253 archiveArtifacts artifacts: '**/coverage.html,**/logs/**,**/*${TEST_REPORT}', allowEmptyArchive: true 254 junit testResults: 'tests/**/*.xml', allowEmptyResults: true 255 } 256 } 257 } 258 } 259 } 260 261 stage('Scan Images') { 262 when { 263 allOf { 264 not { buildingTag() } 265 expression {params.PERFORM_SCAN == true} 266 } 267 } 268 steps { 269 script { 270 scanContainerImage "${env.DOCKER_REGISTRY}/${env.DOCKER_REPO}/${DOCKER_PSR_BACKEND_IMAGE_NAME}:${DOCKER_IMAGE_TAG}" 271 } 272 } 273 post { 274 always { 275 archiveArtifacts artifacts: '**/scanning-report*.json,**/*${TEST_REPORT}', allowEmptyArchive: true 276 } 277 } 278 } 279 280 stage('Scenario Tests') { 281 when { 282 allOf { 283 expression {params.SKIP_BUILD == false} 284 expression {params.SKIP_TEST == false} 285 } 286 } 287 environment { 288 DOCKER_REPO="${env.DOCKER_REGISTRY}" 289 KIND_KUBERNETES_CLUSTER_VERSION="${params.KUBERNETES_CLUSTER_VERSION}" 290 OCI_OS_BUCKET="${env.OCI_OS_BUCKET}" 291 OCI_OS_LOCATION="${env.VZ_TEST_BRANCH}" 292 KIND_NODE_COUNT="${env.KIND_NODE_COUNT}" 293 PSR_COMMAND="${env.PSR_COMMAND}" 294 CREATE_CLUSTER_USE_CALICO="${params.CREATE_CLUSTER_USE_CALICO}" 295 } 296 steps { 297 script { 298 runMakeCommand("all") 299 } 300 } 301 post { 302 success { 303 script { 304 if (EFFECTIVE_DUMP_K8S_CLUSTER_ON_SUCCESS == true) { 305 dumpK8sCluster('psr-tests-success-cluster-snapshot') 306 } 307 } 308 } 309 failure { 310 script { 311 if ( fileExists(env.TESTS_EXECUTED_FILE) ) { 312 dumpK8sCluster('psr-tests-failure-cluster-snapshot') 313 } 314 postFailureProcessing() 315 } 316 } 317 cleanup { 318 runMakeCommand("cleanup") 319 } 320 } 321 } 322 } 323 324 post { 325 always { 326 archiveArtifacts artifacts: "**/scanning-report*.json,**/coverage.html,**/logs/**,**/verrazzano_images.txt,**/*full-cluster*/**,**/bug-report/**,**/Screenshot*.png,**/ConsoleLog*.log,**/${TEST_REPORT}", allowEmptyArchive: true 327 junit testResults: 'tests/**/*.xml', allowEmptyResults: true 328 } 329 failure { 330 sh """ 331 curl -k -u ${JENKINS_READ_USR}:${JENKINS_READ_PSW} -o archive.zip ${BUILD_URL}artifact/*zip*/archive.zip 332 oci --region us-phoenix-1 os object put --force --namespace ${OCI_OS_NAMESPACE} -bn ${OCI_OS_ARTIFACT_BUCKET} --name ${env.JOB_NAME}/${env.BRANCH_NAME}/${env.BUILD_NUMBER}/archive.zip --file archive.zip 333 rm archive.zip 334 """ 335 script { 336 if (isPagerDutyEnabled() && (env.JOB_NAME == "verrazzano/master" || env.JOB_NAME ==~ "verrazzano/release-1.*")) { 337 pagerduty(resolve: false, serviceKey: "$SERVICE_KEY", incDescription: "Verrazzano: ${env.JOB_NAME} - Failed", incDetails: "Job Failed - \"${env.JOB_NAME}\" build: ${env.BUILD_NUMBER}\n\nView the log at:\n ${env.BUILD_URL}\n\nBlue Ocean:\n${env.RUN_DISPLAY_URL}") 338 } 339 if (env.JOB_NAME == "verrazzano/master" || env.JOB_NAME ==~ "verrazzano/release-1.*" || env.BRANCH_NAME ==~ "mark/*") { 340 slackSend ( channel: "$SLACK_ALERT_CHANNEL", message: "Job Failed - \"${env.JOB_NAME}\" build: ${env.BUILD_NUMBER}\n\nView the log at:\n ${env.BUILD_URL}\n\nBlue Ocean:\n${env.RUN_DISPLAY_URL}\n\nSuspects:\n${SUSPECT_LIST}" ) 341 } 342 } 343 } 344 cleanup { 345 deleteDir() 346 } 347 } 348 } 349 350 def runMakeCommand(makeTarget) { 351 sh """ 352 cd ${VZ_ROOT}/ci/psr 353 make -I ${VZ_ROOT}/ci/make ${makeTarget} 354 """ 355 } 356 357 // Called in Stage CLI steps 358 def buildCLI() { 359 sh """ 360 cd ${PSR_PATH} 361 make go-build-cli 362 ${VZ_ROOT}/ci/scripts/save_psr_tooling.sh ${env.BRANCH_NAME} ${SHORT_COMMIT_HASH} 363 cp out/linux_amd64/psrctl ${PSR_COMMAND} 364 """ 365 } 366 367 // Called in Stage Build steps 368 // Makes target docker push for psr-backend image 369 def buildImages(dockerImageTag) { 370 sh """ 371 cd ${PSR_PATH} 372 echo 'Building PSR backend image...' 373 make docker-push DOCKER_REGISTRY=${env.DOCKER_REGISTRY} DOCKER_REPO=${env.DOCKER_REPO} DOCKER_IMAGE_NAME=${env.DOCKER_PSR_BACKEND_IMAGE_NAME} DOCKER_IMAGE_TAG=${dockerImageTag} 374 """ 375 } 376 377 // REVIEW: seems redundant with the save_psr_tooling script? 378 def saveCLIExecutables() { 379 sh """ 380 cd ${VZ_ROOT}/tools/psr 381 oci --region us-phoenix-1 os object put --force --namespace ${OCI_OS_NAMESPACE} -bn ${OCI_OS_BUCKET} --name ${env.BRANCH_NAME}/psrctl-linux-amd64.tar.gz --file $WORKSPACE/psrctl-linux-amd64.tar.gz 382 oci --region us-phoenix-1 os object put --force --namespace ${OCI_OS_NAMESPACE} -bn ${OCI_OS_COMMIT_BUCKET} --name ephemeral/${env.BRANCH_NAME}/${SHORT_COMMIT_HASH}/psrctl-linux-amd64.tar.gz --file $WORKSPACE/psrctl-linux-amd64.tar.gz 383 oci --region us-phoenix-1 os object put --force --namespace ${OCI_OS_NAMESPACE} -bn ${OCI_OS_BUCKET} --name ${env.BRANCH_NAME}/psrctl-linux-amd64.tar.gz.sha256 --file $WORKSPACE/psrctl-linux-amd64.tar.gz.sha256 384 oci --region us-phoenix-1 os object put --force --namespace ${OCI_OS_NAMESPACE} -bn ${OCI_OS_COMMIT_BUCKET} --name ephemeral/${env.BRANCH_NAME}/${SHORT_COMMIT_HASH}/psrctl-linux-amd64.tar.gz.sha256 --file $WORKSPACE/psrctl-linux-amd64.tar.gz.sha256 385 """ 386 } 387 388 def dumpK8sCluster(dumpDirectory) { 389 sh """ 390 ${VZ_ROOT}/ci/scripts/capture_cluster_snapshot.sh ${dumpDirectory} 391 """ 392 } 393 394 def postFailureProcessing() { 395 sh """ 396 curl -k -u ${JENKINS_READ_USR}:${JENKINS_READ_PSW} -o ${WORKSPACE}/build-console-output.log ${BUILD_URL}consoleText 397 """ 398 archiveArtifacts artifacts: '**/build-console-output.log,**/*${TEST_REPORT}', allowEmptyArchive: true 399 sh """ 400 curl -k -u ${JENKINS_READ_USR}:${JENKINS_READ_PSW} -o archive.zip ${BUILD_URL}artifact/*zip*/archive.zip 401 oci --region us-phoenix-1 os object put --force --namespace ${OCI_OS_NAMESPACE} -bn ${OCI_OS_ARTIFACT_BUCKET} --name ${env.JOB_NAME}/${env.BRANCH_NAME}/${env.BUILD_NUMBER}/archive.zip --file archive.zip 402 rm archive.zip 403 """ 404 script { 405 if (env.BRANCH_NAME == "master" || env.BRANCH_NAME ==~ "release-.*" || env.BRANCH_NAME ==~ "mark/*") { 406 slackSend ( message: "Job Failed - \"${env.JOB_NAME}\" build: ${env.BUILD_NUMBER}\n\nView the log at:\n ${env.BUILD_URL}\n\nBlue Ocean:\n${env.RUN_DISPLAY_URL}" ) 407 } 408 } 409 } 410 411 def getEffectiveDumpOnSuccess() { 412 def effectiveValue = params.DUMP_K8S_CLUSTER_ON_SUCCESS 413 if (FORCE_DUMP_K8S_CLUSTER_ON_SUCCESS.equals("true") && (env.BRANCH_NAME.equals("master"))) { 414 effectiveValue = true 415 echo "Forcing dump on success based on global override setting" 416 } 417 return effectiveValue 418 } 419 420 def isPagerDutyEnabled() { 421 // this controls whether PD alerts are enabled 422 //if (NOTIFY_PAGERDUTY_MAINJOB_FAILURES.equals("true")) { 423 // echo "Pager-Duty notifications enabled via global override setting" 424 // return true 425 //} 426 return false 427 } 428 429 // Called in Stage Clean workspace and checkout steps 430 def moveContentToGoRepoPath() { 431 sh """ 432 rm -rf ${VZ_ROOT} 433 mkdir -p ${VZ_ROOT} 434 tar cf - . | (cd ${VZ_ROOT}/ ; tar xf -) 435 """ 436 } 437 438 // Called in Stage Clean workspace and checkout steps 439 @NonCPS 440 def getCommitList() { 441 echo "Checking for change sets" 442 def commitList = [] 443 def changeSets = currentBuild.changeSets 444 for (int i = 0; i < changeSets.size(); i++) { 445 echo "get commits from change set" 446 def commits = changeSets[i].items 447 for (int j = 0; j < commits.length; j++) { 448 def commit = commits[j] 449 def id = commit.commitId 450 echo "Add commit id: ${id}" 451 commitList.add(id) 452 } 453 } 454 return commitList 455 } 456 457 def trimIfGithubNoreplyUser(userIn) { 458 if (userIn == null) { 459 echo "Not a github noreply user, not trimming: ${userIn}" 460 return userIn 461 } 462 if (userIn.matches(".*\\+.*@users.noreply.github.com.*")) { 463 def userOut = userIn.substring(userIn.indexOf("+") + 1, userIn.indexOf("@")) 464 return userOut; 465 } 466 if (userIn.matches(".*<.*@users.noreply.github.com.*")) { 467 def userOut = userIn.substring(userIn.indexOf("<") + 1, userIn.indexOf("@")) 468 return userOut; 469 } 470 if (userIn.matches(".*@users.noreply.github.com")) { 471 def userOut = userIn.substring(0, userIn.indexOf("@")) 472 return userOut; 473 } 474 echo "Not a github noreply user, not trimming: ${userIn}" 475 return userIn 476 } 477 478 def getSuspectList(commitList, userMappings) { 479 def retValue = "" 480 def suspectList = [] 481 if (commitList == null || commitList.size() == 0) { 482 echo "No commits to form suspect list" 483 } else { 484 for (int i = 0; i < commitList.size(); i++) { 485 def id = commitList[i] 486 try { 487 def gitAuthor = sh( 488 script: "git log --format='%ae' '$id^!'", 489 returnStdout: true 490 ).trim() 491 if (gitAuthor != null) { 492 def author = trimIfGithubNoreplyUser(gitAuthor) 493 echo "DEBUG: author: ${gitAuthor}, ${author}, id: ${id}" 494 if (userMappings.containsKey(author)) { 495 def slackUser = userMappings.get(author) 496 if (!suspectList.contains(slackUser)) { 497 echo "Added ${slackUser} as suspect" 498 retValue += " ${slackUser}" 499 suspectList.add(slackUser) 500 } 501 } else { 502 // If we don't have a name mapping use the commit.author, at least we can easily tell if the mapping gets dated 503 if (!suspectList.contains(author)) { 504 echo "Added ${author} as suspect" 505 retValue += " ${author}" 506 suspectList.add(author) 507 } 508 } 509 } else { 510 echo "No author returned from git" 511 } 512 } catch (Exception e) { 513 echo "INFO: Problem processing commit ${id}, skipping commit: " + e.toString() 514 } 515 } 516 } 517 def startedByUser = ""; 518 def causes = currentBuild.getBuildCauses() 519 echo "causes: " + causes.toString() 520 for (cause in causes) { 521 def causeString = cause.toString() 522 echo "current cause: " + causeString 523 def causeInfo = readJSON text: causeString 524 if (causeInfo.userId != null) { 525 startedByUser = causeInfo.userId 526 } 527 } 528 529 if (startedByUser.length() > 0) { 530 echo "Build was started by a user, adding them to the suspect notification list: ${startedByUser}" 531 def author = trimIfGithubNoreplyUser(startedByUser) 532 echo "DEBUG: author: ${startedByUser}, ${author}" 533 if (userMappings.containsKey(author)) { 534 def slackUser = userMappings.get(author) 535 if (!suspectList.contains(slackUser)) { 536 echo "Added ${slackUser} as suspect" 537 retValue += " ${slackUser}" 538 suspectList.add(slackUser) 539 } 540 } else { 541 // If we don't have a name mapping use the commit.author, at least we can easily tell if the mapping gets dated 542 if (!suspectList.contains(author)) { 543 echo "Added ${author} as suspect" 544 retValue += " ${author}" 545 suspectList.add(author) 546 } 547 } 548 } else { 549 echo "Build not started by a user, not adding to notification list" 550 } 551 echo "returning suspect list: ${retValue}" 552 return retValue 553 }