github.com/verrazzano/verrazzano@v1.7.1/ci/psr/Jenkinsfile

// Copyright (c) 2023, Oracle and/or its affiliates.
// Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.

def DOCKER_IMAGE_TAG
def SUSPECT_LIST = ""
def VERRAZZANO_DEV_VERSION = ""
def EFFECTIVE_DUMP_K8S_CLUSTER_ON_SUCCESS = false
def tarfilePrefix = ""
def storeLocation = ""

def agentLabel = env.JOB_NAME.contains('master') ? "2.0-large-phx" : "2.0-large"

pipeline {
    options {
        skipDefaultCheckout true
        copyArtifactPermission('*');
        timestamps ()
    }

    agent {
        docker {
            image "${RUNNER_DOCKER_IMAGE}"
            args "${RUNNER_DOCKER_ARGS}"
            registryUrl "${RUNNER_DOCKER_REGISTRY_URL}"
            registryCredentialsId 'ocir-pull-and-push-account'
            label "${agentLabel}"
        }
    }

    parameters {
        choice (name: 'KUBERNETES_CLUSTER_VERSION', description: 'Kubernetes version for the KinD cluster', choices: [ "1.27", "1.26", "1.25", "1.24" ])
        string (name: 'VZ_BRANCH_TO_USE', defaultValue: 'master', trim: true, description: 'The name of the Verrazzano product branch to use for testing the PSR tooling; CURRENT means use the working branch')
        string (name: 'VERRAZZANO_OPERATOR_IMAGE', defaultValue: 'NONE', trim: true, description: 'Uses a specific Verrazzano platform operator image name (in the ghcr.io repo). If not specified, the latest operator.yaml from the related Verrazzano repo branch is used to create the Verrazzano platform operator manifest')
        string (name: 'KIND_NODE_COUNT', defaultValue: '3', trim: true, description: 'Number of nodes for the KinD cluster used for smoke testing')
        booleanParam (description: 'Whether to create the cluster with Calico for AT testing (defaults to true)', name: 'CREATE_CLUSTER_USE_CALICO', defaultValue: true)
        booleanParam (description: 'Whether to capture a full cluster snapshot on test failure', name: 'CAPTURE_FULL_CLUSTER', defaultValue: false)
        booleanParam (description: 'Whether to dump the k8s cluster on success (off by default; can be useful to capture for comparing to a failed cluster)', name: 'DUMP_K8S_CLUSTER_ON_SUCCESS', defaultValue: false)

        booleanParam (name: 'PERFORM_SCAN', defaultValue: false, description: 'Whether to perform a scan of the built images')
        booleanParam (name: 'FAIL_IF_COVERAGE_DECREASED', defaultValue: false, description: 'Whether to fail the build if UT coverage drops below its release-* coverage from object storage. When enabled, any non release-*/master branch fails if its coverage is lower; when disabled, jobs only warn when coverage drops.')
        booleanParam (name: 'UPLOAD_UNIT_TEST_COVERAGE', defaultValue: false, description: 'Whether to write the UT coverage number to object storage. This always occurs for release-*/master branches; disable to skip uploading for other branches.')
        booleanParam (name: 'SKIP_BUILD', defaultValue: false, description: "Skip the PSR tool and worker builds")
        booleanParam (name: 'SKIP_TEST', defaultValue: false, description: "Skip the psrctl scenario validation tests")
        booleanParam (name: 'VZ_TEST_DEBUG', defaultValue: true, description: "Enables debug output of the Verrazzano installation scripts")
        string (name: 'TAGGED_TESTS', defaultValue: '', trim: true, description: 'A comma separated list of build tags for tests that should be executed (e.g. unstable_test). Default: (empty)')
        string (name: 'INCLUDED_TESTS', defaultValue: '.*', description: 'A regex matching any fully qualified test file that should be executed (e.g. examples/helidon/). Default: .*', trim: true)
        string (name: 'EXCLUDED_TESTS', defaultValue: '_excluded_test', description: 'A regex matching any fully qualified test file that should not be executed (e.g. multicluster/|_excluded_test). Default: _excluded_test', trim: true)
    }

    environment {
        TEST_ENV = "PSR"

        DOCKER_PSR_BACKEND_PUBLISH_IMAGE_NAME = 'psr-backend'
        DOCKER_PSR_BRANCH_BACKEND_IMAGE_NAME = '${DOCKER_PSR_BACKEND_PUBLISH_IMAGE_NAME}-jenkins'
        DOCKER_PSR_BACKEND_IMAGE_NAME = "${env.BRANCH_NAME ==~ /^release-.*/ || env.BRANCH_NAME == 'master' ? env.DOCKER_PSR_BACKEND_PUBLISH_IMAGE_NAME : env.DOCKER_PSR_BRANCH_BACKEND_IMAGE_NAME}"

        GOPATH = '/home/opc/go'
        GO_REPO_PATH = "${GOPATH}/src/github.com/verrazzano"
        VZ_ROOT = "${GO_REPO_PATH}/verrazzano"
        PSR_PATH = "${VZ_ROOT}/tools/psr"

        DOCKER_CREDS = credentials('github-packages-credentials-rw')
        DOCKER_EMAIL = credentials('github-packages-email')
        DOCKER_REGISTRY = 'ghcr.io'
        DOCKER_REPO = "verrazzano"
        OCR_CREDS = credentials('ocr-pull-and-push-account')
        OCR_REPO = 'container-registry.oracle.com'

        NETRC_FILE = credentials('netrc')
        GITHUB_PKGS_CREDS = credentials('github-packages-credentials-rw')
        SERVICE_KEY = credentials('PAGERDUTY_SERVICE_KEY')

        // used for console artifact capture on failure
        JENKINS_READ = credentials('jenkins-auditor')

        OCI_CLI_AUTH = "instance_principal"
        OCI_OS_NAMESPACE = credentials('oci-os-namespace')
        OCI_OS_ARTIFACT_BUCKET = "build-failure-artifacts"
        OCI_OS_REGION = "us-phoenix-1"

        // used to emit metrics
        PROMETHEUS_CREDENTIALS = credentials('prometheus-credentials')

        OCIR_SCAN_COMPARTMENT = credentials('ocir-scan-compartment')
        OCIR_SCAN_TARGET = credentials('ocir-scan-target')
        OCIR_SCAN_REGISTRY = credentials('ocir-scan-registry')
        OCIR_SCAN_REPOSITORY_PATH = credentials('ocir-scan-repository-path')
        DOCKER_SCAN_CREDS = credentials('v8odev-ocir')

        // Environment variable for the Verrazzano CLI executable
        VZ_COMMAND = "${WORKSPACE}/vz"

        // used to generate Ginkgo test reports
        TEST_REPORT = "test-report.xml"
        TESTS_EXECUTED_FILE = "${WORKSPACE}/tests_executed_file.tmp"

        KUBECONFIG = "${WORKSPACE}/test_kubeconfig"
        VERRAZZANO_KUBECONFIG = "${KUBECONFIG}"
        WILDCARD_DNS_DOMAIN = "nip.io"

        PSR_COMMAND = "${VZ_ROOT}/psrctl"
    }

    stages {
        stage('Clean workspace and checkout') {
            steps {
                sh """
                    echo "${NODE_LABELS}"
                """

                script {
                    def scmInfo = checkout scm
                    env.GIT_COMMIT = scmInfo.GIT_COMMIT
                    env.GIT_BRANCH = scmInfo.GIT_BRANCH
                    echo "SCM checkout of ${env.GIT_BRANCH} at ${env.GIT_COMMIT}"
                }
                sh """
                    cp -f "${NETRC_FILE}" $HOME/.netrc
                    chmod 600 $HOME/.netrc
                """

                script {
                    try {
                        sh """
                            echo "${DOCKER_CREDS_PSW}" | docker login ${env.DOCKER_REGISTRY} -u ${DOCKER_CREDS_USR} --password-stdin
                        """
                    } catch(error) {
                        echo "docker login failed, retrying after sleep"
                        retry(4) {
                            sleep(30)
                            sh """
                                echo "${DOCKER_CREDS_PSW}" | docker login ${env.DOCKER_REGISTRY} -u ${DOCKER_CREDS_USR} --password-stdin
                            """
                        }
                    }
                }
                moveContentToGoRepoPath()

                script {
                    EFFECTIVE_DUMP_K8S_CLUSTER_ON_SUCCESS = getEffectiveDumpOnSuccess()
                    // Create the image tag
                    def props = readProperties file: '.verrazzano-development-version'
                    VERRAZZANO_DEV_VERSION = props['verrazzano-development-version']
                    TIMESTAMP = sh(returnStdout: true, script: "date +%Y%m%d%H%M%S").trim()
                    SHORT_COMMIT_HASH = sh(returnStdout: true, script: "echo $env.GIT_COMMIT | head -c 8")
                    env.VERRAZZANO_VERSION = "${VERRAZZANO_DEV_VERSION}"
                    if (!"${env.GIT_BRANCH}".startsWith("release-")) {
                        env.VERRAZZANO_VERSION = "${env.VERRAZZANO_VERSION}-${env.BUILD_NUMBER}+${SHORT_COMMIT_HASH}"
                    }
                    DOCKER_IMAGE_TAG = "v${VERRAZZANO_DEV_VERSION}-${TIMESTAMP}-${SHORT_COMMIT_HASH}"

                    env.VZ_TEST_BRANCH = params.VZ_BRANCH_TO_USE
                    if (params.VZ_BRANCH_TO_USE == "CURRENT") {
                        env.VZ_TEST_BRANCH = env.BRANCH_NAME
                    }
                    echo "VZ_TEST_BRANCH: ${env.VZ_TEST_BRANCH}"

                    // update the description with some meaningful info
                    currentBuild.description = SHORT_COMMIT_HASH + " : " + env.GIT_COMMIT
                    def currentCommitHash = env.GIT_COMMIT
                    def commitList = getCommitList()
                    withCredentials([file(credentialsId: 'jenkins-to-slack-users', variable: 'JENKINS_TO_SLACK_JSON')]) {
                        def userMappings = readJSON file: JENKINS_TO_SLACK_JSON
                        SUSPECT_LIST = getSuspectList(commitList, userMappings)
                        echo "Suspect list: ${SUSPECT_LIST}"
                    }
                }
            }
        }

        stage('Parallel Build, Test, and Compliance') {
            parallel {
                stage('Build PSR CLI and Save Binaries') {
                    when {
                        expression { params.SKIP_BUILD == false }
                    }
                    environment {
                        DOCKER_REPO = "${DOCKER_REPO}"
                        DOCKER_REGISTRY = "${DOCKER_REGISTRY}"
                        DOCKER_IMAGE_NAME = "${DOCKER_PSR_BACKEND_IMAGE_NAME}"
                        DOCKER_IMAGE_TAG = "${DOCKER_IMAGE_TAG}"
                    }
                    steps {
                        buildCLI()
                    }
                    post {
                        success {
                            script {
                                archiveArtifacts artifacts: '**/*.tar.gz*', allowEmptyArchive: true
                                echo "Saving CLI Binary"
                                saveCLIExecutables()
                            }
                        }
                    }
                }

                stage('Build PSR Images and Save Generated Files') {
                    when {
                        allOf {
                            not { buildingTag() }
                            expression { params.SKIP_BUILD == false }
                        }
                    }
                    environment {
                        DOCKER_REPO = "${DOCKER_REPO}"
                        DOCKER_REGISTRY = "${DOCKER_REGISTRY}"
                        DOCKER_IMAGE_NAME = "${DOCKER_PSR_BACKEND_IMAGE_NAME}"
                        DOCKER_IMAGE_TAG = "${DOCKER_IMAGE_TAG}"
                    }
                    steps {
                        script {
                            buildImages("${DOCKER_IMAGE_TAG}")
                        }
                    }
                }

                stage('Quality, Compliance Checks, and Unit Tests') {
                    when {
                        allOf {
                            not { buildingTag() }
                            expression { params.SKIP_BUILD == false }
                        }
                    }
                    steps {
                        sh """
                            cd ${PSR_PATH}
                            make psr-quality
                        """
                    }
                    post {
                        always {
                            sh """
                                cd ${PSR_PATH}
                                cp coverage.html ${WORKSPACE}
                                cp coverage.xml ${WORKSPACE}
                                ${VZ_ROOT}/build/copy-junit-output.sh ${WORKSPACE}
                            """
                            cobertura(coberturaReportFile: 'coverage.xml',
                                enableNewApi: true,
                                autoUpdateHealth: false,
                                autoUpdateStability: false,
                                failUnstable: true,
                                failUnhealthy: true,
                                failNoReports: true,
                                onlyStable: false,
                                fileCoverageTargets: '100, 0, 0',
                                lineCoverageTargets: '68, 68, 68',
                                packageCoverageTargets: '100, 0, 0',
                            )
                            archiveArtifacts artifacts: "**/coverage.html,**/logs/**,**/*${TEST_REPORT}", allowEmptyArchive: true
                            junit testResults: 'tests/**/*.xml', allowEmptyResults: true
                        }
                    }
                }
            }
        }

        stage('Scan Images') {
            when {
                allOf {
                    not { buildingTag() }
                    expression { params.PERFORM_SCAN == true }
                }
            }
            steps {
                script {
                    scanContainerImage "${env.DOCKER_REGISTRY}/${env.DOCKER_REPO}/${DOCKER_PSR_BACKEND_IMAGE_NAME}:${DOCKER_IMAGE_TAG}"
                }
            }
            post {
                always {
                    archiveArtifacts artifacts: "**/scanning-report*.json,**/*${TEST_REPORT}", allowEmptyArchive: true
                }
            }
        }

        stage('Scenario Tests') {
            when {
                allOf {
                    expression { params.SKIP_BUILD == false }
                    expression { params.SKIP_TEST == false }
                }
            }
            environment {
                DOCKER_REPO = "${env.DOCKER_REGISTRY}"
                KIND_KUBERNETES_CLUSTER_VERSION = "${params.KUBERNETES_CLUSTER_VERSION}"
                OCI_OS_BUCKET = "${env.OCI_OS_BUCKET}"
                OCI_OS_LOCATION = "${env.VZ_TEST_BRANCH}"
                KIND_NODE_COUNT = "${env.KIND_NODE_COUNT}"
                PSR_COMMAND = "${env.PSR_COMMAND}"
                CREATE_CLUSTER_USE_CALICO = "${params.CREATE_CLUSTER_USE_CALICO}"
            }
            steps {
                script {
                    runMakeCommand("all")
                }
            }
            post {
                success {
                    script {
                        if (EFFECTIVE_DUMP_K8S_CLUSTER_ON_SUCCESS == true) {
                            dumpK8sCluster('psr-tests-success-cluster-snapshot')
                        }
                    }
                }
                failure {
                    script {
                        if ( fileExists(env.TESTS_EXECUTED_FILE) ) {
                            dumpK8sCluster('psr-tests-failure-cluster-snapshot')
                        }
                        postFailureProcessing()
                    }
                }
                cleanup {
                    runMakeCommand("cleanup")
                }
            }
        }
    }

    post {
        always {
            archiveArtifacts artifacts: "**/scanning-report*.json,**/coverage.html,**/logs/**,**/verrazzano_images.txt,**/*full-cluster*/**,**/bug-report/**,**/Screenshot*.png,**/ConsoleLog*.log,**/${TEST_REPORT}", allowEmptyArchive: true
            junit testResults: 'tests/**/*.xml', allowEmptyResults: true
        }
        failure {
            sh """
                curl -k -u ${JENKINS_READ_USR}:${JENKINS_READ_PSW} -o archive.zip ${BUILD_URL}artifact/*zip*/archive.zip
                oci --region us-phoenix-1 os object put --force --namespace ${OCI_OS_NAMESPACE} -bn ${OCI_OS_ARTIFACT_BUCKET} --name ${env.JOB_NAME}/${env.BRANCH_NAME}/${env.BUILD_NUMBER}/archive.zip --file archive.zip
                rm archive.zip
            """
            script {
                if (isPagerDutyEnabled() && (env.JOB_NAME == "verrazzano/master" || env.JOB_NAME ==~ "verrazzano/release-1.*")) {
                    pagerduty(resolve: false, serviceKey: "$SERVICE_KEY", incDescription: "Verrazzano: ${env.JOB_NAME} - Failed", incDetails: "Job Failed - \"${env.JOB_NAME}\" build: ${env.BUILD_NUMBER}\n\nView the log at:\n ${env.BUILD_URL}\n\nBlue Ocean:\n${env.RUN_DISPLAY_URL}")
                }
                if (env.JOB_NAME == "verrazzano/master" || env.JOB_NAME ==~ "verrazzano/release-1.*" || env.BRANCH_NAME ==~ "mark/*") {
                    slackSend ( channel: "$SLACK_ALERT_CHANNEL", message: "Job Failed - \"${env.JOB_NAME}\" build: ${env.BUILD_NUMBER}\n\nView the log at:\n ${env.BUILD_URL}\n\nBlue Ocean:\n${env.RUN_DISPLAY_URL}\n\nSuspects:\n${SUSPECT_LIST}" )
                }
            }
        }
        cleanup {
            deleteDir()
        }
    }
}

// Runs the given target of the ci/psr Makefile
def runMakeCommand(makeTarget) {
    sh """
        cd ${VZ_ROOT}/ci/psr
        make -I ${VZ_ROOT}/ci/make ${makeTarget}
    """
}

// Called in Stage CLI steps
def buildCLI() {
    sh """
        cd ${PSR_PATH}
        make go-build-cli
        ${VZ_ROOT}/ci/scripts/save_psr_tooling.sh ${env.BRANCH_NAME} ${SHORT_COMMIT_HASH}
        cp out/linux_amd64/psrctl ${PSR_COMMAND}
    """
}

// Called in Stage Build steps
// Makes target docker-push for the psr-backend image
def buildImages(dockerImageTag) {
    sh """
        cd ${PSR_PATH}
        echo 'Building PSR backend image...'
        make docker-push DOCKER_REGISTRY=${env.DOCKER_REGISTRY} DOCKER_REPO=${env.DOCKER_REPO} DOCKER_IMAGE_NAME=${env.DOCKER_PSR_BACKEND_IMAGE_NAME} DOCKER_IMAGE_TAG=${dockerImageTag}
    """
}

// REVIEW: seems redundant with the save_psr_tooling script?
// Uploads the psrctl tarball and its sha256 checksum to the branch and per-commit object storage buckets
def saveCLIExecutables() {
    sh """
        cd ${VZ_ROOT}/tools/psr
        oci --region us-phoenix-1 os object put --force --namespace ${OCI_OS_NAMESPACE} -bn ${OCI_OS_BUCKET} --name ${env.BRANCH_NAME}/psrctl-linux-amd64.tar.gz --file $WORKSPACE/psrctl-linux-amd64.tar.gz
        oci --region us-phoenix-1 os object put --force --namespace ${OCI_OS_NAMESPACE} -bn ${OCI_OS_COMMIT_BUCKET} --name ephemeral/${env.BRANCH_NAME}/${SHORT_COMMIT_HASH}/psrctl-linux-amd64.tar.gz --file $WORKSPACE/psrctl-linux-amd64.tar.gz
        oci --region us-phoenix-1 os object put --force --namespace ${OCI_OS_NAMESPACE} -bn ${OCI_OS_BUCKET} --name ${env.BRANCH_NAME}/psrctl-linux-amd64.tar.gz.sha256 --file $WORKSPACE/psrctl-linux-amd64.tar.gz.sha256
        oci --region us-phoenix-1 os object put --force --namespace ${OCI_OS_NAMESPACE} -bn ${OCI_OS_COMMIT_BUCKET} --name ephemeral/${env.BRANCH_NAME}/${SHORT_COMMIT_HASH}/psrctl-linux-amd64.tar.gz.sha256 --file $WORKSPACE/psrctl-linux-amd64.tar.gz.sha256
    """
}

// Captures a cluster snapshot into the given directory
def dumpK8sCluster(dumpDirectory) {
    sh """
        ${VZ_ROOT}/ci/scripts/capture_cluster_snapshot.sh ${dumpDirectory}
    """
}

// Archives the console log and build artifacts to object storage and notifies Slack for master/release branches
def postFailureProcessing() {
    sh """
        curl -k -u ${JENKINS_READ_USR}:${JENKINS_READ_PSW} -o ${WORKSPACE}/build-console-output.log ${BUILD_URL}consoleText
    """
    archiveArtifacts artifacts: "**/build-console-output.log,**/*${TEST_REPORT}", allowEmptyArchive: true
    sh """
        curl -k -u ${JENKINS_READ_USR}:${JENKINS_READ_PSW} -o archive.zip ${BUILD_URL}artifact/*zip*/archive.zip
        oci --region us-phoenix-1 os object put --force --namespace ${OCI_OS_NAMESPACE} -bn ${OCI_OS_ARTIFACT_BUCKET} --name ${env.JOB_NAME}/${env.BRANCH_NAME}/${env.BUILD_NUMBER}/archive.zip --file archive.zip
        rm archive.zip
    """
    script {
        if (env.BRANCH_NAME == "master" || env.BRANCH_NAME ==~ "release-.*" || env.BRANCH_NAME ==~ "mark/*") {
            slackSend ( message: "Job Failed - \"${env.JOB_NAME}\" build: ${env.BUILD_NUMBER}\n\nView the log at:\n ${env.BUILD_URL}\n\nBlue Ocean:\n${env.RUN_DISPLAY_URL}" )
        }
    }
}

// Returns the effective dump-on-success setting, honoring the global FORCE_DUMP_K8S_CLUSTER_ON_SUCCESS override on master
def getEffectiveDumpOnSuccess() {
    def effectiveValue = params.DUMP_K8S_CLUSTER_ON_SUCCESS
    if (FORCE_DUMP_K8S_CLUSTER_ON_SUCCESS.equals("true") && (env.BRANCH_NAME.equals("master"))) {
        effectiveValue = true
        echo "Forcing dump on success based on global override setting"
    }
    return effectiveValue
}

def isPagerDutyEnabled() {
    // this controls whether PD alerts are enabled
    //if (NOTIFY_PAGERDUTY_MAINJOB_FAILURES.equals("true")) {
    //    echo "Pager-Duty notifications enabled via global override setting"
    //    return true
    //}
    return false
}

// Called in Stage Clean workspace and checkout steps
def moveContentToGoRepoPath() {
    sh """
        rm -rf ${VZ_ROOT}
        mkdir -p ${VZ_ROOT}
        tar cf - . | (cd ${VZ_ROOT}/ ; tar xf -)
    """
}

// Called in Stage Clean workspace and checkout steps
@NonCPS
def getCommitList() {
    echo "Checking for change sets"
    def commitList = []
    def changeSets = currentBuild.changeSets
    for (int i = 0; i < changeSets.size(); i++) {
        echo "get commits from change set"
        def commits = changeSets[i].items
        for (int j = 0; j < commits.length; j++) {
            def commit = commits[j]
            def id = commit.commitId
            echo "Add commit id: ${id}"
            commitList.add(id)
        }
    }
    return commitList
}

// Extracts the GitHub username from a users.noreply.github.com address; returns the input unchanged otherwise
def trimIfGithubNoreplyUser(userIn) {
    if (userIn == null) {
        echo "Not a github noreply user, not trimming: ${userIn}"
        return userIn
    }
    if (userIn.matches(".*\\+.*@users.noreply.github.com.*")) {
        def userOut = userIn.substring(userIn.indexOf("+") + 1, userIn.indexOf("@"))
        return userOut;
    }
    if (userIn.matches(".*<.*@users.noreply.github.com.*")) {
        def userOut = userIn.substring(userIn.indexOf("<") + 1, userIn.indexOf("@"))
        return userOut;
    }
    if (userIn.matches(".*@users.noreply.github.com")) {
        def userOut = userIn.substring(0, userIn.indexOf("@"))
        return userOut;
    }
    echo "Not a github noreply user, not trimming: ${userIn}"
    return userIn
}

// Builds the Slack suspect list from the commit authors and the user who started the build, using the jenkins-to-slack-users mapping
def getSuspectList(commitList, userMappings) {
    def retValue = ""
    def suspectList = []
    if (commitList == null || commitList.size() == 0) {
        echo "No commits to form suspect list"
    } else {
        for (int i = 0; i < commitList.size(); i++) {
            def id = commitList[i]
            try {
                def gitAuthor = sh(
                    script: "git log --format='%ae' '$id^!'",
                    returnStdout: true
                ).trim()
                if (gitAuthor != null) {
                    def author = trimIfGithubNoreplyUser(gitAuthor)
                    echo "DEBUG: author: ${gitAuthor}, ${author}, id: ${id}"
                    if (userMappings.containsKey(author)) {
                        def slackUser = userMappings.get(author)
                        if (!suspectList.contains(slackUser)) {
                            echo "Added ${slackUser} as suspect"
                            retValue += " ${slackUser}"
                            suspectList.add(slackUser)
                        }
                    } else {
                        // If we don't have a name mapping use the commit.author, at least we can easily tell if the mapping gets dated
                        if (!suspectList.contains(author)) {
                            echo "Added ${author} as suspect"
                            retValue += " ${author}"
                            suspectList.add(author)
                        }
                    }
                } else {
                    echo "No author returned from git"
                }
            } catch (Exception e) {
                echo "INFO: Problem processing commit ${id}, skipping commit: " + e.toString()
            }
        }
    }
    def startedByUser = "";
    def causes = currentBuild.getBuildCauses()
    echo "causes: " + causes.toString()
    for (cause in causes) {
        def causeString = cause.toString()
        echo "current cause: " + causeString
        def causeInfo = readJSON text: causeString
        if (causeInfo.userId != null) {
            startedByUser = causeInfo.userId
        }
    }

    if (startedByUser.length() > 0) {
        echo "Build was started by a user, adding them to the suspect notification list: ${startedByUser}"
        def author = trimIfGithubNoreplyUser(startedByUser)
        echo "DEBUG: author: ${startedByUser}, ${author}"
        if (userMappings.containsKey(author)) {
            def slackUser = userMappings.get(author)
            if (!suspectList.contains(slackUser)) {
                echo "Added ${slackUser} as suspect"
                retValue += " ${slackUser}"
                suspectList.add(slackUser)
            }
        } else {
            // If we don't have a name mapping use the commit.author, at least we can easily tell if the mapping gets dated
            if (!suspectList.contains(author)) {
                echo "Added ${author} as suspect"
                retValue += " ${author}"
                suspectList.add(author)
            }
        }
    } else {
        echo "Build not started by a user, not adding to notification list"
    }
    echo "returning suspect list: ${retValue}"
    return retValue
}