github.com/verrazzano/verrazzano@v1.7.1/ci/generic/Jenkinsfile (about) 1 // Copyright (c) 2023, 2024, Oracle and/or its affiliates. 2 // Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl. 3 4 def DOCKER_IMAGE_TAG 5 def agentLabel = env.JOB_NAME.contains('master') ? "2.0-large-phx" : "2.0-large" 6 def EFFECTIVE_DUMP_K8S_CLUSTER_ON_SUCCESS = false 7 8 targetsKey = "targets" 9 displayNameKey = "displayname" 10 unknownTargetName = "UNKNOWN" 11 testGroupsKey = "testGroups" 12 13 pipeline { 14 options { 15 timeout(time: 1, unit: 'HOURS') 16 skipDefaultCheckout true 17 timestamps () 18 } 19 20 agent { 21 docker { 22 image "${RUNNER_DOCKER_IMAGE}" 23 args "${RUNNER_DOCKER_ARGS}" 24 registryUrl "${RUNNER_DOCKER_REGISTRY_URL}" 25 registryCredentialsId 'ocir-pull-and-push-account' 26 label "${agentLabel}" 27 } 28 } 29 30 parameters { 31 choice (name: 'KUBERNETES_CLUSTER_VERSION', 32 description: 'Kubernetes Version for KinD Cluster', 33 // 1st choice is the default value 34 choices: [ "1.27", "1.26","1.25", "1.24" ]) 35 string (name: 'GIT_COMMIT_TO_USE', 36 defaultValue: 'NONE', 37 description: 'This is the full git commit hash from the source build to be used for all jobs', 38 trim: true) 39 string (name: 'PIPELINE_DESCRIPTOR', 40 defaultValue: 'ci/generic/kind-acceptance-tests.yaml', 41 description: 'Pipeline descriptor file location, relative to base of Verrazzano repo', 42 trim: true) 43 string (name: 'VERRAZZANO_OPERATOR_IMAGE', 44 defaultValue: 'NONE', 45 description: 'Verrazzano platform operator image name (in ghcr.io repo). If not specified, the operator.yaml from Verrazzano repo will be used to create Verrazzano platform operator', 46 trim: true) 47 choice (name: 'WILDCARD_DNS_DOMAIN', 48 description: 'This is the wildcard DNS domain', 49 // 1st choice is the default value 50 choices: [ "nip.io", "sslip.io"]) 51 choice (name: 'CRD_API_VERSION', 52 description: 'This is the API crd version.', 53 // 1st choice is the default value 54 choices: [ "v1beta1", "v1alpha1"]) 55 booleanParam (description: 'Whether to create the cluster with Calico for AT testing (defaults to true)', name: 'CREATE_CLUSTER_USE_CALICO', defaultValue: true) 56 booleanParam (description: 'Whether to dump k8s cluster on success (off by default can be useful to capture for comparing to failed cluster)', name: 'DUMP_K8S_CLUSTER_ON_SUCCESS', defaultValue: false) 57 string (name: 'CONSOLE_REPO_BRANCH', 58 defaultValue: '', 59 description: 'The branch to check out after cloning the console repository.', 60 trim: true) 61 booleanParam (description: 'Whether to enable debug logging of the istio envoy in the VZ API pod', name: 'ENABLE_API_ENVOY_LOGGING', defaultValue: true) 62 string (name: 'TAGGED_TESTS', 63 defaultValue: '', 64 description: 'A comma separated list of build tags for tests that should be executed (e.g. unstable_test). Default:', 65 trim: true) 66 string (name: 'INCLUDED_TESTS', 67 defaultValue: '.*', 68 description: 'A regex matching any fully qualified test file that should be executed (e.g. examples/helidon/). Default: .*', 69 trim: true) 70 string (name: 'EXCLUDED_TESTS', 71 defaultValue: '_excluded_test', 72 description: 'A regex matching any fully qualified test file that should not be executed (e.g. multicluster/|_excluded_test). Default: _excluded_test', 73 trim: true) 74 booleanParam (description: 'Whether to capture full cluster snapshot on test failure', name: 'CAPTURE_FULL_CLUSTER', defaultValue: false) 75 } 76 77 environment { 78 DOCKER_PLATFORM_CI_IMAGE_NAME = 'verrazzano-platform-operator-jenkins' 79 DOCKER_PLATFORM_PUBLISH_IMAGE_NAME = 'verrazzano-platform-operator' 80 GOPATH = '/home/opc/go' 81 GO_REPO_PATH = "${GOPATH}/src/github.com/verrazzano" 82 DOCKER_CREDS = credentials('github-packages-credentials-rw') 83 DOCKER_EMAIL = credentials('github-packages-email') 84 DOCKER_REPO = 'ghcr.io' 85 DOCKER_NAMESPACE = 'verrazzano' 86 87 NETRC_FILE = credentials('netrc') 88 CLUSTER_NAME = 'verrazzano' 89 POST_DUMP_FAILED_FILE = "${WORKSPACE}/post_dump_failed_file.tmp" 90 TESTS_EXECUTED_FILE = "${WORKSPACE}/tests_executed_file.tmp" 91 KUBECONFIG = "${WORKSPACE}/test_kubeconfig" 92 VERRAZZANO_KUBECONFIG = "${KUBECONFIG}" 93 OCR_CREDS = credentials('ocr-pull-and-push-account') 94 OCR_REPO = 'container-registry.oracle.com' 95 IMAGE_PULL_SECRET = 'verrazzano-container-registry' 96 INSTALL_PROFILE = "dev" 97 VZ_ENVIRONMENT_NAME = "default" 98 TEST_ROOT = "${WORKSPACE}/tests/e2e" 99 TEST_SCRIPTS_DIR = "${TEST_ROOT}/config/scripts" 100 INSTALL_CONFIG_FILE_KIND = "${TEST_SCRIPTS_DIR}/${params.CRD_API_VERSION}/install-verrazzano-kind.yaml" 101 VERRAZZANO_OPERATOR_IMAGE="${params.VERRAZZANO_OPERATOR_IMAGE}" 102 103 WEBLOGIC_PSW = credentials('weblogic-example-domain-password') // required by WebLogic application and console ingress test 104 DATABASE_PSW = credentials('todo-mysql-password') // required by console ingress test 105 106 // Environment variables required to capture cluster snapshot and bug report on test failure 107 DUMP_KUBECONFIG="${KUBECONFIG}" 108 DUMP_COMMAND="${GO_REPO_PATH}/verrazzano/tools/scripts/k8s-dump-cluster.sh" 109 DUMP_ROOT_DIRECTORY="${WORKSPACE}/test-cluster-snapshots" 110 CAPTURE_FULL_CLUSTER="${params.CAPTURE_FULL_CLUSTER}" 111 112 // Environment variable for Verrazzano CLI executable 113 VZ_COMMAND="${GO_REPO_PATH}/vz" 114 115 POST_INSTALL_DUMP="true" 116 117 VERRAZZANO_INSTALL_LOGS_DIR="${WORKSPACE}/verrazzano/platform-operator/scripts/install/build/logs" 118 VERRAZZANO_INSTALL_LOG="verrazzano-install.log" 119 120 // used for console artifact capture on failure 121 JENKINS_READ = credentials('jenkins-auditor') 122 OCI_CLI_AUTH="instance_principal" 123 OCI_OS_NAMESPACE = credentials('oci-os-namespace') 124 OCI_OS_ARTIFACT_BUCKET="build-failure-artifacts" 125 126 // used to emit metrics 127 PROMETHEUS_CREDENTIALS = credentials('prometheus-credentials') 128 TEST_ENV_LABEL = "kind" 129 TEST_ENV = "KIND" 130 K8S_VERSION_LABEL = "${params.KUBERNETES_CLUSTER_VERSION}" 131 132 // used to generate Ginkgo test reports 133 TEST_REPORT = "test-report.xml" 134 TEST_REPORT_DIR = "${WORKSPACE}/tests/e2e" 135 } 136 137 stages { 138 stage('Clean workspace and checkout') { 139 steps { 140 pipelineSetup() 141 } 142 } 143 144 stage('Run Acceptance Tests') { 145 environment { 146 KUBERNETES_CLUSTER_VERSION="${params.KUBERNETES_CLUSTER_VERSION}" 147 OCI_CLI_AUTH="instance_principal" 148 OCI_OS_NAMESPACE = credentials('oci-os-namespace') 149 OCI_OS_LOCATION="ephemeral/${env.BRANCH_NAME}/${SHORT_COMMIT_HASH}" 150 PIPELINE_CONFIG=getPipelineDescriptorPath() 151 } 152 steps { 153 echo "Executing pipeline configuration ${env.PIPELINE_CONFIG}" 154 runDynamicStages(env.PIPELINE_CONFIG) 155 } 156 post { 157 always { 158 archiveArtifacts artifacts: "**/coverage.html,**/logs/**,**/verrazzano_images.txt,**/*full-cluster*/**,**/*bug-report*/**,**/Screenshot*.png,**/ConsoleLog*.log,**/*${TEST_REPORT}", allowEmptyArchive: true 159 junit testResults: "**/${TEST_REPORT}", allowEmptyResults: true 160 } 161 failure { 162 script { 163 if ( fileExists(env.TESTS_EXECUTED_FILE) ) { 164 dumpK8sCluster('new-kind-acceptance-tests-cluster-snapshot') 165 } 166 } 167 } 168 success { 169 script { 170 if (EFFECTIVE_DUMP_K8S_CLUSTER_ON_SUCCESS == true && fileExists(env.TESTS_EXECUTED_FILE) ) { 171 dumpK8sCluster('new-kind-acceptance-tests-cluster-snapshot') 172 } 173 } 174 } 175 } 176 } 177 } 178 post { 179 always { 180 archiveArtifacts artifacts: "**/coverage.html,**/logs/**,**/verrazzano_images.txt,**/*cluster-snapshot*/**,**/Screenshot*.png,**/ConsoleLog*.log,**/*${TEST_REPORT}", allowEmptyArchive: true 181 junit testResults: "**/${TEST_REPORT}", allowEmptyResults: true 182 183 script { 184 runMakeCommand("cleanup") 185 } 186 } 187 failure { 188 postFailureProcessing() 189 } 190 cleanup { 191 deleteDir() 192 } 193 } 194 } 195 196 def getPipelineDescriptorPath() { 197 return "${WORKSPACE}/${params.PIPELINE_DESCRIPTOR}" 198 } 199 200 def getPipelineName() { 201 data = readYaml file: getPipelineDescriptorPath() 202 return data.metadata.name 203 } 204 205 def runDynamicStages(pipelineDescriptor) { 206 script { 207 data = readYaml file: pipelineDescriptor 208 println data 209 print "Name: " + data.metadata.name 210 print "Agent label: " + data.metadata.agent["label"] 211 212 testGroups = [:] 213 if (data.containsKey(testGroupsKey)) { 214 testGroups = processTestGroups(data[testGroupsKey]) 215 } 216 print "Test groups: " + testGroups 217 218 print "Stages: " + data.stages 219 stages = data.stages 220 for (stageDescriptor in stages) { 221 stageName = stageDescriptor.name 222 print "Stage: " + stageName 223 targetsMap = [:] 224 if (stageDescriptor.containsKey(targetsKey)) { 225 targetsMap.putAll(processTargets(stageDescriptor[targetsKey])) 226 } 227 if (stageDescriptor.containsKey(testGroupsKey)) { 228 groupNames = stageDescriptor[testGroupsKey] 229 targetsMap.putAll(addTestGroups(testGroups, groupNames)) 230 } 231 print "Stage Map to use: " + targetsMap 232 stage(stageName) { 233 try { 234 echo "Executing stage ${stageName}, targets " + targetsMap.keySet() 235 parallel targetsMap 236 } finally { 237 echo "Archiving artifacts for stage ${stageName}" 238 archiveArtifacts artifacts: '**/coverage.html,**/logs/*,**/*${TEST_REPORT},*/Screenshot*.png,**/ConsoleLog*.log,**/*cluster-snapshot*/**', allowEmptyArchive: true 239 junit testResults: '**/*test-result.xml', allowEmptyResults: true 240 } 241 } 242 } 243 } 244 } 245 246 // Returns an map of all targets keyed by display name 247 def processTargets(targets) { 248 targetMap = [:] 249 for (target in targets) { 250 targetName = target.name 251 if (target.containsKey(displayNameKey)) { 252 targetName = target[displayNameKey] 253 } 254 if (targetName.length() == 0) { 255 targetName = unknownTargetName 256 } 257 targetMap.putAll(buildTargetClosure(targetName, target.target)) 258 } 259 return targetMap 260 } 261 262 // Returns an aggregate Map of all targets associated with the list of test groups provided 263 def addTestGroups(testGroups, groupNames) { 264 targetsMap = [:] 265 for (groupName in groupNames) { 266 if (!testGroups.containsKey(groupName)) { 267 println("Group ${group} not found in groups map!") 268 continue 269 } 270 targets = testGroups[groupName] 271 targetsMap.putAll(processTargets(targets)) 272 } 273 return targetsMap 274 } 275 276 def processTestGroups(testGroups) { 277 groupsMap = [:] 278 for (group in testGroups) { 279 groupsMap.put(group.name, group.targets) 280 } 281 return groupsMap 282 } 283 284 def buildTargetClosure(targetName, targetToInvoke) { 285 echo "Building closure for ${targetToInvoke}" 286 return [ (targetName) : { 287 echo "Running target: ${targetToInvoke}" 288 runMakeCommand(targetToInvoke) 289 } 290 ] 291 } 292 293 def runTestTarget(testSuitePath, runParallel = "true", randomize = "true") { 294 return script { 295 sh """ 296 export TEST_SUITES="${testSuitePath}/..." 297 export RANDOMIZE_TESTS=${randomize} 298 export RUN_PARALLEL=${runParallel} 299 cd ${GO_REPO_PATH}/verrazzano/ci/generic 300 make test 301 """ 302 } 303 } 304 305 def runMakeCommand(makeTarget) { 306 sh """ 307 cd ${GO_REPO_PATH}/verrazzano/ci/generic 308 make ${makeTarget} 309 """ 310 } 311 312 def pipelineSetup() { 313 sh """ 314 echo "${NODE_LABELS}" 315 """ 316 317 script { 318 EFFECTIVE_DUMP_K8S_CLUSTER_ON_SUCCESS = getEffectiveDumpOnSuccess() 319 if (params.GIT_COMMIT_TO_USE == "NONE") { 320 echo "Specific GIT commit was not specified, use current head" 321 def scmInfo = checkout scm 322 env.GIT_COMMIT = scmInfo.GIT_COMMIT 323 env.GIT_BRANCH = scmInfo.GIT_BRANCH 324 } else { 325 echo "SCM checkout of ${params.GIT_COMMIT_TO_USE}" 326 def scmInfo = checkout([ 327 $class: 'GitSCM', 328 branches: [[name: params.GIT_COMMIT_TO_USE]], 329 doGenerateSubmoduleConfigurations: false, 330 extensions: [], 331 submoduleCfg: [], 332 userRemoteConfigs: [[url: env.SCM_VERRAZZANO_GIT_URL]]]) 333 env.GIT_COMMIT = scmInfo.GIT_COMMIT 334 env.GIT_BRANCH = scmInfo.GIT_BRANCH 335 // If the commit we were handed is not what the SCM says we are using, fail 336 if (!env.GIT_COMMIT.equals(params.GIT_COMMIT_TO_USE)) { 337 echo "SCM didn't checkout the commit we expected. Expected: ${params.GIT_COMMIT_TO_USE}, Found: ${scmInfo.GIT_COMMIT}" 338 exit 1 339 } 340 } 341 echo "SCM checkout of ${env.GIT_BRANCH} at ${env.GIT_COMMIT}" 342 } 343 344 sh """ 345 cp -f "${NETRC_FILE}" $HOME/.netrc 346 chmod 600 $HOME/.netrc 347 """ 348 349 script { 350 try { 351 sh """ 352 echo "${DOCKER_CREDS_PSW}" | docker login ${env.DOCKER_REPO} -u ${DOCKER_CREDS_USR} --password-stdin 353 """ 354 } catch(error) { 355 echo "docker login failed, retrying after sleep" 356 retry(4) { 357 sleep(30) 358 sh """ 359 echo "${DOCKER_CREDS_PSW}" | docker login ${env.DOCKER_REPO} -u ${DOCKER_CREDS_USR} --password-stdin 360 """ 361 } 362 } 363 } 364 sh """ 365 rm -rf ${GO_REPO_PATH}/verrazzano 366 mkdir -p ${GO_REPO_PATH}/verrazzano 367 tar cf - . | (cd ${GO_REPO_PATH}/verrazzano/ ; tar xf -) 368 """ 369 370 script { 371 def props = readProperties file: '.verrazzano-development-version' 372 VERRAZZANO_DEV_VERSION = props['verrazzano-development-version'] 373 TIMESTAMP = sh(returnStdout: true, script: "date +%Y%m%d%H%M%S").trim() 374 SHORT_COMMIT_HASH = sh(returnStdout: true, script: "git rev-parse --short=8 HEAD").trim() 375 DOCKER_IMAGE_TAG = "${VERRAZZANO_DEV_VERSION}-${TIMESTAMP}-${SHORT_COMMIT_HASH}" 376 // update the description with some meaningful info 377 setDisplayName() 378 currentBuild.description = params.KUBERNETES_CLUSTER_VERSION + " : " + SHORT_COMMIT_HASH + " : " + env.GIT_COMMIT + " : " + params.GIT_COMMIT_TO_USE 379 } 380 } 381 382 def postFailureProcessing() { 383 sh """ 384 curl -k -u ${JENKINS_READ_USR}:${JENKINS_READ_PSW} -o ${WORKSPACE}/build-console-output.log ${BUILD_URL}consoleText 385 """ 386 archiveArtifacts artifacts: '**/build-console-output.log', allowEmptyArchive: true 387 sh """ 388 curl -k -u ${JENKINS_READ_USR}:${JENKINS_READ_PSW} -o archive.zip ${BUILD_URL}artifact/*zip*/archive.zip 389 oci --region us-phoenix-1 os object put --force --namespace ${OCI_OS_NAMESPACE} -bn ${OCI_OS_ARTIFACT_BUCKET} --name ${env.JOB_NAME}/${env.BRANCH_NAME}/${env.BUILD_NUMBER}/archive.zip --file archive.zip 390 rm archive.zip 391 """ 392 script { 393 if (env.BRANCH_NAME == "master" || env.BRANCH_NAME ==~ "release-.*" || env.BRANCH_NAME ==~ "mark/*") { 394 slackSend ( message: "Job Failed - \"${env.JOB_NAME}\" build: ${env.BUILD_NUMBER}\n\nView the log at:\n ${env.BUILD_URL}\n\nBlue Ocean:\n${env.RUN_DISPLAY_URL}" ) 395 } 396 } 397 } 398 399 def getEffectiveDumpOnSuccess() { 400 def effectiveValue = params.DUMP_K8S_CLUSTER_ON_SUCCESS 401 if (FORCE_DUMP_K8S_CLUSTER_ON_SUCCESS.equals("true") && (env.BRANCH_NAME.equals("master"))) { 402 effectiveValue = true 403 echo "Forcing dump on success based on global override setting" 404 } 405 return effectiveValue 406 } 407 408 def setDisplayName() { 409 echo "Start setDisplayName" 410 def causes = currentBuild.getBuildCauses() 411 echo "causes: " + causes.toString() 412 for (cause in causes) { 413 def causeString = cause.toString() 414 echo "current cause: " + causeString 415 if (causeString.contains("UpstreamCause") && causeString.contains("Started by upstream project")) { 416 echo "This job was caused by " + causeString 417 if (causeString.contains("verrazzano-periodic-triggered-tests")) { 418 currentBuild.displayName = env.BUILD_NUMBER + " : PERIODIC" 419 } else if (causeString.contains("verrazzano-flaky-tests")) { 420 currentBuild.displayName = env.BUILD_NUMBER + " : FLAKY" 421 } 422 } 423 } 424 echo "End setDisplayName" 425 } 426 427 def dumpK8sCluster(dumpDirectory) { 428 sh """ 429 ${GO_REPO_PATH}/verrazzano/ci/scripts/capture_cluster_snapshot.sh ${dumpDirectory} 430 """ 431 }