/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * License); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

plugins { id 'org.apache.beam.module' }
applyJavaNature(
  publish: false,
  archivesBaseName: 'beam-sdks-java-load-tests',
  exportJavadoc: false
)

description = "Apache Beam :: SDKs :: Java :: Load Tests"


// When running via Gradle, this property selects the fully-qualified main class
// of the load test to launch (used as the JavaExec mainClass of the `run` task).
def mainClassProperty = "loadTest.mainClass"

// When running via Gradle, this property can be used to pass commandline arguments
// to the load-tests launch
def loadTestArgsProperty = "loadTest.args"

// When running via Gradle, this property sets the runner dependency
def runnerProperty = "runner"
def runnerDependency = (project.hasProperty(runnerProperty)
        ? project.getProperty(runnerProperty)
        : ":runners:direct-java")
def loadTestRunnerVersionProperty = "runner.version"
def loadTestRunnerVersion = project.findProperty(loadTestRunnerVersionProperty)
def isSparkRunner = runnerDependency.startsWith(":runners:spark:")
def isDataflowRunner = ":runners:google-cloud-dataflow-java".equals(runnerDependency)
def isDataflowRunnerV2 = isDataflowRunner && "V2".equals(loadTestRunnerVersion)
def runnerConfiguration = ":runners:direct-java".equals(runnerDependency) ? "shadow" : null

if (isDataflowRunner) {
  /*
   * We need to rely on manually specifying these evaluationDependsOn to ensure that
   * the following projects are evaluated before we evaluate this project. This is because
   * we are attempting to reference a property from the project directly.
   */
  if (isDataflowRunnerV2) {
    evaluationDependsOn(":runners:google-cloud-dataflow-java")
  } else {
    evaluationDependsOn(":runners:google-cloud-dataflow-java:worker")
  }
}

configurations {
  // A configuration for running the Load test launcher directly from Gradle, which
  // uses Gradle to put the appropriate dependencies on the Classpath rather than
  // bundling them into a fat jar
  gradleRun
}

dependencies {
  implementation enforcedPlatform(library.java.google_cloud_platform_libraries_bom)

  implementation library.java.kafka_clients
  implementation project(path: ":sdks:java:core", configuration: "shadow")
  implementation project(":sdks:java:io:synthetic")
  implementation project(":sdks:java:testing:test-utils")
  implementation project(":sdks:java:io:google-cloud-platform")
  implementation project(":sdks:java:io:kafka")
  implementation project(":sdks:java:io:kinesis")
  implementation library.java.aws_java_sdk_core
  implementation library.java.google_cloud_core
  implementation library.java.joda_time
  implementation library.java.vendored_guava_26_0_jre
  implementation library.java.slf4j_api

  gradleRun project(project.path)
  gradleRun project(path: runnerDependency, configuration: runnerConfiguration)
}

if (isSparkRunner) {
  configurations.gradleRun {
    // Using Spark runner causes a StackOverflowError if slf4j-jdk14 is on the classpath
    exclude group: "org.slf4j", module: "slf4j-jdk14"
  }
}

// Builds the argument list passed to the load-test main class: the space-separated
// value of -PloadTest.args, plus runner-specific flags when targeting Dataflow
// (container image for runner V2, worker jar for the legacy runner).
def getLoadTestArgs = {
  def loadTestArgs = project.findProperty(loadTestArgsProperty) ?: ""
  def loadTestArgsList = new ArrayList<String>()
  // The previous `loadTestArgsList as Collection<? super Collection>` coercion was a
  // semantically-wrong no-op (the list holds Strings); the plain call suffices.
  Collections.addAll(loadTestArgsList, loadTestArgs.split())

  if (isDataflowRunner) {
    if (isDataflowRunnerV2) {
      loadTestArgsList.add("--experiments=beam_fn_api,use_unified_worker,use_runner_v2,shuffle_mode=service")
      def sdkContainerImage = project.findProperty('sdkContainerImage') ?: project(":runners:google-cloud-dataflow-java").dockerJavaImageName
      loadTestArgsList.add("--sdkContainerImage=${sdkContainerImage}")
    } else {
      def dataflowWorkerJar = project.findProperty('dataflowWorkerJar') ?: project(":runners:google-cloud-dataflow-java:worker").shadowJar.archivePath
      // Provide job with a customizable worker jar.
      // With legacy worker jar, containerImage is set to empty (i.e. to use the internal build).
      // More context and discussions can be found in PR#6694.
      loadTestArgsList.add("--dataflowWorkerJar=${dataflowWorkerJar}")
      loadTestArgsList.add("--workerHarnessContainerImage=")
    }
  }
  return loadTestArgsList
}

// Launches the load test selected by -PloadTest.mainClass on the configured runner,
// with the classpath assembled from the gradleRun configuration.
task run(type: JavaExec) {
  def loadTestArgsList = getLoadTestArgs()
  if (isDataflowRunner) {
    if (isDataflowRunnerV2) {
      dependsOn ":runners:google-cloud-dataflow-java:buildAndPushDockerJavaContainer"
      finalizedBy ":runners:google-cloud-dataflow-java:cleanUpDockerJavaImages"
    } else {
      dependsOn ":runners:google-cloud-dataflow-java:worker:shadowJar"
    }
  }

  if (isSparkRunner) {
    // Disable UI
    systemProperty "spark.ui.enabled", "false"
    systemProperty "spark.ui.showConsoleProgress", "false"
  }

  mainClass = project.findProperty(mainClassProperty)
  classpath = configurations.gradleRun
  args loadTestArgsList.toArray()
}