// build.sbt for the lakeFS filesystem Spark test app.
// Builds one assembly per supported Spark line (2.4.x on Scala 2.11, 3.0.x on
// Scala 2.12) and uploads the fat jars to S3 via sbt-s3.

import build.BuildType

val baseName = "lakefsfs-test"

val projectVersion = "0.1.0"

// Spark versions 2.4.7 and 3.0.1 use different Scala versions. Changing this is a deep
// change, so key the Spark distinction by the Scala distinction. sbt doesn't appear to
// support other ways of changing emitted Scala binary versions using the same compiler.

// SO https://stackoverflow.com/a/60177627/192263 hints that we cannot use 2.11 here before
// this version
val scala211Version = "2.11.12"
val scala212Version = "2.12.12"

// Both generated projects compile the same single source tree at the repo root.
def settingsToCompileIn() = {
  Seq(
    Compile / scalaSource := (ThisBuild / baseDirectory).value / "src" / "main" / "scala",
  )
}

// Generates one sbt subproject per Spark/Scala/Hadoop combination described by
// `buildType`, sharing sources but emitting into a per-build target directory.
def generateProject(buildType: BuildType) =
  Project(s"${baseName}-${buildType.name}", file(s"target/${baseName}-${buildType.name}"))
    .settings(
      sharedSettings,
      s3UploadSettings,
      settingsToCompileIn(),
      scalaVersion := buildType.scalaVersion,
      libraryDependencies ++= Seq(
        // We link directly with our hadoop-lakefs in order to have access to our version of the api-client,
        // the same version bundled into our filesystem; this version already shades all the right dependencies, like the gson
        // package.
        // In case of using the lakefs client library, we will need to shade the libraries required by the client api, as
        // hadoop-lakefs doesn't shade the client and also uses it.
        "io.lakefs" % "hadoop-lakefs-assembly" % "0.1.0",
        "org.apache.spark" %% "spark-sql" % buildType.sparkVersion % "provided",
        "org.apache.hadoop" % "hadoop-aws" % buildType.hadoopVersion,
        "org.apache.hadoop" % "hadoop-common" % buildType.hadoopVersion,
      ),
      target := { baseDirectory.value / "target" / s"${baseName}-${buildType.name}" }
    ).enablePlugins(S3Plugin)

val spark2Type = new BuildType("247", scala211Version, "2.4.7", "2.7.7")
val spark3Type = new BuildType("301", scala212Version, "3.0.1", "2.7.7")

lazy val proj2 = generateProject(spark2Type)
lazy val proj3 = generateProject(spark3Type)

// The root project only aggregates the generated subprojects; it builds and
// publishes nothing itself.
lazy val root = (project in file("."))
  .aggregate(proj2, proj3)
  .settings(
    compile / skip := true,
    assembly / skip := true,
    publish / skip := true,
  )

lazy val assemblySettings = Seq(
  // Duplicate files across dependency jars are expected; keep the first copy.
  assembly / assemblyMergeStrategy := (_ => MergeStrategy.first),
  // Shade transport/compat libraries so the test jar cannot clash with the
  // versions bundled by Spark or hadoop-lakefs.
  assembly / assemblyShadeRules := Seq(
    ShadeRule.rename("okio.**" -> "io.lakefs.test.shade.@0").inAll,
    ShadeRule.rename("okhttp3.**" -> "io.lakefs.test.shade.@0").inAll,
    ShadeRule.rename("scala.collection.compat.**" -> "io.lakefs.test.shade.@0").inAll,
  ),
)

// Upload assembly jars to S3
lazy val s3UploadSettings = Seq(
  // NOTE: use sbt 1.x slash syntax consistently (was `assemblyOutputPath in assembly`,
  // the deprecated sbt 0.13 scoping form).
  s3Upload / mappings := Seq(
    (assembly / assemblyOutputPath).value ->
      s"${name.value}/${version.value}/${(assembly / assemblyJarName).value}"
  ),
  s3Upload / s3Host := "treeverse-clients-us-east.s3.amazonaws.com",
  s3Upload / s3Progress := true
)

lazy val commonSettings = Seq(
  version := projectVersion,
  // Use an older JDK to be Spark compatible
  javacOptions ++= Seq("-source", "1.8", "-target", "1.8"),
  scalacOptions += "-target:jvm-1.8"
)

lazy val sharedSettings = commonSettings ++ assemblySettings

ThisBuild / organization := "io.treeverse"
ThisBuild / organizationName := "Treeverse Labs"
ThisBuild / organizationHomepage := Some(url("http://treeverse.io"))
ThisBuild / description := "Spark test app for lakeFS filesystem."
// Use sbt's `url(...)` helper (as elsewhere in this file) instead of the
// `new URL(...)` constructor, which is deprecated since JDK 20.
ThisBuild / licenses := List("Apache 2" -> url("http://www.apache.org/licenses/LICENSE-2.0.txt"))
ThisBuild / homepage := Some(url("https://github.com/treeverse/lakeFS"))