github.com/treeverse/lakefs@v1.24.1-0.20240520134607-95648127bfb0/clients/spark/build.sbt

lazy val projectVersion = "0.13.0"
version := projectVersion
lazy val hadoopVersion = "3.2.1"
ThisBuild / isSnapshot := false
ThisBuild / scalaVersion := "2.12.12"

name := "lakefs-spark-client"
organization := "io.lakefs"
organizationName := "Treeverse Labs"
organizationHomepage := Some(url("http://treeverse.io"))
description := "Spark client for lakeFS object metadata."
licenses := List(
  "Apache 2" -> url("http://www.apache.org/licenses/LICENSE-2.0.txt")
)
homepage := Some(url("https://lakefs.io"))

javacOptions ++= Seq("-source", "1.8", "-target", "1.8")
scalacOptions += "-target:jvm-1.8"
semanticdbEnabled := true // enable SemanticDB
semanticdbVersion := scalafixSemanticdb.revision
scalacOptions += "-Ywarn-unused-import"
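
// With SemanticDB enabled and -Ywarn-unused-import on, unused imports can
// be cleaned up automatically with Scalafix (illustrative invocation,
// assuming the sbt-scalafix plugin that provides scalafixSemanticdb above):
//   sbt "scalafix RemoveUnused"
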
Compile / PB.includePaths += (Compile / resourceDirectory).value
Compile / PB.protoSources += (Compile / resourceDirectory).value
Compile / PB.targets := Seq(
  scalapb.gen() -> (Compile / sourceManaged).value / "scalapb"
)
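
// The three settings above make ScalaPB compile any .proto files found
// under src/main/resources into Scala case classes beneath
// target/.../src_managed/main/scalapb.  As an illustrative variant (not
// used by this build), generator options can be passed to scalapb.gen():
//   Compile / PB.targets := Seq(
//     scalapb.gen(flatPackage = true) -> (Compile / sourceManaged).value / "scalapb"
//   )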

testFrameworks += new TestFramework("org.scalameter.ScalaMeterFramework")
Test / logBuffered := false
// Uncomment the following to get accurate benchmarks with just "sbt test";
// otherwise tell sbt to run
//     "testOnly io.treeverse.clients.ReadSSTableBenchmark"
// (or similar):
//
//     Test / parallelExecution := false

// Uncomment to get (very) full stacktraces in test:
//     Test / testOptions += Tests.Argument("-oF")
buildInfoKeys := Seq[BuildInfoKey](name, version, scalaVersion, sbtVersion)
buildInfoPackage := "io.treeverse.clients"

enablePlugins(S3Plugin, BuildInfoPlugin)
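
// BuildInfoPlugin generates an object io.treeverse.clients.BuildInfo at
// compile time holding the keys listed above; application code can then
// read them, for instance (illustrative snippet):
//   import io.treeverse.clients.BuildInfo
//   println(s"${BuildInfo.name} ${BuildInfo.version}")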

libraryDependencies ++= Seq(
  "io.lakefs" % "sdk" % "1.0.0",
  "org.apache.spark" %% "spark-sql" % "3.1.2" % "provided",
  "com.thesamet.scalapb" %% "scalapb-runtime" % scalapb.compiler.Version.scalapbVersion % "protobuf",
  "org.apache.hadoop" % "hadoop-aws" % hadoopVersion % "provided",
  "org.apache.hadoop" % "hadoop-common" % hadoopVersion % "provided",
  "org.apache.hadoop" % "hadoop-azure" % hadoopVersion % "provided",
  "org.json4s" %% "json4s-native" % "3.6.12",
  "org.rogach" %% "scallop" % "4.0.3",
  "com.azure" % "azure-core" % "1.10.0",
  "com.azure" % "azure-storage-blob" % "12.9.0",
  "com.azure" % "azure-storage-blob-batch" % "12.7.0",
  "com.azure" % "azure-identity" % "1.2.0",
  "com.amazonaws" % "aws-java-sdk-bundle" % "1.12.194" % "provided",
  // Snappy is JNI :-(.  However it does claim to work with
  // ClassLoaders, and (even more importantly!) using a preloaded JNI
  // version will probably continue to work because the C language API
  // is quite stable.  Take the version documented in Databricks
  // Runtime 7.6, and note that it changes in 8.3 :-(
  "org.xerial.snappy" % "snappy-java" % "1.1.8.4",
    64    "dev.failsafe" % "failsafe" % "3.2.4",
    65    "com.squareup.okhttp3" % "mockwebserver" % "4.10.0" % "test",
    66    "xerces" % "xercesImpl" % "2.12.2" % "test",
    67    "org.scalatest" %% "scalatest" % "3.2.16" % "test",
    68    "org.scalatestplus" %% "scalacheck-1-17" % "3.2.16.0" % "test",
    69    "org.scalatestplus" %% "mockito-4-11" % "3.2.16.0" % "test",
    70    "org.mockito" % "mockito-all" % "1.10.19" % "test",
    71    "com.dimafeng" %% "testcontainers-scala-scalatest" % "0.40.10" % "test",
    72    "com.lihaoyi" %% "upickle" % "1.4.0" % "test",
    73    "com.lihaoyi" %% "os-lib" % "0.7.8" % "test",
    74    // Test with an up-to-date fasterxml.
    75    "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.14.2" % "test",
    76    "com.storm-enroute" %% "scalameter" % "0.19" % "test"
    77  )
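
// "provided" dependencies (Spark, Hadoop, the AWS bundle) are expected on
// the cluster's classpath at runtime and are left out of the assembly jar.
// A hedged usage sketch (jar name and main class illustrative only):
//   spark-submit \
//     --jars lakefs-spark-client-assembly-0.13.0.jar \
//     --class <main.class.of.your.job> ...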

def rename(prefix: String) = ShadeRule.rename(prefix -> "io.lakefs.spark.shade.@0")
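
// In sbt-assembly shade rules, "@0" expands to the entire matched name, so
// for example rename("okhttp3.**") relocates okhttp3.OkHttpClient to
// io.lakefs.spark.shade.okhttp3.OkHttpClient inside the assembly jar.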

assembly / assemblyShadeRules := Seq(
  rename("org.apache.http.**").inAll,
  rename("scalapb.**").inAll,
  rename("com.google.protobuf.**").inAll,
  rename("com.google.common.**")
    .inLibrary("com.google.guava" % "guava" % "30.1-jre",
               "com.google.guava" % "failureaccess" % "1.0.1")
    .inProject,
  rename("scala.collection.compat.**").inAll,
  rename("okio.**").inAll,
  rename("okhttp3.**").inAll,
  rename("reactor.netty.**").inAll,
  rename("reactor.util.**").inAll
)

s3Upload / mappings := Seq(
  (assembly / assemblyOutputPath).value ->
    s"${name.value}/${version.value}/${(assembly / assemblyJarName).value}"
)
s3Upload / s3Host := "treeverse-clients-us-east.s3.amazonaws.com"
s3Upload / s3Progress := true
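
// With sbt-assembly's default jar name, the mapping above uploads to a key
// like lakefs-spark-client/0.13.0/lakefs-spark-client-assembly-0.13.0.jar
// in the treeverse-clients-us-east bucket (key shown for illustration).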

assembly / assemblyMergeStrategy := {
  case PathList("META-INF", xs @ _*) =>
    (xs map { _.toLowerCase }) match {
      case ("manifest.mf" :: Nil) | ("index.list" :: Nil) | ("dependencies" :: Nil) =>
        MergeStrategy.discard
      case ps @ (_ :: _) if ps.last.endsWith(".sf") || ps.last.endsWith(".dsa") =>
        MergeStrategy.discard
      case "plexus" :: _ =>
        MergeStrategy.discard
      case "services" :: _ =>
        MergeStrategy.filterDistinctLines
      case ("spring.schemas" :: Nil) | ("spring.handlers" :: Nil) =>
        MergeStrategy.filterDistinctLines
      case _ => MergeStrategy.first
    }
  case _ => MergeStrategy.first
}
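
// Rationale: jar signature files (*.SF, *.DSA) become invalid once classes
// from many jars are merged into one, so they are discarded; META-INF/services
// files are merged line-by-line (filterDistinctLines) so java.util.ServiceLoader
// still sees every provider from every merged jar.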

ThisBuild / versionScheme := Some("early-semver")