SBT - multiproject build with projectmatrix and additional configurations
ThisBuild / organization := "com.example"
val artifactBase = "pkgs.dev.azure.com"
val artifactTree = "SomeTree"
val artifactOrganization = s"SomeCompanyName.${artifactBase}"
val artifactGroup = "SomeDepartmentName"
val artifactProject = "SomeProjectName"
val artifactURL = s"https://${artifactOrganization}/${artifactGroup}/_packaging/${artifactProject}/maven/v1"
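// With the sample values above, artifactURL resolves to:
//   https://SomeCompanyName.pkgs.dev.azure.com/SomeDepartmentName/_packaging/SomeProjectName/maven/v1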
////
//
// IMPORTANT :: READ CAREFULLY AND PERFORM THESE ACTIONS BEFORE ATTEMPTING TO BUILD THIS PROJECT
// =============================================================================================
//
// 1. Create a Personal Access Token as explained below:
//
// https://docs.microsoft.com/en-us/azure/devops/organizations/accounts/use-personal-access-tokens-to-authenticate
//
// Make sure that you create a token with PACKAGING READ/WRITE SCOPE.
//
// 2. Create environment variables based on the values defined above:
//
// $ export artifactBase=pkgs.dev.azure.com    ## values copied from the variables on top of this file
// $ export artifactTree=SomeTree
// $ export artifactProject=SomeProjectName
//
// 3. Create the credentials file under ${HOME}/.sbt/.credentials like this:
//
// $ mkdir -p ${HOME}/.sbt/.credentials/${artifactBase}/${artifactTree}
// $ chmod -R 700 ${HOME}/.sbt/.credentials
// $ touch ${HOME}/.sbt/.credentials/${artifactBase}/${artifactTree}/${artifactProject}
//
// 4. See below an EXAMPLE of ${HOME}/.sbt/.credentials/${artifactBase}/${artifactTree}/${artifactProject}
//
// realm=
// host=pkgs.dev.azure.com
// user=richard.gomes@example.com
// password=this_is_my_personal_access_token_obtained_at_step_1_of_these_instructions
/////
ThisBuild / credentials += findCredentials
def findCredentials: Credentials = {
  sys.env.get("SYSTEM_ACCESSTOKEN") match {
    case Some(key) => Credentials("", artifactBase, artifactTree, key)
    case None      => Credentials(Path.userHome / ".sbt" / ".credentials" / artifactBase / artifactTree / artifactProject)
  }
}
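// NOTE: The SYSTEM_ACCESSTOKEN branch is meant for CI agents. On Azure Pipelines the token
// is not exposed to scripts by default; a minimal pipeline sketch (assuming standard Azure
// Pipelines YAML, not part of this gist) would be:
//
//   steps:
//     - script: sbt publish
//       env:
//         SYSTEM_ACCESSTOKEN: $(System.AccessToken)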
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
import ProjectSyntax._
ThisBuild / versionScheme := Some("early-semver")
ThisBuild / versionPolicyIntention := Compatibility.BinaryAndSourceCompatible
ThisBuild / publishMavenStyle := true
ThisBuild / publishConfiguration := publishConfiguration.value.withOverwrite(false)
ThisBuild / publishLocalConfiguration := publishLocalConfiguration.value.withOverwrite(true)
ThisBuild / publishTo := Some(artifactProject at artifactURL)
ThisBuild / assembly / assemblyMergeStrategy := {
  // Discard everything under META-INF (manifest, index, dependency lists, ...)
  case PathList("META-INF", _*) => MergeStrategy.discard
  case _                        => MergeStrategy.first
}
Global / excludeLintKeys += ThisBuild / assembly / assemblyMergeStrategy
initialize := {
  val _ = initialize.value // run the previous initialization
  val required = "1.8"
  val current  = sys.props("java.specification.version")
  assert(current == required, s"Unsupported JDK: java.specification.version $current != $required")
}
def buildinfoSettings: Seq[Setting[_]] =
  Seq(
    buildInfoPackage := s"${organization.value}.${name.value}".replace("-", "."),
    buildInfoKeys := Seq[BuildInfoKey](organization, name, version, scalaVersion, sbtVersion))
def commonDependencies: Seq[Setting[_]] =
  Seq(
    libraryDependencies ++=
      Seq(
        // Enumeratum is already a transitive dependency pulled in by the CDM Framework.
        "com.beachape" %% "enumeratum" % "1.7.0"))
def spark311Dependencies: Seq[Setting[_]] =
  Seq(
    libraryDependencies ++=
      Seq(
        "org.apache.spark" %% "spark-sql" % "3.1.1"))
def spark321Dependencies: Seq[Setting[_]] =
  Seq(
    libraryDependencies ++=
      Seq(
        "org.apache.spark" %% "spark-sql" % "3.2.1"))
def testDependencies: Seq[Setting[_]] =
  Seq(
    testFrameworks += new TestFramework("utest.runner.Framework"),
    libraryDependencies ++=
      Seq(
        "com.lihaoyi" %% "utest" % "0.7.10" % "test;it;tools"))
def compileSettings: Seq[Setting[_]] =
  Seq(
    compileOrder := CompileOrder.JavaThenScala,
    scalacOptions ++= Seq("-unchecked", "-deprecation", "-Wconf:any:error"))
def testSettings: Seq[Setting[_]] =
  Defaults.itSettings ++
    Seq(
      IntegrationTest / dependencyClasspath := (IntegrationTest / dependencyClasspath).value ++ (Test / exportedProducts).value)
def disablePublishing: Seq[Setting[_]] =
  Seq(
    publish / skip := true,
    publishLocal / skip := true)
val scala212 = "2.12.15"
val scala213 = "2.13.7"
lazy val root =
  (project in file("."))
    .withDefaultConfigurations
    .settings(compileSettings)
    .settings(testSettings)
    .settings(commonDependencies)
    .settings(testDependencies)
    .settings(scalaVersion := scala212)
    .settings(spark311Dependencies) //FIXME: should be: .settings(spark321Dependencies)
    .settings(buildinfoSettings)
    .enablePlugins(BuildInfoPlugin)
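// Typical workflow for this single-target build (a sketch; commands assume a standard sbt
// installation and the credentials setup described at the top of this file):
//
//   $ sbt compile        ## compiles against Spark 3.1.1 on Scala 2.12
//   $ sbt test it:test   ## runs unit tests, then integration tests
//   $ sbt publish        ## publishes to the Azure Artifacts feed at artifactURL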
//NOTE: // Only needed if multiple targets are desired.
//NOTE: // Due to a limitation of project-matrix (an SBT plugin), all sources would have to be relocated to subdirectories.
//NOTE: lazy val root =
//NOTE: (project in file("."))
//NOTE: .settings(disablePublishing:_*)
//NOTE: .aggregate(core.projectRefs:_*)
//NOTE:
//NOTE: lazy val core =
//NOTE: (projectMatrix in file("core"))
//NOTE: .configs(IntegrationTest)
//NOTE: .settings(compileSettings)
//NOTE: .settings(testSettings)
//NOTE: .settings(commonDependencies)
//NOTE: .settings(testDependencies)
//NOTE: .customRow(
//NOTE: scalaVersions = Seq(scala212),
//NOTE: axisValues = Seq(SparkAxis.v311, VirtualAxis.jvm),
//NOTE: _.settings(moduleName := name.value + SparkAxis.v311.idSuffix, spark311Dependencies))
//NOTE: .customRow(
//NOTE: scalaVersions = Seq(scala212, scala213),
//NOTE: axisValues = Seq(SparkAxis.v321, VirtualAxis.jvm),
//NOTE: _.settings(moduleName := name.value + SparkAxis.v321.idSuffix, spark321Dependencies))
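//NOTE: With the customRows above, sbt-projectmatrix would synthesize one subproject per
//NOTE: (Scala version, Spark axis) pair, so published artifacts would carry both suffixes,
//NOTE: e.g. core_311_2.12, core_321_2.12 and core_321_2.13 (a sketch; exact ids depend on
//NOTE: moduleName and the plugin's naming rules).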
//file: project/Configs.scala
import sbt._
object Configs {
  val FunctionalTest  = config("ft") extend (Test)
  val AcceptanceTest  = config("at") extend (Test)
  val PerformanceTest = config("pt") extend (Test)
  val Tools           = config("tools") extend (Test)
}
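// Usage sketch: each custom configuration is addressable by its id once a project mixes it
// in via ProjectSyntax (see project/ProjectSyntax.scala below), e.g.:
//
//   $ sbt ft:test      ## functional tests
//   $ sbt at:test      ## acceptance tests
//   $ sbt pt:test      ## performance tests
//   $ sbt tools:test   ## ad-hoc tooling written as test suites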
//file: project/plugins.sbt
addSbtPlugin("com.eed3si9n" % "sbt-projectmatrix" % "0.9.0")
addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0")
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "1.2.0")
addSbtPlugin("ch.epfl.scala" % "sbt-version-policy" % "2.0.1")
//file: project/ProjectSyntax.scala
object ProjectSyntax {
  import sbt._
  import sbt.Keys._
  import Configs._
  val mainDependencyManagement = "compile->compile;test->compile,test;it->compile,test;at->compile,test;ft->compile,test;pt->compile,test;tools->compile,test"
  val testDependencyManagement = "compile;test;it;ft;at;pt;tools"
  implicit class ImplicitProjectSyntax(project: sbt.Project) {
    // Extension methods over sbt.Project; defined as defs so that each configuration
    // stack is only built when the corresponding method is invoked.
    def withDefaultConfigurations: sbt.Project =
      project
        .configs(Compile, Test, IntegrationTest, AcceptanceTest, FunctionalTest, PerformanceTest, Tools)
        .settings(testSettings: _*)
        .settings(itSettings: _*)
        .settings(atSettings: _*)
        .settings(ftSettings: _*)
        .settings(ptSettings: _*)
        .settings(toolsSettings: _*)
    def withTestConfiguration: sbt.Project =
      project
        .configs(Compile, Test)
        .settings(testSettings: _*)
    def withIntegrationTestConfiguration: sbt.Project =
      project
        .configs(Compile, Test, IntegrationTest)
        .settings(testSettings: _*)
        .settings(itSettings: _*)
    def withAcceptanceTestConfiguration: sbt.Project =
      project
        .configs(Compile, Test, IntegrationTest, AcceptanceTest)
        .settings(testSettings: _*)
        .settings(itSettings: _*)
        .settings(atSettings: _*)
    def withFunctionalTestConfiguration: sbt.Project =
      project
        .configs(Compile, Test, IntegrationTest, AcceptanceTest, FunctionalTest)
        .settings(testSettings: _*)
        .settings(itSettings: _*)
        .settings(atSettings: _*)
        .settings(ftSettings: _*)
    def withPerformanceTestConfiguration: sbt.Project =
      project
        .configs(Compile, Test, IntegrationTest, AcceptanceTest, FunctionalTest, PerformanceTest)
        .settings(testSettings: _*)
        .settings(itSettings: _*)
        .settings(atSettings: _*)
        .settings(ftSettings: _*)
        .settings(ptSettings: _*)
  }
  def additionalTestFrameworks: Seq[Setting[_]] =
    Seq(
      testFrameworks += new TestFramework("scalaprops.ScalapropsFramework"),
      testFrameworks += new TestFramework("utest.runner.Framework"))
  def testSettings: Seq[Setting[_]] =
    inConfig(Test)(Defaults.testSettings ++ additionalTestFrameworks)
  def itSettings: Seq[Setting[_]] =
    inConfig(IntegrationTest)(Defaults.testSettings ++ Defaults.itSettings ++ additionalTestFrameworks ++
      Seq(
        unmanagedSourceDirectories   ++= (Test / sourceDirectories).value,
        unmanagedResourceDirectories ++= (Test / resourceDirectories).value))
  def atSettings: Seq[Setting[_]] =
    inConfig(AcceptanceTest)(Defaults.testSettings ++ additionalTestFrameworks ++
      Seq(
        unmanagedSourceDirectories   ++= (Test / sourceDirectories).value,
        unmanagedResourceDirectories ++= (Test / resourceDirectories).value))
  def ftSettings: Seq[Setting[_]] =
    inConfig(FunctionalTest)(Defaults.testSettings ++ additionalTestFrameworks ++
      Seq(
        unmanagedSourceDirectories   ++= (Test / sourceDirectories).value,
        unmanagedResourceDirectories ++= (Test / resourceDirectories).value))
  def ptSettings: Seq[Setting[_]] =
    inConfig(PerformanceTest)(Defaults.testSettings ++ additionalTestFrameworks ++
      Seq(
        unmanagedSourceDirectories   ++= (Test / sourceDirectories).value,
        unmanagedResourceDirectories ++= (Test / resourceDirectories).value))
  def toolsSettings: Seq[Setting[_]] =
    inConfig(Tools)(Defaults.testSettings ++ Classpaths.configSettings ++ additionalTestFrameworks ++
      Seq(
        unmanagedSourceDirectories   ++= (Test / sourceDirectories).value,
        unmanagedResourceDirectories ++= (Test / resourceDirectories).value))
  def inPlaceTests(c: Configuration):  Seq[Setting[_]] = forkSettings(c, forked = false, parallel = false)
  def forkedTests(c: Configuration):   Seq[Setting[_]] = forkSettings(c, forked = true,  parallel = false)
  def parallelTests(c: Configuration): Seq[Setting[_]] = forkSettings(c, forked = true,  parallel = true)
  def forkSettings(c: Configuration, forked: Boolean, parallel: Boolean): Seq[Setting[_]] =
    inConfig(c)(
      Seq(
        fork := forked,
        parallelExecution := parallel))
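  // Usage sketch (assumption: applied from build.sbt after importing ProjectSyntax._):
  //
  //   lazy val core = (project in file("core"))
  //     .withIntegrationTestConfiguration
  //     .settings(forkedTests(IntegrationTest): _*)
  //     .settings(parallelTests(Test): _*)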
}
//file: project/SparkAxis.scala
import sbt._
case class SparkAxis(idSuffix: String, directorySuffix: String) extends VirtualAxis.WeakAxis
object SparkAxis {
  val v311 = SparkAxis("_311", "spark_3.1.1")
  val v321 = SparkAxis("_321", "spark_3.2.1")
}
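// NOTE: directorySuffix lets each axis keep variant-specific sources in suffix-named source
// directories alongside the shared ones (a sketch of the plugin's convention; see the
// sbt-projectmatrix documentation for the exact directory layout).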