package sbtgenclasspath

import sbt._, Keys._
import sbtsparkpackage.SparkPackagePlugin.autoImport._
import libdeps.LibVers._

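/**
 * Writes plain-text classpath files under `<baseDirectory>/.sbt.paths` so an
 * external launcher script can run the project without going through sbt:
 *
 *  - `SBT_RUNTIME_CLASSPATH`: the full Runtime classpath, ':'-separated
 *  - `SBT_SPARK_PACKAGE_CLASSPATH`: only the jars corresponding to declared
 *    Spark packages and `extraSparkSubmitModules` (as the names suggest,
 *    intended to be handed to spark-submit)
 */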
object GenClasspathPlugin extends sbt.AutoPlugin {

  object autoImport {

    lazy val genClasspath = taskKey[Unit]("Build runnable script with classpath")
    lazy val extraSparkSubmitModules = settingKey[Seq[ModuleID]]("Additional spark-submit jar dependencies")
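    // Example (hypothetical coordinates) of declaring extra spark-submit jars:
    //   extraSparkSubmitModules += "com.typesafe.scala-logging" %% "scala-logging-api" % "2.1.2"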

    lazy val genClasspathSettings: Seq[Def.Setting[_]] = Seq(

      extraSparkSubmitModules := Seq.empty[ModuleID],

      genClasspath := {
        import java.io.PrintWriter

        // Hoist every `.value` lookup to the top of the task body: sbt's task
        // macro must not see `.value` inside the by-name blocks passed to
        // writeClasspath below.
        val runtimeCp = (fullClasspath in Runtime).value
        val compileCp = (fullClasspath in Compile).value
        val extraSpMods = (extraSparkSubmitModules in Compile).value
        val spDeps = spDependencies.value

        val sbtPathRoot = baseDirectory.value / ".sbt.paths"
        sbtPathRoot.mkdirs()

        // Writes one ':'-separated classpath to .sbt.paths/SBT_<cpType>_CLASSPATH.
        def writeClasspath(cpType: String)(body: => String): Unit = {
          val fout = new PrintWriter((sbtPathRoot / s"SBT_${cpType}_CLASSPATH").toString)
          println(s"Building ${cpType} classpath for current project")
          try fout.write(body) finally fout.close()
        }

        writeClasspath("RUNTIME") {
          runtimeCp.files.map(_.toString).mkString(":")
        }

        writeClasspath("SPARK_PACKAGE") {
          // A Spark package coordinate looks like "org/name:version", where the
          // version may carry a "-s_<scalaMajorVer>" cross-build suffix.
          val patt = s"(.+?)/(.+?):(.+?)(-s_${scalaMajorVer})?".r
          val pkgs = spDeps.map {
            case patt(orgName, pkgName, pkgVer, stem) =>
              if (stem != null) {
                println(s"org ${orgName}, pkg ${pkgName}, ver ${pkgVer}, ${stem}")
                s"${pkgName}-${pkgVer}${stem}.jar"
              } else {
                println(s"org ${orgName}, pkg ${pkgName}, ver ${pkgVer}")
                s"${pkgName}-${pkgVer}.jar"
              }
            case other =>
              sys.error(s"Unrecognized spark package coordinate: ${other}")
          }.toSet

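          // `pkgs` now holds the expected jar file names, e.g. (hypothetical)
          // "graphframes/graphframes:0.5.0-s_2.11" -> "graphframes-0.5.0-s_2.11.jar".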
          // TODO: not knowing the proper way, I just fall back to Regex
          val extraSpModIds = extraSpMods.flatMap { mod =>
            // e.g. "com.typesafe.scala-logging:scala-logging-api:2.1.2"
            //   -> scala-logging-api_2.11-2.1.2.jar
            val modPatt = "(.+?):(.+?):(.+?)".r
            mod.toString match {
              case modPatt(orgName, pkgName, pkgVer) =>
                // Cover both the cross-built and the plain artifact name.
                Seq(s"${pkgName}_${scalaMajorVer}-${pkgVer}.jar", s"${pkgName}-${pkgVer}.jar")
              case _ => Seq.empty
            }
          }.toSet

          compileCp.files.filter { cpFile =>
            val cpName = cpFile.getName
            println(cpName)
            (pkgs contains cpName) || (extraSpModIds contains cpName)
          }.map(_.toString).mkString(":")
        }
      }
    )
  }
  import autoImport._

  override def requires = sbt.plugins.JvmPlugin

  // This plugin is automatically enabled for all projects that have the JvmPlugin.
  override def trigger = allRequirements

  // A group of settings automatically added to projects, in both Compile and Test.
  override val projectSettings =
    inConfig(Compile)(genClasspathSettings) ++ inConfig(Test)(genClasspathSettings)
}
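
// Usage sketch (illustrative, not part of the plugin): after `sbt genClasspath`,
// a launcher script could consume the generated files, e.g. in a POSIX shell
// (hypothetical main class and jar names):
//
//   CP="$(cat .sbt.paths/SBT_RUNTIME_CLASSPATH)"
//   SPARK_JARS="$(tr ':' ',' < .sbt.paths/SBT_SPARK_PACKAGE_CLASSPATH)"
//   spark-submit --jars "$SPARK_JARS" --class my.Main target/scala-2.11/app.jar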