Skip to content

Commit

Permalink
[SPARK-1971] Update MIMA to compare against Spark 1.0.0
Browse files Browse the repository at this point in the history
Author: Prashant Sharma <[email protected]>

Closes apache#910 from ScrapCodes/enable-mima/spark-core and squashes the following commits:

79f3687 [Prashant Sharma] updated Mima to check against version 1.0
1e8969c [Prashant Sharma] Spark core missed out on MiMa settings, so in effect we never tested Spark core for MiMa-related errors.
  • Loading branch information
ScrapCodes authored and conviva-zz committed Sep 4, 2014
1 parent 5c6e123 commit d420db7
Show file tree
Hide file tree
Showing 3 changed files with 7 additions and 34 deletions.
32 changes: 3 additions & 29 deletions project/MimaBuild.scala
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ object MimaBuild {
// Read package-private excludes from file
val excludeFilePath = (base.getAbsolutePath + "/.mima-excludes")
val excludeFile = file(excludeFilePath)
val packagePrivateList: Seq[String] =
val ignoredClasses: Seq[String] =
if (!excludeFile.exists()) {
Seq()
} else {
Expand Down Expand Up @@ -60,35 +60,9 @@ object MimaBuild {
excludePackage("org.apache.spark." + packageName)
}

val packagePrivateExcludes = packagePrivateList.flatMap(excludeClass)
val externalExcludeFileClasses = ignoredClasses.flatMap(excludeClass)

/* Excludes specific to a given version of Spark. When comparing the given version against
its immediate predecessor, the excludes listed here will be applied. */
val versionExcludes =
SparkBuild.SPARK_VERSION match {
case v if v.startsWith("1.0") =>
Seq(
excludeSparkPackage("api.java"),
excludeSparkPackage("mllib"),
excludeSparkPackage("streaming")
) ++
excludeSparkClass("rdd.ClassTags") ++
excludeSparkClass("util.XORShiftRandom") ++
excludeSparkClass("graphx.EdgeRDD") ++
excludeSparkClass("graphx.VertexRDD") ++
excludeSparkClass("graphx.impl.GraphImpl") ++
excludeSparkClass("graphx.impl.RoutingTable") ++
excludeSparkClass("graphx.util.collection.PrimitiveKeyOpenHashMap") ++
excludeSparkClass("graphx.util.collection.GraphXPrimitiveKeyOpenHashMap") ++
excludeSparkClass("mllib.recommendation.MFDataGenerator") ++
excludeSparkClass("mllib.optimization.SquaredGradient") ++
excludeSparkClass("mllib.regression.RidgeRegressionWithSGD") ++
excludeSparkClass("mllib.regression.LassoWithSGD") ++
excludeSparkClass("mllib.regression.LinearRegressionWithSGD")
case _ => Seq()
}

defaultExcludes ++ packagePrivateExcludes ++ versionExcludes
defaultExcludes ++ externalExcludeFileClasses
}

def mimaSettings(sparkHome: File) = mimaDefaultSettings ++ Seq(
Expand Down
7 changes: 4 additions & 3 deletions project/SparkBuild.scala
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ import scala.collection.JavaConversions._
// import com.jsuereth.pgp.sbtplugin.PgpKeys._

object SparkBuild extends Build {
val SPARK_VERSION = "1.0.0-SNAPSHOT"
val SPARK_VERSION = "1.1.0-SNAPSHOT"
val SPARK_VERSION_SHORT = SPARK_VERSION.replaceAll("-SNAPSHOT", "")

// Hadoop version to build against. For example, "1.0.4" for Apache releases, or
Expand Down Expand Up @@ -321,7 +321,7 @@ object SparkBuild extends Build {
val excludeServletApi = ExclusionRule(organization = "javax.servlet", artifact = "servlet-api")

def sparkPreviousArtifact(id: String, organization: String = "org.apache.spark",
version: String = "0.9.0-incubating", crossVersion: String = "2.10"): Option[sbt.ModuleID] = {
version: String = "1.0.0", crossVersion: String = "2.10"): Option[sbt.ModuleID] = {
val fullId = if (crossVersion.isEmpty) id else id + "_" + crossVersion
Some(organization % fullId % version) // the artifact to compare binary compatibility with
}
Expand Down Expand Up @@ -363,7 +363,8 @@ object SparkBuild extends Build {
"org.spark-project" % "pyrolite" % "2.0.1",
"net.sf.py4j" % "py4j" % "0.8.1"
),
libraryDependencies ++= maybeAvro
libraryDependencies ++= maybeAvro,
previousArtifact := sparkPreviousArtifact("spark-core")
)

// Create a colon-separate package list adding "org.apache.spark" in front of all of them,
Expand Down
2 changes: 0 additions & 2 deletions project/project/SparkPluginBuild.scala
Original file line number Diff line number Diff line change
Expand Up @@ -26,12 +26,10 @@ import sbt.Keys._
object SparkPluginDef extends Build {
lazy val root = Project("plugins", file(".")) dependsOn(sparkStyle)
lazy val sparkStyle = Project("spark-style", file("spark-style"), settings = styleSettings)
val sparkVersion = "1.0.0-SNAPSHOT"
// There is actually no need to publish this artifact.
def styleSettings = Defaults.defaultSettings ++ Seq (
name := "spark-style",
organization := "org.apache.spark",
version := sparkVersion,
scalaVersion := "2.10.4",
scalacOptions := Seq("-unchecked", "-deprecation"),
libraryDependencies ++= Dependencies.scalaStyle
Expand Down

0 comments on commit d420db7

Please sign in to comment.