From 1e8969c56b762938d4d8bdbb6da639ceb3c37b14 Mon Sep 17 00:00:00 2001
From: Prashant Sharma
Date: Thu, 29 May 2014 14:26:26 +0530
Subject: [PATCH 1/2] Spark core missed out on Mima settings, so in effect we
 never tested Spark core for Mima-related errors.

---
 project/SparkBuild.scala | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 8ef1e91f609fb..d4aa6c2a638f1 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -363,7 +363,8 @@ object SparkBuild extends Build {
         "org.spark-project"  % "pyrolite"         % "2.0.1",
         "net.sf.py4j"        % "py4j"             % "0.8.1"
       ),
-    libraryDependencies ++= maybeAvro
+    libraryDependencies ++= maybeAvro,
+    previousArtifact := sparkPreviousArtifact("spark-core")
   )
 
   // Create a colon-separate package list adding "org.apache.spark" in front of all of them,

From 79f3687cf53744f206a4573f3799ef404a671561 Mon Sep 17 00:00:00 2001
From: Prashant Sharma
Date: Fri, 30 May 2014 12:15:43 +0530
Subject: [PATCH 2/2] Updated Mima to check against version 1.0

---
 project/MimaBuild.scala                | 32 +++-----------------------
 project/SparkBuild.scala               |  4 ++--
 project/project/SparkPluginBuild.scala |  2 --
 3 files changed, 5 insertions(+), 33 deletions(-)

diff --git a/project/MimaBuild.scala b/project/MimaBuild.scala
index e147be7ddaa61..182ca7615de67 100644
--- a/project/MimaBuild.scala
+++ b/project/MimaBuild.scala
@@ -31,7 +31,7 @@ object MimaBuild {
     // Read package-private excludes from file
     val excludeFilePath = (base.getAbsolutePath + "/.mima-excludes")
     val excludeFile = file(excludeFilePath)
-    val packagePrivateList: Seq[String] =
+    val ignoredClasses: Seq[String] =
       if (!excludeFile.exists()) {
         Seq()
       } else {
@@ -60,35 +60,9 @@ object MimaBuild {
       excludePackage("org.apache.spark." + packageName)
     }
 
-    val packagePrivateExcludes = packagePrivateList.flatMap(excludeClass)
+    val externalExcludeFileClasses = ignoredClasses.flatMap(excludeClass)
 
-    /* Excludes specific to a given version of Spark. When comparing the given version against
-    its immediate predecessor, the excludes listed here will be applied.
-    */
-    val versionExcludes =
-      SparkBuild.SPARK_VERSION match {
-        case v if v.startsWith("1.0") =>
-          Seq(
-            excludeSparkPackage("api.java"),
-            excludeSparkPackage("mllib"),
-            excludeSparkPackage("streaming")
-          ) ++
-          excludeSparkClass("rdd.ClassTags") ++
-          excludeSparkClass("util.XORShiftRandom") ++
-          excludeSparkClass("graphx.EdgeRDD") ++
-          excludeSparkClass("graphx.VertexRDD") ++
-          excludeSparkClass("graphx.impl.GraphImpl") ++
-          excludeSparkClass("graphx.impl.RoutingTable") ++
-          excludeSparkClass("graphx.util.collection.PrimitiveKeyOpenHashMap") ++
-          excludeSparkClass("graphx.util.collection.GraphXPrimitiveKeyOpenHashMap") ++
-          excludeSparkClass("mllib.recommendation.MFDataGenerator") ++
-          excludeSparkClass("mllib.optimization.SquaredGradient") ++
-          excludeSparkClass("mllib.regression.RidgeRegressionWithSGD") ++
-          excludeSparkClass("mllib.regression.LassoWithSGD") ++
-          excludeSparkClass("mllib.regression.LinearRegressionWithSGD")
-        case _ => Seq()
-      }
-
-    defaultExcludes ++ packagePrivateExcludes ++ versionExcludes
+    defaultExcludes ++ externalExcludeFileClasses
   }
 
   def mimaSettings(sparkHome: File) = mimaDefaultSettings ++ Seq(

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index d4aa6c2a638f1..9833411c90b7b 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -32,7 +32,7 @@ import scala.collection.JavaConversions._
 // import com.jsuereth.pgp.sbtplugin.PgpKeys._
 
 object SparkBuild extends Build {
-  val SPARK_VERSION = "1.0.0-SNAPSHOT"
+  val SPARK_VERSION = "1.1.0-SNAPSHOT"
   val SPARK_VERSION_SHORT = SPARK_VERSION.replaceAll("-SNAPSHOT", "")
 
   // Hadoop version to build against. For example, "1.0.4" for Apache releases, or
@@ -321,7 +321,7 @@ object SparkBuild extends Build {
   val excludeServletApi = ExclusionRule(organization = "javax.servlet", artifact = "servlet-api")
 
   def sparkPreviousArtifact(id: String, organization: String = "org.apache.spark",
-      version: String = "0.9.0-incubating", crossVersion: String = "2.10"): Option[sbt.ModuleID] = {
+      version: String = "1.0.0", crossVersion: String = "2.10"): Option[sbt.ModuleID] = {
     val fullId = if (crossVersion.isEmpty) id else id + "_" + crossVersion
     Some(organization % fullId % version) // the artifact to compare binary compatibility with
   }

diff --git a/project/project/SparkPluginBuild.scala b/project/project/SparkPluginBuild.scala
index 0142256e90fb7..e9fba641eb8a1 100644
--- a/project/project/SparkPluginBuild.scala
+++ b/project/project/SparkPluginBuild.scala
@@ -26,12 +26,10 @@ import sbt.Keys._
 object SparkPluginDef extends Build {
   lazy val root = Project("plugins", file(".")) dependsOn(sparkStyle)
   lazy val sparkStyle = Project("spark-style", file("spark-style"), settings = styleSettings)
-  val sparkVersion = "1.0.0-SNAPSHOT"
   // There is actually no need to publish this artifact.
   def styleSettings = Defaults.defaultSettings ++ Seq (
     name                 :=  "spark-style",
     organization         :=  "org.apache.spark",
-    version              :=  sparkVersion,
     scalaVersion         :=  "2.10.4",
     scalacOptions        :=  Seq("-unchecked", "-deprecation"),
     libraryDependencies  ++= Dependencies.scalaStyle
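
Taken together, the two patches wire spark-core into the Mima binary-compatibility check (it had been silently skipped) and move the comparison baseline from 0.9.0-incubating to the 1.0.0 release, replacing the hand-maintained 1.0-specific exclude list with the entries read from .mima-excludes. Below is a minimal sketch of the resulting wiring, assuming sbt 0.13 and the sbt-mima-plugin API of that era; the MimaWiringSketch object and the core project scaffolding are hypothetical stand-ins, while sparkPreviousArtifact, previousArtifact, and mimaDefaultSettings come from the patches themselves.

import sbt._
import sbt.Keys._
import com.typesafe.tools.mima.plugin.MimaKeys.previousArtifact
import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings

object MimaWiringSketch extends Build {
  // Baseline after patch 2/2: the published 1.0.0 release artifact.
  def sparkPreviousArtifact(id: String, organization: String = "org.apache.spark",
      version: String = "1.0.0", crossVersion: String = "2.10"): Option[sbt.ModuleID] = {
    val fullId = if (crossVersion.isEmpty) id else id + "_" + crossVersion
    Some(organization % fullId % version) // the artifact to compare binary compatibility with
  }

  // Hypothetical stand-in for the real spark-core project definition. After
  // patch 1/2 it opts into the check, so its public API is diffed against
  // spark-core_2.10:1.0.0 rather than being skipped.
  lazy val core = Project("core", file("core"), settings =
    Defaults.defaultSettings ++ mimaDefaultSettings ++ Seq(
      previousArtifact := sparkPreviousArtifact("spark-core")
    ))
}

With that setting in place, the plugin's report task (mima-report-binary-issues in plugin versions of that era) resolves the baseline artifact and flags incompatible API changes, filtered through the excludes that MimaBuild reads from .mima-excludes.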