diff --git a/core/src/main/scala/org/apache/spark/Aggregator.scala b/core/src/main/scala/org/apache/spark/Aggregator.scala
index d43ef6c430cb1..43ab09004fabf 100644
--- a/core/src/main/scala/org/apache/spark/Aggregator.scala
+++ b/core/src/main/scala/org/apache/spark/Aggregator.scala
@@ -17,16 +17,17 @@
package org.apache.spark
+import org.apache.spark.annotations.DeveloperAPI
import org.apache.spark.util.collection.{AppendOnlyMap, ExternalAppendOnlyMap}
/**
- * Developer API
* A set of functions used to aggregate data.
*
* @param createCombiner function to create the initial value of the aggregation.
* @param mergeValue function to merge a new value into the aggregation result.
* @param mergeCombiners function to merge outputs from multiple mergeValue functions.
*/
+@DeveloperAPI
case class Aggregator[K, V, C] (
createCombiner: V => C,
mergeValue: (C, V) => C,
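The three functions above compose a standard combine-by-key aggregation: createCombiner seeds a combiner from the first value seen for a key, mergeValue folds each further value in, and mergeCombiners reconciles combiners built on different partitions. A minimal sketch of constructing one (the word-count shapes below are illustrative, not part of this patch):

// Hypothetical example: a per-key counting Aggregator.
val countAgg = Aggregator[String, Int, Int](
  createCombiner = (v: Int) => v,                 // first value seeds the count
  mergeValue = (c: Int, v: Int) => c + v,         // fold in another value
  mergeCombiners = (c1: Int, c2: Int) => c1 + c2  // merge partial counts across partitions
)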
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index dd4833102b72e..2884037e7fb30 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -54,8 +54,7 @@ import org.apache.spark.util.{ClosureCleaner, MetadataCleaner, MetadataCleanerTy
* @param config a Spark Config object describing the application configuration. Any settings in
* this config overrides the default configs as well as system properties.
*/
-class SparkContext(config: SparkConf)
- extends Logging {
+class SparkContext(config: SparkConf) extends Logging {
// This is used only by YARN for now, but should be relevant to other cluster types (Mesos,
// etc) too. This is typically generated from InputFormatInfo.computePreferredLocations. It
diff --git a/core/src/main/scala/org/apache/spark/TaskEndReason.scala b/core/src/main/scala/org/apache/spark/TaskEndReason.scala
index 626f1260cff04..8dacf9cddd3f3 100644
--- a/core/src/main/scala/org/apache/spark/TaskEndReason.scala
+++ b/core/src/main/scala/org/apache/spark/TaskEndReason.scala
@@ -17,16 +17,16 @@
package org.apache.spark
+import org.apache.spark.annotations.DeveloperAPI
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.storage.BlockManagerId
/**
- * Developer API
* Various possible reasons why a task ended. The low-level TaskScheduler is supposed to retry
* tasks several times for "ephemeral" failures, and only report back failures that require some
* old stages to be resubmitted, such as shuffle map fetch failures.
*/
-
+@DeveloperAPI
sealed trait TaskEndReason
/** Developer API */
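Because TaskEndReason is sealed, scheduler-side code can match on it exhaustively. A hedged sketch, assuming the concrete cases defined later in this file (e.g. the Success and TaskResultLost objects):

// Illustrative only: deciding whether to rerun a task from its end reason.
def shouldRerun(reason: TaskEndReason): Boolean = reason match {
  case Success        => false // completed normally
  case TaskResultLost => true  // result lost in transit; the task may succeed on retry
  case _              => true  // treat remaining failures as potentially ephemeral
}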
diff --git a/core/src/main/scala/org/apache/spark/annotations/DeveloperAPI.java b/core/src/main/scala/org/apache/spark/annotations/DeveloperAPI.java
new file mode 100644
index 0000000000000..ae04a4a63b9fd
--- /dev/null
+++ b/core/src/main/scala/org/apache/spark/annotations/DeveloperAPI.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.annotations;
+
+import java.lang.annotation.*;
+
+@Retention(RetentionPolicy.SOURCE)
+@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD,
+ ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE})
+public @interface DeveloperAPI {}
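With SOURCE retention, @DeveloperAPI carries no runtime footprint; it exists for documentation and tooling (see the template.js/template.css machinery below). A sketch of applying it from Scala, mirroring the Aggregator and StageInfo changes in this patch (the class here is hypothetical):

import org.apache.spark.annotations.DeveloperAPI

// Hypothetical class: marks an internal-facing type as Developer API.
@DeveloperAPI
class InternalMetricsSource {
  def record(value: Long): Unit = ()
}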
diff --git a/core/src/main/scala/org/apache/spark/annotations/Experimental.java b/core/src/main/scala/org/apache/spark/annotations/Experimental.java
new file mode 100644
index 0000000000000..58445cc2c1e86
--- /dev/null
+++ b/core/src/main/scala/org/apache/spark/annotations/Experimental.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.annotations;
+
+import java.lang.annotation.*;
+
+@Retention(RetentionPolicy.SOURCE)
+@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD,
+ ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE})
+public @interface Experimental {}
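@Experimental is structurally identical but signals user-facing APIs that are likely to change, rather than internal ones. Since METHOD is among its targets, it can flag a single member; a hypothetical sketch:

import org.apache.spark.annotations.Experimental

class QueryApi {
  // Hypothetical method: unstable entry point flagged without marking the whole class.
  @Experimental
  def previewPlan(sql: String): String = sql
}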
diff --git a/core/src/main/scala/org/apache/spark/scheduler/StageInfo.scala b/core/src/main/scala/org/apache/spark/scheduler/StageInfo.scala
index 419cd96376c04..8d2b752078a91 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/StageInfo.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/StageInfo.scala
@@ -18,11 +18,12 @@
package org.apache.spark.scheduler
import org.apache.spark.storage.RDDInfo
+import org.apache.spark.annotations.DeveloperAPI
/**
- * Developer API
* Stores information about a stage to pass from the scheduler to SparkListeners.
*/
+@DeveloperAPI
class StageInfo(val stageId: Int, val name: String, val numTasks: Int, val rddInfo: RDDInfo) {
/** When this stage was submitted from the DAGScheduler to a TaskScheduler. */
var submissionTime: Option[Long] = None
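Listeners receive StageInfo and read its fields directly; a small illustrative consumer using only the members visible in this hunk:

// Illustrative only: summarizing a StageInfo as delivered to a SparkListener.
def describe(info: StageInfo): String =
  s"Stage ${info.stageId} '${info.name}' (${info.numTasks} tasks), " +
    s"submitted at ${info.submissionTime.getOrElse("<pending>")}"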
diff --git a/docs/_config.yml b/docs/_config.yml
index aa5a5adbc1743..5e69c5626f131 100644
--- a/docs/_config.yml
+++ b/docs/_config.yml
@@ -1,5 +1,5 @@
pygments: true
-markdown: kramdown
+markdown: rdiscount
# These allow the documentation to be updated with new releases
# of Spark, Scala, and Mesos.
diff --git a/docs/_plugins/copy_api_dirs.rb b/docs/_plugins/copy_api_dirs.rb
index d4ec15bb7d6fa..786d3ef6f09a9 100644
--- a/docs/_plugins/copy_api_dirs.rb
+++ b/docs/_plugins/copy_api_dirs.rb
@@ -51,6 +51,11 @@
puts "cp -r " + source + "/. " + dest
cp_r(source + "/.", dest)
+ # Append custom JavaScript
+ js = File.readlines("./js/api-docs.js")
+ js_file = dest + "/lib/template.js"
+ File.open(js_file, 'a') { |f| f.write("\n" + js.join()) }
+
# Append custom CSS
css_file = dest + "/lib/template.css"
extra_css = [
@@ -62,19 +67,19 @@
File.open(css_file, 'a') { |f| f.write(extra_css) }
end
- # Build Epydoc for Python
- puts "Moving to python directory and building epydoc."
- cd("../python")
- puts `epydoc --config epydoc.conf`
-
- puts "Moving back into docs dir."
- cd("../docs")
-
- puts "echo making directory pyspark"
- mkdir_p "pyspark"
-
- puts "cp -r ../python/docs/. api/pyspark"
- cp_r("../python/docs/.", "api/pyspark")
-
- cd("..")
+# # Build Epydoc for Python
+# puts "Moving to python directory and building epydoc."
+# cd("../python")
+# puts `epydoc --config epydoc.conf`
+#
+# puts "Moving back into docs dir."
+# cd("../docs")
+#
+# puts "echo making directory pyspark"
+# mkdir_p "pyspark"
+#
+# puts "cp -r ../python/docs/. api/pyspark"
+# cp_r("../python/docs/.", "api/pyspark")
+#
+# cd("..")
end
diff --git a/docs/js/api-docs.js b/docs/js/api-docs.js
new file mode 100644
index 0000000000000..ee63d611a18ec
--- /dev/null
+++ b/docs/js/api-docs.js
@@ -0,0 +1,5 @@
+/* Dynamically injected post-processing code for the API docs */
+
+$(document).ready(function() {
+  console.log("Ready");
+});