Commit 824011b

Add support for injecting arbitrary JavaScript to API docs

1 parent 037755c

9 files changed: +83 −22 lines

core/src/main/scala/org/apache/spark/Aggregator.scala

Lines changed: 2 additions & 1 deletion
@@ -17,16 +17,17 @@
 
 package org.apache.spark
 
+import org.apache.spark.annotations.DeveloperAPI
 import org.apache.spark.util.collection.{AppendOnlyMap, ExternalAppendOnlyMap}
 
 /**
- * <span class="developer badge">Developer API</span>
  * A set of functions used to aggregate data.
  *
  * @param createCombiner function to create the initial value of the aggregation.
  * @param mergeValue function to merge a new value into the aggregation result.
  * @param mergeCombiners function to merge outputs from multiple mergeValue function.
  */
+@DeveloperAPI
 case class Aggregator[K, V, C] (
     createCombiner: V => C,
     mergeValue: (C, V) => C,

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 1 addition & 2 deletions
@@ -54,8 +54,7 @@ import org.apache.spark.util.{ClosureCleaner, MetadataCleaner, MetadataCleanerTy
  * @param config a Spark Config object describing the application configuration. Any settings in
  *   this config overrides the default configs as well as system properties.
  */
-class SparkContext(config: SparkConf)
-  extends Logging {
+class SparkContext(config: SparkConf) extends Logging {
 
   // This is used only by YARN for now, but should be relevant to other cluster types (Mesos,
   // etc) too. This is typically generated from InputFormatInfo.computePreferredLocations. It

core/src/main/scala/org/apache/spark/TaskEndReason.scala

Lines changed: 2 additions & 2 deletions
@@ -17,16 +17,16 @@
 
 package org.apache.spark
 
+import org.apache.spark.annotations.DeveloperAPI
 import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.storage.BlockManagerId
 
 /**
- * <span class="developer badge">Developer API</span>
  * Various possible reasons why a task ended. The low-level TaskScheduler is supposed to retry
  * tasks several times for "ephemeral" failures, and only report back failures that require some
  * old stages to be resubmitted, such as shuffle map fetch failures.
  */
-
+@DeveloperAPI
 sealed trait TaskEndReason
 
 /** <span class="developer badge">Developer API</span> */
core/src/main/scala/org/apache/spark/annotations/DeveloperAPI.java

Lines changed: 25 additions & 0 deletions

@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.annotations;
+
+import java.lang.annotation.*;
+
+@Retention(RetentionPolicy.SOURCE)
+@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD,
+    ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE})
+public @interface DeveloperAPI {}
core/src/main/scala/org/apache/spark/annotations/Experimental.java

Lines changed: 25 additions & 0 deletions

@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.annotations;
+
+import java.lang.annotation.*;
+
+@Retention(RetentionPolicy.SOURCE)
+@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD,
+    ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE})
+public @interface Experimental {}
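
For context, a minimal sketch of how these two source-level annotations would be applied from Scala; the classes below are hypothetical and not part of this commit:

import org.apache.spark.annotations.{DeveloperAPI, Experimental}

// Replaces the hand-written <span class="developer badge"> markup that
// previously lived inside the scaladoc comments (see the diffs above).
@DeveloperAPI
class InternalSchedulerHook

// Flags an API that may change or be removed in a future release.
@Experimental
class SpeculativeShuffleWriter

Because both annotations use RetentionPolicy.SOURCE, they are discarded at compile time and add no runtime dependency; they exist purely so the documentation toolchain can recognize annotated definitions in source.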

core/src/main/scala/org/apache/spark/scheduler/StageInfo.scala

Lines changed: 2 additions & 1 deletion
@@ -18,11 +18,12 @@
 package org.apache.spark.scheduler
 
 import org.apache.spark.storage.RDDInfo
+import org.apache.spark.annotations.DeveloperAPI
 
 /**
- * <span class="developer badge">Developer API</span>
  * Stores information about a stage to pass from the scheduler to SparkListeners.
  */
+@DeveloperAPI
 class StageInfo(val stageId: Int, val name: String, val numTasks: Int, val rddInfo: RDDInfo) {
   /** When this stage was submitted from the DAGScheduler to a TaskScheduler. */
   var submissionTime: Option[Long] = None

docs/_config.yml

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 pygments: true
-markdown: kramdown
+markdown: rdiscount
 
 # These allow the documentation to be updated with nerw releases
 # of Spark, Scala, and Mesos.

docs/_plugins/copy_api_dirs.rb

Lines changed: 20 additions & 15 deletions
@@ -51,6 +51,11 @@
     puts "cp -r " + source + "/. " + dest
     cp_r(source + "/.", dest)
 
+    # Append custom JavaScript
+    js = File.readlines("./js/api-docs.js")
+    js_file = dest + "/lib/template.js"
+    File.open(js_file, 'a') { |f| f.write("\n" + js.join()) }
+
     # Append custom CSS
     css_file = dest + "/lib/template.css"
     extra_css = [
@@ -62,19 +67,19 @@
     File.open(css_file, 'a') { |f| f.write(extra_css) }
   end
 
-  # Build Epydoc for Python
-  puts "Moving to python directory and building epydoc."
-  cd("../python")
-  puts `epydoc --config epydoc.conf`
-
-  puts "Moving back into docs dir."
-  cd("../docs")
-
-  puts "echo making directory pyspark"
-  mkdir_p "pyspark"
-
-  puts "cp -r ../python/docs/. api/pyspark"
-  cp_r("../python/docs/.", "api/pyspark")
-
-  cd("..")
+  # # Build Epydoc for Python
+  # puts "Moving to python directory and building epydoc."
+  # cd("../python")
+  # puts `epydoc --config epydoc.conf`
+  #
+  # puts "Moving back into docs dir."
+  # cd("../docs")
+  #
+  # puts "echo making directory pyspark"
+  # mkdir_p "pyspark"
+  #
+  # puts "cp -r ../python/docs/. api/pyspark"
+  # cp_r("../python/docs/.", "api/pyspark")
+  #
+  # cd("..")
 end
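
For Scala readers, the JavaScript-append step in the plugin above amounts to roughly the following sketch (an illustration only; `dest` mirrors the Ruby variable and its example value is assumed, not taken from this commit):

import java.nio.file.{Files, Paths, StandardOpenOption}

// Append the custom JavaScript to scaladoc's generated lib/template.js,
// which every generated API page already loads.
val js   = new String(Files.readAllBytes(Paths.get("./js/api-docs.js")), "UTF-8")
val dest = "api/core"  // assumed output directory, for illustration
Files.write(Paths.get(dest + "/lib/template.js"),
  ("\n" + js).getBytes("UTF-8"),
  StandardOpenOption.APPEND)

Appending, rather than overwriting, keeps scaladoc's own template.js intact; the existing CSS-append step makes the same design choice.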

docs/js/api-docs.js

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+/* Dynamically injected post-processing code for the API docs */
+
+$(document).ready(function() {
+  console.log("Ready")
+});
