Commit 29eae3e

limited to longest 1000 tasks

sarutak committed May 12, 2015
2 parents: 2a9e376 + 1669675

Showing 703 changed files with 67,517 additions and 8,191 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -65,6 +65,7 @@ scalastyle.txt
scalastyle-output.xml
R-unit-tests.log
R/unit-tests.out
python/lib/pyspark.zip

# For Hive
metastore_db/
12 changes: 11 additions & 1 deletion .rat-excludes
@@ -15,6 +15,7 @@ TAGS
RELEASE
control
docs
docker.properties.template
fairscheduler.xml.template
spark-defaults.conf.template
log4j.properties
@@ -29,10 +30,12 @@ spark-env.sh.template
log4j-defaults.properties
bootstrap-tooltip.js
jquery-1.11.1.min.js
d3.min.js
dagre-d3.min.js
graphlib-dot.min.js
sorttable.js
vis.min.js
vis.min.css
vis.map
.*avsc
.*txt
.*json
@@ -70,5 +73,12 @@ logs
.*scalastyle-output.xml
.*dependency-reduced-pom.xml
known_translations
json_expectation
local-1422981759269/*
local-1422981780767/*
local-1425081759269/*
local-1426533911241/*
local-1426633911242/*
local-1430917381534/*
DESCRIPTION
NAMESPACE
46 changes: 46 additions & 0 deletions LICENSE
@@ -643,6 +643,36 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

========================================================================
For d3 (core/src/main/resources/org/apache/spark/ui/static/d3.min.js):
========================================================================

Copyright (c) 2010-2015, Michael Bostock
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

* The name Michael Bostock may not be used to endorse or promote products
derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL MICHAEL BOSTOCK BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

========================================================================
For Scala Interpreter classes (all .scala files in repl/src/main/scala
@@ -806,6 +836,22 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

========================================================================
For vis.js (core/src/main/resources/org/apache/spark/ui/static/vis.min.js):
========================================================================
Copyright (C) 2010-2015 Almende B.V.

Vis.js is dual licensed under both

* The Apache 2.0 License
http://www.apache.org/licenses/LICENSE-2.0

and

* The MIT License
http://opensource.org/licenses/MIT

Vis.js may be distributed under either license.

========================================================================
BSD-style licenses
Expand Down
2 changes: 1 addition & 1 deletion R/pkg/DESCRIPTION
@@ -15,11 +15,11 @@ Suggests:
Description: R frontend for Spark
License: Apache License (== 2.0)
Collate:
'schema.R'
'generics.R'
'jobj.R'
'RDD.R'
'pairRDD.R'
'schema.R'
'column.R'
'group.R'
'DataFrame.R'
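The Collate field controls the order in which R sources the package files at build time, so a file that registers S4 methods must come after the file defining the corresponding generics; moving 'schema.R' later in the list keeps that ordering valid. A minimal sketch of the constraint, using made-up names rather than the actual SparkR sources:

    library(methods)

    # Hypothetical generic (not the SparkR one): it must be defined first...
    setGeneric("describeData", function(x, ...) standardGeneric("describeData"))

    # ...before a file collated later can attach a method to it.
    setMethod("describeData", "data.frame", function(x, ...) summary(x))
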
122 changes: 21 additions & 101 deletions R/pkg/NAMESPACE
@@ -1,121 +1,44 @@
#exportPattern("^[[:alpha:]]+")
exportClasses("RDD")
exportClasses("Broadcast")
exportMethods(
"aggregateByKey",
"aggregateRDD",
"cache",
"cartesian",
"checkpoint",
"coalesce",
"cogroup",
"collect",
"collectAsMap",
"collectPartition",
"combineByKey",
"count",
"countByKey",
"countByValue",
"distinct",
"Filter",
"filterRDD",
"first",
"flatMap",
"flatMapValues",
"fold",
"foldByKey",
"foreach",
"foreachPartition",
"fullOuterJoin",
"glom",
"groupByKey",
"intersection",
"join",
"keyBy",
"keys",
"length",
"lapply",
"lapplyPartition",
"lapplyPartitionsWithIndex",
"leftOuterJoin",
"lookup",
"map",
"mapPartitions",
"mapPartitionsWithIndex",
"mapValues",
"maximum",
"minimum",
"numPartitions",
"partitionBy",
"persist",
"pipeRDD",
"reduce",
"reduceByKey",
"reduceByKeyLocally",
"repartition",
"rightOuterJoin",
"sampleByKey",
"sampleRDD",
"saveAsTextFile",
"saveAsObjectFile",
"sortBy",
"sortByKey",
"subtract",
"subtractByKey",
"sumRDD",
"take",
"takeOrdered",
"takeSample",
"top",
"unionRDD",
"unpersist",
"value",
"values",
"zipPartitions",
"zipRDD",
"zipWithIndex",
"zipWithUniqueId"
)
# Imports from base R
importFrom(methods, setGeneric, setMethod, setOldClass)
useDynLib(SparkR, stringHashCode)

# S3 methods exported
export(
"textFile",
"objectFile",
"parallelize",
"hashCode",
"includePackage",
"broadcast",
"setBroadcastValue",
"setCheckpointDir"
)
export("sparkR.init")
export("sparkR.stop")
export("print.jobj")
useDynLib(SparkR, stringHashCode)
importFrom(methods, setGeneric, setMethod, setOldClass)

# SparkRSQL

exportClasses("DataFrame")

exportMethods("columns",
exportMethods("arrange",
"cache",
"collect",
"columns",
"count",
"describe",
"distinct",
"dtypes",
"except",
"explain",
"filter",
"first",
"group_by",
"groupBy",
"head",
"insertInto",
"intersect",
"isLocal",
"join",
"limit",
"orderBy",
"mutate",
"names",
"persist",
"printSchema",
"registerTempTable",
"rename",
"repartition",
"sampleDF",
"sample_frac",
"saveAsParquetFile",
"saveAsTable",
"saveDF",
@@ -124,10 +47,10 @@ exportMethods("columns",
"selectExpr",
"show",
"showDF",
"sortDF",
"toJSON",
"toRDD",
"summarize",
"take",
"unionAll",
"unpersist",
"where",
"withColumn",
"withColumnRenamed")
@@ -154,6 +77,8 @@ exportMethods("abs",
"max",
"mean",
"min",
"n",
"n_distinct",
"rlike",
"sqrt",
"startsWith",
@@ -174,19 +99,14 @@ export("cacheTable",
"createExternalTable",
"dropTempTable",
"jsonFile",
"jsonRDD",
"loadDF",
"parquetFile",
"sql",
"table",
"tableNames",
"tables",
"toDF",
"uncacheTable")

export("sparkRSQL.init",
"sparkRHive.init")

export("structField",
"structField.jobj",
"structField.character",
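After this change the package exports center on the DataFrame API rather than the low-level RDD methods. A rough usage sketch of those exports follows; the JSON path and column names are made up for illustration, and it assumes a SparkR build matching this NAMESPACE:

    library(SparkR)

    sc <- sparkR.init(master = "local")   # sparkR.init is exported above
    sqlContext <- sparkRSQL.init(sc)

    # jsonFile, groupBy, summarize, n, n_distinct and arrange all appear in the
    # export lists above; "people.json", dept, name and city are hypothetical.
    df  <- jsonFile(sqlContext, "people.json")
    agg <- summarize(groupBy(df, df$dept),
                     headcount = n(df$name),
                     cities    = n_distinct(df$city))
    head(arrange(agg, "headcount"))

    sparkR.stop()
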
(remaining changed files not shown)
