Skip to content

Commit

Permalink
Rename dataSourceName to source.
Browse files Browse the repository at this point in the history
  • Loading branch information
yhuai committed Feb 10, 2015
1 parent d1c12d3 commit cbc717f
Show file tree
Hide file tree
Showing 4 changed files with 40 additions and 46 deletions.
28 changes: 14 additions & 14 deletions sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
Original file line number Diff line number Diff line change
Expand Up @@ -617,8 +617,8 @@ trait DataFrame extends RDDApi[Row] {
@Experimental
def saveAsTable(
tableName: String,
-      dataSourceName: String): Unit = {
-    saveAsTable(tableName, dataSourceName, SaveMode.ErrorIfExists)
+      source: String): Unit = {
+    saveAsTable(tableName, source, SaveMode.ErrorIfExists)
}

/**
Expand All @@ -634,9 +634,9 @@ trait DataFrame extends RDDApi[Row] {
@Experimental
def saveAsTable(
tableName: String,
-      dataSourceName: String,
+      source: String,
mode: SaveMode): Unit = {
-    saveAsTable(tableName, dataSourceName, mode, Map.empty[String, String])
+    saveAsTable(tableName, source, mode, Map.empty[String, String])
}

/**
Expand All @@ -652,10 +652,10 @@ trait DataFrame extends RDDApi[Row] {
@Experimental
def saveAsTable(
tableName: String,
-      dataSourceName: String,
+      source: String,
mode: SaveMode,
options: java.util.Map[String, String]): Unit = {
-    saveAsTable(tableName, dataSourceName, mode, options.toMap)
+    saveAsTable(tableName, source, mode, options.toMap)
}

/**
Expand All @@ -672,7 +672,7 @@ trait DataFrame extends RDDApi[Row] {
@Experimental
def saveAsTable(
tableName: String,
-      dataSourceName: String,
+      source: String,
mode: SaveMode,
options: Map[String, String]): Unit

Expand Down Expand Up @@ -704,8 +704,8 @@ trait DataFrame extends RDDApi[Row] {
* using [[SaveMode.ErrorIfExists]] as the save mode.
*/
@Experimental
-  def save(path: String, dataSourceName: String): Unit = {
-    save(dataSourceName, SaveMode.ErrorIfExists, Map("path" -> path))
+  def save(path: String, source: String): Unit = {
+    save(source, SaveMode.ErrorIfExists, Map("path" -> path))
}

/**
Expand All @@ -714,8 +714,8 @@ trait DataFrame extends RDDApi[Row] {
* [[SaveMode]] specified by mode.
*/
@Experimental
-  def save(path: String, dataSourceName: String, mode: SaveMode): Unit = {
-    save(dataSourceName, mode, Map("path" -> path))
+  def save(path: String, source: String, mode: SaveMode): Unit = {
+    save(source, mode, Map("path" -> path))
}

/**
Expand All @@ -725,10 +725,10 @@ trait DataFrame extends RDDApi[Row] {
*/
@Experimental
def save(
-      dataSourceName: String,
+      source: String,
mode: SaveMode,
options: java.util.Map[String, String]): Unit = {
-    save(dataSourceName, mode, options.toMap)
+    save(source, mode, options.toMap)
}

/**
Expand All @@ -739,7 +739,7 @@ trait DataFrame extends RDDApi[Row] {
*/
@Experimental
def save(
-      dataSourceName: String,
+      source: String,
mode: SaveMode,
options: Map[String, String]): Unit

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -352,13 +352,13 @@ private[sql] class DataFrameImpl protected[sql](

override def saveAsTable(
tableName: String,
-      dataSourceName: String,
+      source: String,
mode: SaveMode,
options: Map[String, String]): Unit = {
val cmd =
CreateTableUsingAsLogicalPlan(
tableName,
-        dataSourceName,
+        source,
temporary = false,
mode,
options,
Expand All @@ -368,10 +368,10 @@ private[sql] class DataFrameImpl protected[sql](
}

override def save(
-      dataSourceName: String,
+      source: String,
mode: SaveMode,
options: Map[String, String]): Unit = {
-    ResolvedDataSource(sqlContext, dataSourceName, mode, options, this)
+    ResolvedDataSource(sqlContext, source, mode, options, this)
}

override def insertInto(tableName: String, overwrite: Boolean): Unit = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -156,12 +156,12 @@ private[sql] class IncomputableColumn(protected[sql] val expr: Expression) exten

override def saveAsTable(
tableName: String,
-      dataSourceName: String,
+      source: String,
mode: SaveMode,
options: Map[String, String]): Unit = err()

override def save(
-      dataSourceName: String,
+      source: String,
mode: SaveMode,
options: Map[String, String]): Unit = err()

Expand Down
46 changes: 20 additions & 26 deletions sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
Original file line number Diff line number Diff line change
Expand Up @@ -418,21 +418,17 @@ class SQLContext(@transient val sparkContext: SparkContext)
* using the given data source.
*/
@Experimental
-  def load(
-      path: String,
-      dataSourceName: String): DataFrame = {
-    load(dataSourceName, Map("path" -> path))
+  def load(path: String, source: String): DataFrame = {
+    load(source, Map("path" -> path))
}

/**
* :: Experimental ::
* Returns the dataset specified by the given data source and a set of options as a DataFrame.
*/
@Experimental
-  def load(
-      dataSourceName: String,
-      options: java.util.Map[String, String]): DataFrame = {
-    load(dataSourceName, options.toMap)
+  def load(source: String, options: java.util.Map[String, String]): DataFrame = {
+    load(source, options.toMap)
}

/**
Expand All @@ -441,10 +437,8 @@ class SQLContext(@transient val sparkContext: SparkContext)
* Returns the dataset specified by the given data source and a set of options as a DataFrame.
*/
@Experimental
-  def load(
-      dataSourceName: String,
-      options: Map[String, String]): DataFrame = {
-    val resolved = ResolvedDataSource(this, None, dataSourceName, options)
+  def load(source: String, options: Map[String, String]): DataFrame = {
+    val resolved = ResolvedDataSource(this, None, source, options)
DataFrame(this, LogicalRelation(resolved.relation))
}

Expand All @@ -455,10 +449,10 @@ class SQLContext(@transient val sparkContext: SparkContext)
*/
@Experimental
def load(
-      dataSourceName: String,
+      source: String,
schema: StructType,
options: java.util.Map[String, String]): DataFrame = {
-    load(dataSourceName, schema, options.toMap)
+    load(source, schema, options.toMap)
}

/**
Expand All @@ -469,10 +463,10 @@ class SQLContext(@transient val sparkContext: SparkContext)
*/
@Experimental
def load(
-      dataSourceName: String,
+      source: String,
schema: StructType,
options: Map[String, String]): DataFrame = {
-    val resolved = ResolvedDataSource(this, Some(schema), dataSourceName, options)
+    val resolved = ResolvedDataSource(this, Some(schema), source, options)
DataFrame(this, LogicalRelation(resolved.relation))
}

Expand All @@ -496,8 +490,8 @@ class SQLContext(@transient val sparkContext: SparkContext)
def createExternalTable(
tableName: String,
path: String,
-      dataSourceName: String): DataFrame = {
-    createExternalTable(tableName, dataSourceName, Map("path" -> path))
+      source: String): DataFrame = {
+    createExternalTable(tableName, source, Map("path" -> path))
}

/**
Expand All @@ -508,9 +502,9 @@ class SQLContext(@transient val sparkContext: SparkContext)
@Experimental
def createExternalTable(
tableName: String,
-      dataSourceName: String,
+      source: String,
options: java.util.Map[String, String]): DataFrame = {
-    createExternalTable(tableName, dataSourceName, options.toMap)
+    createExternalTable(tableName, source, options.toMap)
}

/**
Expand All @@ -522,13 +516,13 @@ class SQLContext(@transient val sparkContext: SparkContext)
@Experimental
def createExternalTable(
tableName: String,
-      dataSourceName: String,
+      source: String,
options: Map[String, String]): DataFrame = {
val cmd =
CreateTableUsing(
tableName,
userSpecifiedSchema = None,
-        dataSourceName,
+        source,
temporary = false,
options,
allowExisting = false,
Expand All @@ -545,10 +539,10 @@ class SQLContext(@transient val sparkContext: SparkContext)
@Experimental
def createExternalTable(
tableName: String,
-      dataSourceName: String,
+      source: String,
schema: StructType,
options: java.util.Map[String, String]): DataFrame = {
-    createExternalTable(tableName, dataSourceName, schema, options.toMap)
+    createExternalTable(tableName, source, schema, options.toMap)
}

/**
Expand All @@ -560,14 +554,14 @@ class SQLContext(@transient val sparkContext: SparkContext)
@Experimental
def createExternalTable(
tableName: String,
-      dataSourceName: String,
+      source: String,
schema: StructType,
options: Map[String, String]): DataFrame = {
val cmd =
CreateTableUsing(
tableName,
userSpecifiedSchema = Some(schema),
-        dataSourceName,
+        source,
temporary = false,
options,
allowExisting = false,
Expand Down

0 comments on commit cbc717f

Please sign in to comment.