[SPARK-39503][SQL] Add session catalog name for v1 database table and function

### What changes were proposed in this pull request?

- Add the session catalog name to identifiers, so that all identifiers become 3-part names

### Why are the changes needed?

To make it clearer which catalog a table or function comes from. It affects:

- the scanned table/permanent view in the query plan
- the target table of data writes
- desc database
- desc table
- desc function

Note that temporary views are not supported, since they do not belong to any database or catalog.

This is a new approach to apache#36936 that:
- adds a catalog field to the identifier, so the identifier prints the catalog only when it is defined
- injects the catalog at the beginning of the identifier's lifecycle
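
A minimal sketch of the intended rendering, based on the new `TableIdentifier` signature and `quotedString`/`unquotedString` logic in this diff (the snippet itself is illustrative, not part of the change):

```scala
import org.apache.spark.sql.catalyst.TableIdentifier

// Once a catalog is defined, the identifier renders as a 3-part name.
val ident = TableIdentifier("t", Some("default"), Some("spark_catalog"))
assert(ident.unquotedString == "spark_catalog.default.t")
assert(ident.quotedString == "`spark_catalog`.`default`.`t`")

// Without a catalog, the rendering is unchanged.
assert(TableIdentifier("t", Some("default")).unquotedString == "default.t")
```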

### Does this PR introduce _any_ user-facing change?

Maybe yes, so a new config `spark.sql.legacy.v1IdentifierNoCatalog` is added to restore the old behavior.
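
A minimal sketch of restoring the legacy output, assuming a running `SparkSession` named `spark`:

```scala
// Restore the pre-3.4 rendering that omits the session catalog name
// from v1 identifiers.
spark.conf.set("spark.sql.legacy.v1IdentifierNoCatalog", "true")
// With the flag set, plans and DESC output print e.g. `default.t`
// instead of `spark_catalog.default.t`.
```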

### How was this patch tested?

change list:
```
docs/sql-migration-guide.md                                                                          |   1 +

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala              |  10 +++++++---
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala               |  28 ++++++++++++++++++---------
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala                    |   1 +
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/identifiers.scala                          |  56 ++++++++++++++++++++++++++++++++++++++++++++---------
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala                       |   4 ++--
sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala          |  13 +++++++++----
sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/LookupCatalog.scala               |   8 ++++++--
sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala                              |   9 +++++++++
sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala                                   |   7 ++++---
sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala                             |  14 ++++++++------
sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala                       |   6 ++++--
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeNamespaceExec.scala    |   1 +
sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala                        |   5 +++--
sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala                              |   5 +++--

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala         |  5 +++--
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala          |  58 ++++++++++++++++++++++++++++++-------------------------
sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/LookupCatalogSuite.scala          |   4 +++-
sql/core/src/test/scala/org/apache/spark/sql/DataFrameJoinSuite.scala                                |   4 +++-
sql/core/src/test/scala/org/apache/spark/sql/DataFrameWriterV2Suite.scala                            |  13 +++++++++----
sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala                                     |   7 +++++--
sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala                     |   4 +++-
sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala                            |  39 ++++++++++++++++++++++++------------
sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala                        |   7 +++++--
sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala                        |  36 +++++++++++++++++++++--------------
sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala             |  51 +++++++++++++++++++++++++++++----------------------
sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/DescribeNamespaceSuite.scala       |  12 +++++++-----
sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/DescribeTableSuite.scala           |   2 ++
sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTblPropertiesSuite.scala       |   3 ++-
sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/DescribeNamespaceSuite.scala       |   1 +
sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala                  |   3 ++-
sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala                    |  25 +++++++++++++-----------
sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala                              |   7 ++++---
sql/hive/src/test/scala/org/apache/spark/sql/hive/UDFSuite.scala                                     |   9 +++++----
sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala                       |  27 ++++++++++++++++----------
sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala                   |   3 ++-
sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSQLViewSuite.scala                   |   4 +++-
sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala                 |  13 +++++++------
sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala                       |   3 ++-
sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala                      |   8 +++++---
sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/DescribeTableSuite.scala         |   2 ++
sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowFunctionsSuite

sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out                                    |  20 ++++++++++++++++++-
sql/core/src/test/resources/sql-tests/results/describe-table-after-alter-table.sql.out               |   5 +++++
sql/core/src/test/resources/sql-tests/results/describe.sql.out                                       |  22 +++++++++++++--------
sql/core/src/test/resources/sql-tests/results/explain-aqe.sql.out                                    | 108 +++++++++++++++++++++++++++++++++++++++++++++++++++----------------------------------------------------
sql/core/src/test/resources/sql-tests/results/explain-cbo.sql.out                                    |   8 ++++----
sql/core/src/test/resources/sql-tests/results/explain.sql.out                                        | 100 +++++++++++++++++++++++++++++++++++++++++++++++------------------------------------------------
sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out                                   |   2 +-
sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out                         |  40 +++++++++++++++++++++++++-------------
sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out                             |   2 +-
sql/core/src/test/resources/sql-tests/results/show-tables.sql.out                                    |  10 ++++++----
sql/core/src/test/resources/sql-tests/results/show-tblproperties.sql.out                             |   2 +-
sql/core/src/test/resources/sql-tests/results/udaf.sql.out                                           |   4 ++--
sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out                                   |   4 ++--
```

Closes apache#37021 from ulysses-you/output-catalog-2.

Authored-by: ulysses-you <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
ulysses-you authored and cloud-fan committed Jul 1, 2022
1 parent 09d1bae commit f8a3775
Showing 697 changed files with 6,501 additions and 6,296 deletions.
1 change: 1 addition & 0 deletions docs/sql-migration-guide.md
@@ -25,6 +25,7 @@ license: |
## Upgrading from Spark SQL 3.3 to 3.4

- Since Spark 3.4, Number or Number(\*) from Teradata will be treated as Decimal(38,18). In Spark 3.3 or earlier, Number or Number(\*) from Teradata will be treated as Decimal(38, 0), in which case the fractional part will be removed.
- Since Spark 3.4, v1 database, table, permanent view and function identifier will include 'spark_catalog' as the catalog name if database is defined, e.g. a table identifier will be: `spark_catalog.default.t`. To restore the legacy behavior, set `spark.sql.legacy.v1IdentifierNoCatalog` to `true`.

## Upgrading from Spark SQL 3.2 to 3.3


sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
@@ -26,6 +26,7 @@ import org.apache.hadoop.fs.Path

import org.apache.spark.SparkConf
import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
import org.apache.spark.sql.catalyst.CatalystIdentifier._
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.catalog.ExternalCatalogUtils._
import org.apache.spark.sql.catalyst.expressions.Expression
@@ -343,7 +344,8 @@ class InMemoryCatalog(

override def getTable(db: String, table: String): CatalogTable = synchronized {
requireTableExists(db, table)
catalog(db).tables(table).table
val catalogTable = catalog(db).tables(table).table
catalogTable.copy(identifier = attachSessionCatalog(catalogTable.identifier))
}

override def getTablesByName(db: String, tables: Seq[String]): Seq[CatalogTable] = {
@@ -639,7 +641,8 @@ class InMemoryCatalog(

override def getFunction(db: String, funcName: String): CatalogFunction = synchronized {
requireFunctionExists(db, funcName)
catalog(db).functions(funcName)
val catalogFunction = catalog(db).functions(funcName)
catalogFunction.copy(identifier = attachSessionCatalog(catalogFunction.identifier))
}

override def functionExists(db: String, funcName: String): Boolean = synchronized {

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -33,6 +33,7 @@ import org.apache.hadoop.fs.Path
import org.apache.spark.internal.Logging
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst._
import org.apache.spark.sql.catalyst.CatalystIdentifier._
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry.FunctionBuilder
import org.apache.spark.sql.catalyst.expressions.{Alias, Expression, ExpressionInfo, UpCast}
@@ -346,7 +347,7 @@ class SessionCatalog(

val db = formatDatabaseName(tableDefinition.identifier.database.getOrElse(getCurrentDatabase))
val table = formatTableName(tableDefinition.identifier.table)
val tableIdentifier = TableIdentifier(table, Some(db))
val tableIdentifier = attachSessionCatalog(TableIdentifier(table, Some(db)))
validateName(table)

val newTableDefinition = if (tableDefinition.storage.locationUri.isDefined
@@ -412,7 +413,7 @@
def alterTable(tableDefinition: CatalogTable): Unit = {
val db = formatDatabaseName(tableDefinition.identifier.database.getOrElse(getCurrentDatabase))
val table = formatTableName(tableDefinition.identifier.table)
val tableIdentifier = TableIdentifier(table, Some(db))
val tableIdentifier = attachSessionCatalog(TableIdentifier(table, Some(db)))
requireDbExists(db)
requireTableExists(tableIdentifier)
val newTableDefinition = if (tableDefinition.storage.locationUri.isDefined
@@ -443,7 +444,7 @@
newDataSchema: StructType): Unit = {
val db = formatDatabaseName(identifier.database.getOrElse(getCurrentDatabase))
val table = formatTableName(identifier.table)
val tableIdentifier = TableIdentifier(table, Some(db))
val tableIdentifier = attachSessionCatalog(TableIdentifier(table, Some(db)))
requireDbExists(db)
requireTableExists(tableIdentifier)

@@ -470,7 +471,7 @@
def alterTableStats(identifier: TableIdentifier, newStats: Option[CatalogStatistics]): Unit = {
val db = formatDatabaseName(identifier.database.getOrElse(getCurrentDatabase))
val table = formatTableName(identifier.table)
val tableIdentifier = TableIdentifier(table, Some(db))
val tableIdentifier = attachSessionCatalog(TableIdentifier(table, Some(db)))
requireDbExists(db)
requireTableExists(tableIdentifier)
externalCatalog.alterTableStats(db, table, newStats)
@@ -1365,7 +1366,8 @@ class SessionCatalog(
def createFunction(funcDefinition: CatalogFunction, ignoreIfExists: Boolean): Unit = {
val db = formatDatabaseName(funcDefinition.identifier.database.getOrElse(getCurrentDatabase))
requireDbExists(db)
val identifier = FunctionIdentifier(funcDefinition.identifier.funcName, Some(db))
val identifier = attachSessionCatalog(
FunctionIdentifier(funcDefinition.identifier.funcName, Some(db)))
val newFuncDefinition = funcDefinition.copy(identifier = identifier)
if (!functionExists(identifier)) {
externalCatalog.createFunction(db, newFuncDefinition)
@@ -1381,7 +1383,7 @@
def dropFunction(name: FunctionIdentifier, ignoreIfNotExists: Boolean): Unit = {
val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
requireDbExists(db)
val identifier = name.copy(database = Some(db))
val identifier = attachSessionCatalog(name.copy(database = Some(db)))
if (functionExists(identifier)) {
if (functionRegistry.functionExists(identifier)) {
// If we have loaded this function into the FunctionRegistry,
@@ -1403,7 +1405,8 @@
def alterFunction(funcDefinition: CatalogFunction): Unit = {
val db = formatDatabaseName(funcDefinition.identifier.database.getOrElse(getCurrentDatabase))
requireDbExists(db)
val identifier = FunctionIdentifier(funcDefinition.identifier.funcName, Some(db))
val identifier = attachSessionCatalog(
FunctionIdentifier(funcDefinition.identifier.funcName, Some(db)))
val newFuncDefinition = funcDefinition.copy(identifier = identifier)
if (functionExists(identifier)) {
if (functionRegistry.functionExists(identifier)) {
@@ -1654,7 +1657,7 @@
*/
def lookupPersistentFunction(name: FunctionIdentifier): ExpressionInfo = {
val database = name.database.orElse(Some(currentDb)).map(formatDatabaseName)
val qualifiedName = name.copy(database = database)
val qualifiedName = attachSessionCatalog(name.copy(database = database))
functionRegistry.lookupFunction(qualifiedName)
.orElse(tableFunctionRegistry.lookupFunction(qualifiedName))
.getOrElse {
@@ -1798,7 +1801,7 @@
functions.map {
case f if FunctionRegistry.functionSet.contains(f) => (f, "SYSTEM")
case f if TableFunctionRegistry.functionSet.contains(f) => (f, "SYSTEM")
case f => (f, "USER")
case f => (attachSessionCatalog(f), "USER")
}.distinct
}


sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
@@ -393,6 +393,7 @@ case class CatalogTable(
if (lastAccessTime <= 0) "UNKNOWN" else new Date(lastAccessTime).toString
}

identifier.catalog.foreach(map.put("Catalog", _))
identifier.database.foreach(map.put("Database", _))
map.put("Table", identifier.table)
if (owner != null && owner.nonEmpty) map.put("Owner", owner)
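
With the new `Catalog` entry above, `DESC TABLE EXTENDED` is expected to surface the catalog name. A minimal sketch, assuming a session `spark` and an existing v1 table `t` in the `default` database (the output rows are illustrative):

```scala
spark.sql("DESC TABLE EXTENDED t").show(truncate = false)
// The "# Detailed Table Information" section now leads with a Catalog row:
// |Catalog  |spark_catalog|
// |Database |default      |
// |Table    |t            |
```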

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/identifiers.scala
@@ -17,16 +17,20 @@

package org.apache.spark.sql.catalyst

import org.apache.spark.sql.connector.catalog.CatalogManager.SESSION_CATALOG_NAME
import org.apache.spark.sql.internal.{SQLConf, StaticSQLConf}

/**
* An identifier that optionally specifies a database.
*
* Format (unquoted): "name" or "db.name"
* Format (quoted): "`name`" or "`db`.`name`"
*/
sealed trait IdentifierWithDatabase {
sealed trait CatalystIdentifier {
val identifier: String

def database: Option[String]
def catalog: Option[String]

/*
* Escapes back-ticks within the identifier name with double-back-ticks.
@@ -35,18 +39,53 @@ sealed trait IdentifierWithDatabase {

def quotedString: String = {
val replacedId = quoteIdentifier(identifier)
val replacedDb = database.map(quoteIdentifier(_))

if (replacedDb.isDefined) s"`${replacedDb.get}`.`$replacedId`" else s"`$replacedId`"
val replacedDb = database.map(quoteIdentifier)
val replacedCatalog = catalog.map(quoteIdentifier)

if (replacedCatalog.isDefined && replacedDb.isDefined) {
s"`${replacedCatalog.get}`.`${replacedDb.get}`.`$replacedId`"
} else if (replacedDb.isDefined) {
s"`${replacedDb.get}`.`$replacedId`"
} else {
s"`$replacedId`"
}
}

def unquotedString: String = {
if (database.isDefined) s"${database.get}.$identifier" else identifier
if (catalog.isDefined && database.isDefined) {
s"${catalog.get}.${database.get}.$identifier"
} else if (database.isDefined) {
s"${database.get}.$identifier"
} else {
identifier
}
}

override def toString: String = quotedString
}

object CatalystIdentifier {
private def sessionCatalogOption(database: Option[String]): Option[String] = {
if (!SQLConf.get.getConf(SQLConf.LEGACY_NON_IDENTIFIER_OUTPUT_CATALOG_NAME) &&
database.isDefined &&
database.get != SQLConf.get.getConf(StaticSQLConf.GLOBAL_TEMP_DATABASE)) {
Some(SESSION_CATALOG_NAME)
} else {
None
}
}

def attachSessionCatalog(identifier: TableIdentifier): TableIdentifier = {
val catalog = identifier.catalog.orElse(sessionCatalogOption(identifier.database))
identifier.copy(catalog = catalog)
}

def attachSessionCatalog(identifier: FunctionIdentifier): FunctionIdentifier = {
val catalog = identifier.catalog.orElse(sessionCatalogOption(identifier.database))
identifier.copy(catalog = catalog)
}
}

/**
* Encapsulates an identifier that is either a alias name or an identifier that has table
* name and a qualifier.
@@ -72,12 +111,13 @@ object AliasIdentifier {
* When we register a permanent function in the FunctionRegistry, we use
* unquotedString as the function name.
*/
case class TableIdentifier(table: String, database: Option[String])
extends IdentifierWithDatabase {
case class TableIdentifier(table: String, database: Option[String], catalog: Option[String])
extends CatalystIdentifier {

override val identifier: String = table

def this(table: String) = this(table, None)
def this(table: String) = this(table, None, None)
def this(table: String, database: Option[String]) = this(table, database, None)
}

/** A fully qualified identifier for a table (i.e., database.tableName) */
@@ -87,23 +127,28 @@ case class QualifiedTableName(database: String, name: String) {

object TableIdentifier {
def apply(tableName: String): TableIdentifier = new TableIdentifier(tableName)
def apply(table: String, database: Option[String]): TableIdentifier =
new TableIdentifier(table, database)
}


/**
* Identifies a function in a database.
* If `database` is not defined, the current database is used.
*/
case class FunctionIdentifier(funcName: String, database: Option[String])
extends IdentifierWithDatabase {
case class FunctionIdentifier(funcName: String, database: Option[String], catalog: Option[String])
extends CatalystIdentifier {

override val identifier: String = funcName

def this(funcName: String) = this(funcName, None)
def this(funcName: String) = this(funcName, None, None)
def this(table: String, database: Option[String]) = this(table, database, None)

override def toString: String = unquotedString
}

object FunctionIdentifier {
def apply(funcName: String): FunctionIdentifier = new FunctionIdentifier(funcName)
def apply(funcName: String, database: Option[String]): FunctionIdentifier =
new FunctionIdentifier(funcName, database)
}
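
To make the injection step concrete, here is a minimal sketch of how `attachSessionCatalog` behaves under default configs (the `=>` comments show illustrative results):

```scala
import org.apache.spark.sql.catalyst.CatalystIdentifier.attachSessionCatalog
import org.apache.spark.sql.catalyst.TableIdentifier

// A database-qualified identifier picks up the session catalog name.
attachSessionCatalog(TableIdentifier("t", Some("default")))
// => TableIdentifier("t", Some("default"), Some("spark_catalog"))

// With no database there is nothing to qualify; the global-temp database
// and the legacy flag likewise leave the identifier untouched.
attachSessionCatalog(TableIdentifier("t"))
// => TableIdentifier("t", None, None)
```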

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
@@ -28,7 +28,7 @@ import org.json4s.JsonAST._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._

import org.apache.spark.sql.catalyst.{AliasIdentifier, IdentifierWithDatabase}
import org.apache.spark.sql.catalyst.{AliasIdentifier, CatalystIdentifier}
import org.apache.spark.sql.catalyst.ScalaReflection._
import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogStorageFormat, CatalogTable, CatalogTableType, FunctionResource}
import org.apache.spark.sql.catalyst.expressions._
@@ -1161,7 +1161,7 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] extends Product with Tre
private def shouldConvertToJson(product: Product): Boolean = product match {
case exprId: ExprId => true
case field: StructField => true
case id: IdentifierWithDatabase => true
case id: CatalystIdentifier => true
case alias: AliasIdentifier => true
case join: JoinType => true
case spec: BucketSpec => true

sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql.connector.catalog
import scala.collection.mutable

import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
import org.apache.spark.sql.catalyst.CatalystIdentifier._
import org.apache.spark.sql.catalyst.catalog.BucketSpec
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.util.quoteIfNeeded
@@ -132,15 +133,17 @@ private[sql] object CatalogV2Implicits {

def asTableIdentifier: TableIdentifier = ident.namespace match {
case ns if ns.isEmpty => TableIdentifier(ident.name)
case Array(dbName) => TableIdentifier(ident.name, Some(dbName))
case Array(dbName) =>
attachSessionCatalog(TableIdentifier(ident.name, Some(dbName)))
case _ =>
throw QueryCompilationErrors.identifierHavingMoreThanTwoNamePartsError(
quoted, "TableIdentifier")
}

def asFunctionIdentifier: FunctionIdentifier = ident.namespace() match {
case ns if ns.isEmpty => FunctionIdentifier(ident.name())
case Array(dbName) => FunctionIdentifier(ident.name(), Some(dbName))
case Array(dbName) =>
attachSessionCatalog(FunctionIdentifier(ident.name(), Some(dbName)))
case _ =>
throw QueryCompilationErrors.identifierHavingMoreThanTwoNamePartsError(
quoted, "FunctionIdentifier")

sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/LookupCatalog.scala
@@ -18,7 +18,7 @@
package org.apache.spark.sql.connector.catalog

import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.{CatalystIdentifier, TableIdentifier}
import org.apache.spark.sql.internal.{SQLConf, StaticSQLConf}

/**
@@ -137,7 +138,8 @@ private[sql] trait LookupCatalog extends Logging {
def unapply(parts: Seq[String]): Option[TableIdentifier] = {
def namesToTableIdentifier(names: Seq[String]): Option[TableIdentifier] = names match {
case Seq(name) => Some(TableIdentifier(name))
case Seq(database, name) => Some(TableIdentifier(name, Some(database)))
case Seq(database, name) =>
Some(CatalystIdentifier.attachSessionCatalog(TableIdentifier(name, Some(database))))
case _ => None
}
parts match {

sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -3848,6 +3848,16 @@ object SQLConf {
.booleanConf
.createWithDefault(false)

val LEGACY_NON_IDENTIFIER_OUTPUT_CATALOG_NAME =
buildConf("spark.sql.legacy.v1IdentifierNoCatalog")
.internal()
.doc(s"When set to false, the v1 identifier will include '$SESSION_CATALOG_NAME' as " +
"the catalog name if database is defined. When set to true, it restores the legacy " +
"behavior that does not include catalog name.")
.version("3.4.0")
.booleanConf
.createWithDefault(false)

/**
* Holds information about keys that have been deprecated.
*

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
@@ -31,6 +31,7 @@ import org.apache.spark.sql.catalyst.analysis.{FunctionAlreadyExistsException, N
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.util.ResolveDefaultColumns
import org.apache.spark.sql.connector.catalog.CatalogManager.SESSION_CATALOG_NAME
import org.apache.spark.sql.connector.catalog.SupportsNamespaces.PROP_OWNER
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
@@ -768,8 +769,8 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac
test("get function") {
val catalog = newBasicCatalog()
assert(catalog.getFunction("db2", "func1") ==
CatalogFunction(FunctionIdentifier("func1", Some("db2")), funcClass,
Seq.empty[FunctionResource]))
CatalogFunction(FunctionIdentifier("func1", Some("db2"), Some(SESSION_CATALOG_NAME)),
funcClass, Seq.empty[FunctionResource]))
intercept[NoSuchFunctionException] {
catalog.getFunction("db2", "does_not_exist")
}