diff --git a/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcExceptionConverter.scala b/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcExceptionConverter.scala
index 3247bc6438027..f29291594069d 100644
--- a/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcExceptionConverter.scala
+++ b/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcExceptionConverter.scala
@@ -31,7 +31,7 @@ import org.apache.spark.{QueryContext, QueryContextType, SparkArithmeticExceptio
 import org.apache.spark.connect.proto.{FetchErrorDetailsRequest, FetchErrorDetailsResponse, SparkConnectServiceGrpc, UserContext}
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.analysis.{NamespaceAlreadyExistsException, NoSuchDatabaseException, NoSuchTableException, TableAlreadyExistsException, TempTableAlreadyExistsException}
+import org.apache.spark.sql.catalyst.analysis.{NamespaceAlreadyExistsException, NoSuchDatabaseException, NoSuchNamespaceException, NoSuchTableException, TableAlreadyExistsException, TempTableAlreadyExistsException}
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.trees.Origin
 import org.apache.spark.sql.streaming.StreamingQueryException
@@ -219,6 +219,8 @@ private[client] object GrpcExceptionConverter {
         params.errorClass.orNull,
         params.messageParameters,
         params.cause)),
+    errorConstructor(params =>
+      new NoSuchNamespaceException(params.errorClass.orNull, params.messageParameters)),
     errorConstructor(params =>
       new NoSuchTableException(params.errorClass.orNull, params.messageParameters, params.cause)),
     errorConstructor[NumberFormatException](params =>
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala
index ef0b4607fea6b..95410ab6a7e7d 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala
@@ -33,7 +33,7 @@ import org.scalatest.PrivateMethodTester
 
 import org.apache.spark.{SparkArithmeticException, SparkException, SparkUpgradeException}
 import org.apache.spark.SparkBuildInfo.{spark_version => SPARK_VERSION}
-import org.apache.spark.sql.catalyst.analysis.{NamespaceAlreadyExistsException, NoSuchDatabaseException, TableAlreadyExistsException, TempTableAlreadyExistsException}
+import org.apache.spark.sql.catalyst.analysis.{NamespaceAlreadyExistsException, NoSuchNamespaceException, TableAlreadyExistsException, TempTableAlreadyExistsException}
 import org.apache.spark.sql.catalyst.encoders.AgnosticEncoders.StringEncoder
 import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
 import org.apache.spark.sql.catalyst.parser.ParseException
@@ -165,8 +165,8 @@ class ClientE2ETestSuite
     }
   }
 
-  test("throw NoSuchDatabaseException") {
-    val ex = intercept[NoSuchDatabaseException] {
+  test("throw NoSuchNamespaceException") {
+    val ex = intercept[NoSuchNamespaceException] {
       spark.sql("use database123")
     }
     assert(ex.getErrorClass != null)
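The GrpcExceptionConverter hunk above registers a client-side constructor for NoSuchNamespaceException, so a Spark Connect client can now rehydrate the server's SCHEMA_NOT_FOUND failure as that exception type instead of a generic one. A minimal sketch of the client-visible behavior the updated ClientE2ETestSuite test asserts (assumes a Spark Connect server at sc://localhost:15002; `database123` is simply a namespace that does not exist, as in the test):

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException

object NoSuchNamespaceDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().remote("sc://localhost:15002").getOrCreate()
    try {
      spark.sql("use database123") // this namespace does not exist
    } catch {
      // With the new errorConstructor the error arrives on the client as
      // NoSuchNamespaceException, carrying the server-side error class.
      case e: NoSuchNamespaceException =>
        assert(e.getErrorClass != null) // SCHEMA_NOT_FOUND in this scenario
    }
  }
}
```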
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index 0e0852d0a550d..fa271eee73d02 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -250,7 +250,7 @@ class SessionCatalog(
 
   private def requireDbExists(db: String): Unit = {
     if (!databaseExists(db)) {
-      throw new NoSuchDatabaseException(db)
+      throw new NoSuchNamespaceException(Seq(CatalogManager.SESSION_CATALOG_NAME, db))
     }
   }
 
@@ -291,7 +291,8 @@ class SessionCatalog(
   def dropDatabase(db: String, ignoreIfNotExists: Boolean, cascade: Boolean): Unit = {
     val dbName = format(db)
     if (dbName == DEFAULT_DATABASE) {
-      throw QueryCompilationErrors.cannotDropDefaultDatabaseError(dbName)
+      throw QueryCompilationErrors.cannotDropDefaultDatabaseError(
+        Seq(CatalogManager.SESSION_CATALOG_NAME, dbName))
     }
     if (!ignoreIfNotExists) {
       requireDbExists(dbName)
@@ -527,7 +528,7 @@ class SessionCatalog(
    * We replace char/varchar with "annotated" string type in the table schema, as the query
    * engine doesn't support char/varchar yet.
    */
-  @throws[NoSuchDatabaseException]
+  @throws[NoSuchNamespaceException]
   @throws[NoSuchTableException]
   def getTableMetadata(name: TableIdentifier): CatalogTable = {
     val t = getTableRawMetadata(name)
@@ -538,7 +539,7 @@ class SessionCatalog(
    * Retrieve the metadata of an existing permanent table/view. If no database is specified,
    * assume the table/view is in the current database.
    */
-  @throws[NoSuchDatabaseException]
+  @throws[NoSuchNamespaceException]
   @throws[NoSuchTableException]
   def getTableRawMetadata(name: TableIdentifier): CatalogTable = {
     val qualifiedIdent = qualifyIdentifier(name)
@@ -556,7 +557,7 @@ class SessionCatalog(
    * For example, if none of the requested tables could be retrieved, an empty list is returned.
    * There is no guarantee of ordering of the returned tables.
    */
-  @throws[NoSuchDatabaseException]
+  @throws[NoSuchNamespaceException]
   def getTablesByName(names: Seq[TableIdentifier]): Seq[CatalogTable] = {
     if (names.nonEmpty) {
       val qualifiedIdents = names.map(qualifyIdentifier)
@@ -1056,7 +1057,6 @@ class SessionCatalog(
       getTempViewOrPermanentTableMetadata(ident).tableType == CatalogTableType.VIEW
     } catch {
       case _: NoSuchTableException => false
-      case _: NoSuchDatabaseException => false
       case _: NoSuchNamespaceException => false
     }
   }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 7e0e2f80505df..7d32bd48f1659 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -1072,10 +1072,10 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
       messageParameters = Map("database" -> database))
   }
 
-  def cannotDropDefaultDatabaseError(database: String): Throwable = {
+  def cannotDropDefaultDatabaseError(nameParts: Seq[String]): Throwable = {
     new AnalysisException(
       errorClass = "UNSUPPORTED_FEATURE.DROP_DATABASE",
-      messageParameters = Map("database" -> toSQLId(database)))
+      messageParameters = Map("database" -> toSQLId(nameParts)))
   }
 
   def cannotUsePreservedDatabaseAsCurrentDatabaseError(database: String): Throwable = {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
index 48f829548bb65..f5f6fac96872f 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
@@ -229,7 +229,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
 
   test("get database should throw exception when the database does not exist") {
     withBasicCatalog { catalog =>
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.getDatabaseMetadata("db_that_does_not_exist")
       }
     }
@@ -283,7 +283,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
 
   test("drop database when the database does not exist") {
     withBasicCatalog { catalog =>
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.dropDatabase("db_that_does_not_exist", ignoreIfNotExists = false, cascade = false)
       }
       catalog.dropDatabase("db_that_does_not_exist", ignoreIfNotExists = true, cascade = false)
@@ -295,7 +295,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
       catalog.setCurrentDatabase("db1")
       assert(catalog.getCurrentDatabase == "db1")
       catalog.dropDatabase("db1", ignoreIfNotExists = false, cascade = true)
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.createTable(newTable("tbl1", "db1"), ignoreIfExists = false)
       }
       catalog.setCurrentDatabase("default")
@@ -321,7 +321,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
 
   test("alter database should throw exception when the database does not exist") {
     withBasicCatalog { catalog =>
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.alterDatabase(newDb("unknown_db"))
       }
     }
@@ -332,7 +332,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
       assert(catalog.getCurrentDatabase == "default")
       catalog.setCurrentDatabase("db2")
       assert(catalog.getCurrentDatabase == "db2")
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.setCurrentDatabase("deebo")
       }
       catalog.createDatabase(newDb("deebo"), ignoreIfExists = false)
@@ -370,10 +370,10 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
   test("create table when database does not exist") {
     withBasicCatalog { catalog =>
       // Creating table in non-existent database should always fail
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.createTable(newTable("tbl1", "does_not_exist"), ignoreIfExists = false)
       }
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.createTable(newTable("tbl1", "does_not_exist"), ignoreIfExists = true)
       }
       // Table already exists
@@ -419,11 +419,11 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
   test("drop table when database/table does not exist") {
     withBasicCatalog { catalog =>
       // Should always throw exception when the database does not exist
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.dropTable(TableIdentifier("tbl1", Some("unknown_db")), ignoreIfNotExists = false,
           purge = false)
       }
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.dropTable(TableIdentifier("tbl1", Some("unknown_db")), ignoreIfNotExists = true,
           purge = false)
       }
@@ -494,7 +494,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
 
   test("rename table when database/table does not exist") {
     withBasicCatalog { catalog =>
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.renameTable(TableIdentifier("tbl1", Some("unknown_db")), TableIdentifier("tbl2"))
       }
       intercept[NoSuchTableException] {
@@ -543,7 +543,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
 
   test("alter table when database/table does not exist") {
     withBasicCatalog { catalog =>
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.alterTable(newTable("tbl1", "unknown_db"))
       }
       intercept[NoSuchTableException] {
@@ -608,7 +608,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
 
   test("get table when database/table does not exist") {
     withBasicCatalog { catalog =>
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.getTableMetadata(TableIdentifier("tbl1", Some("unknown_db")))
       }
       intercept[NoSuchTableException] {
@@ -856,7 +856,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
         TableIdentifier("tbl4"),
         TableIdentifier("tbl1", Some("db2")),
         TableIdentifier("tbl2", Some("db2"))))
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.listTables("unknown_db")
       }
     }
@@ -876,7 +876,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
         TableIdentifier("tbl2", Some("db2"))))
       assert(catalog.listTables("db2", "*1").toSet ==
         Set(TableIdentifier("tbl1"), TableIdentifier("tbl1", Some("db2"))))
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.listTables("unknown_db", "*")
       }
     }
@@ -970,7 +970,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
 
   test("create partitions when database/table does not exist") {
     withBasicCatalog { catalog =>
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.createPartitions(
           TableIdentifier("tbl1", Some("unknown_db")), Seq(), ignoreIfExists = false)
       }
@@ -1077,7 +1077,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
 
   test("drop partitions when database/table does not exist") {
     withBasicCatalog { catalog =>
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.dropPartitions(
           TableIdentifier("tbl1", Some("unknown_db")),
           Seq(),
@@ -1177,7 +1177,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
 
   test("get partition when database/table does not exist") {
     withBasicCatalog { catalog =>
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.getPartition(TableIdentifier("tbl1", Some("unknown_db")), part1.spec)
       }
       intercept[NoSuchTableException] {
@@ -1258,7 +1258,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
 
   test("rename partitions when database/table does not exist") {
     withBasicCatalog { catalog =>
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.renamePartitions(
           TableIdentifier("tbl1", Some("unknown_db")), Seq(part1.spec), Seq(part2.spec))
       }
@@ -1349,7 +1349,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
 
   test("alter partitions when database/table does not exist") {
     withBasicCatalog { catalog =>
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.alterPartitions(TableIdentifier("tbl1", Some("unknown_db")), Seq(part1))
       }
       intercept[NoSuchTableException] {
@@ -1497,7 +1497,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
 
   test("list partitions when database/table does not exist") {
     withBasicCatalog { catalog =>
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.listPartitions(TableIdentifier("tbl1", Some("unknown_db")))
       }
       intercept[NoSuchTableException] {
@@ -1544,7 +1544,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
 
   test("create function when database does not exist") {
     withBasicCatalog { catalog =>
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.createFunction(
           newFunc("func5", Some("does_not_exist")), ignoreIfExists = false)
       }
@@ -1687,7 +1687,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
 
   test("drop function when database/function does not exist") {
     withBasicCatalog { catalog =>
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.dropFunction(
           FunctionIdentifier("something", Some("unknown_db")), ignoreIfNotExists = false)
       }
@@ -1746,7 +1746,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
 
   test("get function when database/function does not exist") {
     withBasicCatalog { catalog =>
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.getFunctionMetadata(FunctionIdentifier("func1", Some("unknown_db")))
       }
       intercept[NoSuchFunctionException] {
@@ -1799,7 +1799,7 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
 
   test("list functions when database does not exist") {
     withBasicCatalog { catalog =>
-      intercept[NoSuchDatabaseException] {
+      intercept[NoSuchNamespaceException] {
         catalog.listFunctions("unknown_db", "func*")
       }
     }
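All of the SessionCatalogSuite updates above follow from the requireDbExists change: the session catalog now raises NoSuchNamespaceException with Seq(CatalogManager.SESSION_CATALOG_NAME, db), and the SCHEMA_NOT_FOUND template renders those parts as a dotted, backquoted identifier. A simplified stand-in for the internal quoting (illustrative only, not Spark's actual toSQLId implementation):

```scala
// Backtick-quote each name part, escaping embedded backticks, then join with dots.
def toSQLId(parts: Seq[String]): String =
  parts.map(p => "`" + p.replace("`", "``") + "`").mkString(".")

// Matches the expectations updated throughout the test suites in this patch:
assert(toSQLId(Seq("spark_catalog", "db_that_does_not_exist")) ==
  "`spark_catalog`.`db_that_does_not_exist`")
```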
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryTableCatalog.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryTableCatalog.scala
index 654fa0719cf82..0515237adfae5 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryTableCatalog.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryTableCatalog.scala
@@ -210,7 +210,7 @@ class InMemoryTableCatalog extends BasicInMemoryTableCatalog with SupportsNamesp
       case _ if namespaceExists(namespace) =>
         util.Collections.emptyMap[String, String]
       case _ =>
-        throw new NoSuchNamespaceException(namespace)
+        throw new NoSuchNamespaceException(name() +: namespace)
     }
   }
 
@@ -256,7 +256,7 @@ class InMemoryTableCatalog extends BasicInMemoryTableCatalog with SupportsNamesp
     if (namespace.isEmpty || namespaceExists(namespace)) {
       super.listTables(namespace)
     } else {
-      throw new NoSuchNamespaceException(namespace)
+      throw new NoSuchNamespaceException(name() +: namespace)
     }
   }
 }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DropNamespaceExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DropNamespaceExec.scala
index 5d302055e7d91..2f995ec938147 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DropNamespaceExec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DropNamespaceExec.scala
@@ -45,7 +45,7 @@ case class DropNamespaceExec(
         throw QueryCompilationErrors.cannotDropNonemptyNamespaceError(namespace)
       }
     } else if (!ifExists) {
-      throw QueryCompilationErrors.noSuchNamespaceError(ns)
+      throw QueryCompilationErrors.noSuchNamespaceError(catalog.name() +: ns)
     }
 
     Seq.empty
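The `name() +: namespace` pattern in the two hunks above recurs throughout this patch: the catalog prepends its own name to the namespace parts before raising the error, so the message identifies which catalog was searched. A hypothetical helper showing how a custom V2 catalog might adopt the same convention (sketch; `catalogName` and `exists` stand in for the catalog's name() and its own existence check):

```scala
import org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException

// Prepending the catalog name yields the catalog-qualified parts that
// SCHEMA_NOT_FOUND reports, e.g. Array("testcat", "ns1") -> `testcat`.`ns1`.
def throwIfMissing(
    catalogName: String,
    namespace: Array[String],
    exists: Array[String] => Boolean): Unit = {
  if (!exists(namespace)) {
    throw new NoSuchNamespaceException(catalogName +: namespace)
  }
}
```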
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
index e619c59a7540c..bc1e2c92faa83 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
@@ -25,7 +25,7 @@ import scala.jdk.CollectionConverters._
 
 import org.apache.spark.SparkUnsupportedOperationException
 import org.apache.spark.sql.catalyst.{FunctionIdentifier, QualifiedTableName, SQLConfHelper, TableIdentifier}
-import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchTableException, TableAlreadyExistsException}
+import org.apache.spark.sql.catalyst.analysis.{NoSuchNamespaceException, NoSuchTableException, TableAlreadyExistsException}
 import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, CatalogStorageFormat, CatalogTable, CatalogTableType, CatalogUtils, ClusterBySpec, SessionCatalog}
 import org.apache.spark.sql.catalyst.util.TypeUtils._
 import org.apache.spark.sql.connector.catalog.{CatalogManager, CatalogV2Util, Column, FunctionCatalog, Identifier, NamespaceChange, SupportsNamespaces, Table, TableCatalog, TableCatalogCapability, TableChange, V1Table}
@@ -68,7 +68,7 @@ class V2SessionCatalog(catalog: SessionCatalog)
         .map(ident => Identifier.of(ident.database.map(Array(_)).getOrElse(Array()), ident.table))
         .toArray
       case _ =>
-        throw QueryCompilationErrors.noSuchNamespaceError(namespace)
+        throw QueryCompilationErrors.noSuchNamespaceError(name() +: namespace)
     }
   }
 
@@ -123,7 +123,7 @@ class V2SessionCatalog(catalog: SessionCatalog)
         V1Table(table)
       }
     } catch {
-      case _: NoSuchDatabaseException =>
+      case _: NoSuchNamespaceException =>
         throw QueryCompilationErrors.noSuchTableError(ident)
     }
   }
@@ -380,7 +380,7 @@ class V2SessionCatalog(catalog: SessionCatalog)
       case Array(db) if catalog.databaseExists(db) =>
         Array()
       case _ =>
-        throw QueryCompilationErrors.noSuchNamespaceError(namespace)
+        throw QueryCompilationErrors.noSuchNamespaceError(name() +: namespace)
     }
   }
 
@@ -390,12 +390,12 @@ class V2SessionCatalog(catalog: SessionCatalog)
       try {
        catalog.getDatabaseMetadata(db).toMetadata
       } catch {
-        case _: NoSuchDatabaseException =>
-          throw QueryCompilationErrors.noSuchNamespaceError(namespace)
+        case _: NoSuchNamespaceException =>
+          throw QueryCompilationErrors.noSuchNamespaceError(name() +: namespace)
       }
 
     case _ =>
-      throw QueryCompilationErrors.noSuchNamespaceError(namespace)
+      throw QueryCompilationErrors.noSuchNamespaceError(name() +: namespace)
   }
 
@@ -430,7 +430,7 @@ class V2SessionCatalog(catalog: SessionCatalog)
           toCatalogDatabase(db, CatalogV2Util.applyNamespaceChanges(metadata, changes)))
 
       case _ =>
-        throw QueryCompilationErrors.noSuchNamespaceError(namespace)
+        throw QueryCompilationErrors.noSuchNamespaceError(name() +: namespace)
     }
   }
 
@@ -446,7 +446,7 @@ class V2SessionCatalog(catalog: SessionCatalog)
         false
 
       case _ =>
-        throw QueryCompilationErrors.noSuchNamespaceError(namespace)
+        throw QueryCompilationErrors.noSuchNamespaceError(name() +: namespace)
   }
 
   def isTempView(ident: Identifier): Boolean = {
@@ -465,7 +465,7 @@ class V2SessionCatalog(catalog: SessionCatalog)
         Identifier.of(Array(funcIdent.database.get), funcIdent.identifier)
       }.toArray
       case _ =>
-        throw QueryCompilationErrors.noSuchNamespaceError(namespace)
+        throw QueryCompilationErrors.noSuchNamespaceError(name() +: namespace)
     }
   }
 
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/double-quoted-identifiers-disabled.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/double-quoted-identifiers-disabled.sql.out
index f7b0e3370f9f4..a02bf525f947d 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/double-quoted-identifiers-disabled.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/double-quoted-identifiers-disabled.sql.out
@@ -134,12 +134,12 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
 
 -- !query
 USE SCHEMA `not_exist`
 -- !query analysis
-org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
+org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
 {
   "errorClass" : "SCHEMA_NOT_FOUND",
   "sqlState" : "42704",
   "messageParameters" : {
-    "schemaName" : "`not_exist`"
+    "schemaName" : "`spark_catalog`.`not_exist`"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/double-quoted-identifiers-enabled.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/double-quoted-identifiers-enabled.sql.out
index f241f9bd6867c..22dfeac5fd0b6 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/double-quoted-identifiers-enabled.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/double-quoted-identifiers-enabled.sql.out
@@ -22,12 +22,12 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
 
 -- !query
 USE SCHEMA "not_exist"
 -- !query analysis
-org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
+org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
 {
   "errorClass" : "SCHEMA_NOT_FOUND",
   "sqlState" : "42704",
   "messageParameters" : {
-    "schemaName" : "`not_exist`"
+    "schemaName" : "`spark_catalog`.`not_exist`"
   }
 }
@@ -177,12 +177,12 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
 
 -- !query
 USE SCHEMA `not_exist`
 -- !query analysis
-org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
+org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
 {
   "errorClass" : "SCHEMA_NOT_FOUND",
   "sqlState" : "42704",
   "messageParameters" : {
-    "schemaName" : "`not_exist`"
+    "schemaName" : "`spark_catalog`.`not_exist`"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/double-quoted-identifiers.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/double-quoted-identifiers.sql.out
index f7b0e3370f9f4..a02bf525f947d 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/double-quoted-identifiers.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/double-quoted-identifiers.sql.out
@@ -134,12 +134,12 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
 
 -- !query
 USE SCHEMA `not_exist`
 -- !query analysis
-org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
+org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
 {
   "errorClass" : "SCHEMA_NOT_FOUND",
   "sqlState" : "42704",
   "messageParameters" : {
-    "schemaName" : "`not_exist`"
+    "schemaName" : "`spark_catalog`.`not_exist`"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/show-views.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/show-views.sql.out
index ed3690ec5c6a3..d092590b143b5 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/show-views.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/show-views.sql.out
@@ -103,12 +103,12 @@ ShowViewsCommand showdb, view_*, [namespace#x, viewName#x, isTemporary#x]
 
 -- !query
 SHOW VIEWS IN wrongdb LIKE 'view_*'
 -- !query analysis
-org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
+org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
 {
   "errorClass" : "SCHEMA_NOT_FOUND",
   "sqlState" : "42704",
   "messageParameters" : {
-    "schemaName" : "`wrongdb`"
+    "schemaName" : "`spark_catalog`.`wrongdb`"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-disabled.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-disabled.sql.out
index 861afcc7b1005..81a98a60590f0 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-disabled.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-disabled.sql.out
@@ -154,12 +154,12 @@
 USE SCHEMA `not_exist`
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
+org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
 {
   "errorClass" : "SCHEMA_NOT_FOUND",
   "sqlState" : "42704",
   "messageParameters" : {
-    "schemaName" : "`not_exist`"
+    "schemaName" : "`spark_catalog`.`not_exist`"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-enabled.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-enabled.sql.out
index 2854e09aab6bd..2444c399a87ec 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-enabled.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/double-quoted-identifiers-enabled.sql.out
@@ -26,12 +26,12 @@
 USE SCHEMA "not_exist"
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
+org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
 {
   "errorClass" : "SCHEMA_NOT_FOUND",
   "sqlState" : "42704",
   "messageParameters" : {
-    "schemaName" : "`not_exist`"
+    "schemaName" : "`spark_catalog`.`not_exist`"
   }
 }
@@ -197,12 +197,12 @@
 USE SCHEMA `not_exist`
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
+org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
 {
   "errorClass" : "SCHEMA_NOT_FOUND",
   "sqlState" : "42704",
   "messageParameters" : {
-    "schemaName" : "`not_exist`"
+    "schemaName" : "`spark_catalog`.`not_exist`"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/double-quoted-identifiers.sql.out b/sql/core/src/test/resources/sql-tests/results/double-quoted-identifiers.sql.out
index 861afcc7b1005..81a98a60590f0 100644
--- a/sql/core/src/test/resources/sql-tests/results/double-quoted-identifiers.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/double-quoted-identifiers.sql.out
@@ -154,12 +154,12 @@
 USE SCHEMA `not_exist`
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
+org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
 {
   "errorClass" : "SCHEMA_NOT_FOUND",
   "sqlState" : "42704",
   "messageParameters" : {
-    "schemaName" : "`not_exist`"
+    "schemaName" : "`spark_catalog`.`not_exist`"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/show-views.sql.out b/sql/core/src/test/resources/sql-tests/results/show-views.sql.out
index bfed13683d9dd..249f5a1d95527 100644
--- a/sql/core/src/test/resources/sql-tests/results/show-views.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show-views.sql.out
@@ -138,12 +138,12 @@
 SHOW VIEWS IN wrongdb LIKE 'view_*'
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
+org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
 {
   "errorClass" : "SCHEMA_NOT_FOUND",
   "sqlState" : "42704",
   "messageParameters" : {
-    "schemaName" : "`wrongdb`"
+    "schemaName" : "`spark_catalog`.`wrongdb`"
  }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/StatisticsCollectionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/StatisticsCollectionSuite.scala
index 5f1fa2904e341..919958d304f10 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/StatisticsCollectionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/StatisticsCollectionSuite.scala
@@ -850,7 +850,7 @@ class StatisticsCollectionSuite extends StatisticsCollectionTestBase with Shared
       }
       checkError(e,
         errorClass = "SCHEMA_NOT_FOUND",
-        parameters = Map("schemaName" -> "`db_not_exists`"))
+        parameters = Map("schemaName" -> "`spark_catalog`.`db_not_exists`"))
   }
 
   test("SPARK-43383: Add rowCount statistics to LocalRelation") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index e776de7e8222c..51d7f270e1a53 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -28,7 +28,7 @@ import org.apache.spark.{SparkException, SparkRuntimeException, SparkUnsupported
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.CurrentUserContext.CURRENT_USER
 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.{CannotReplaceMissingTableException, NoSuchDatabaseException, NoSuchNamespaceException, TableAlreadyExistsException}
+import org.apache.spark.sql.catalyst.analysis.{CannotReplaceMissingTableException, NoSuchNamespaceException, TableAlreadyExistsException}
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.ColumnStat
 import org.apache.spark.sql.catalyst.statsEstimation.StatsEstimationTestBase
@@ -1427,12 +1427,12 @@ class DataSourceV2SQLSuiteV1Filter
 
   test("Use: v2 session catalog is used and namespace does not exist") {
-    val exception = intercept[NoSuchDatabaseException] {
+    val exception = intercept[AnalysisException] {
       sql("USE ns1")
     }
     checkError(exception,
       errorClass = "SCHEMA_NOT_FOUND",
-      parameters = Map("schemaName" -> "`ns1`"))
+      parameters = Map("schemaName" -> "`spark_catalog`.`ns1`"))
   }
 
   test("SPARK-31100: Use: v2 catalog that implements SupportsNamespaces is used " +
@@ -2588,7 +2588,7 @@ class DataSourceV2SQLSuiteV1Filter
     checkError(
       exception = intercept[AnalysisException](sql("COMMENT ON NAMESPACE abc IS NULL")),
       errorClass = "SCHEMA_NOT_FOUND",
-      parameters = Map("schemaName" -> "`abc`"))
+      parameters = Map("schemaName" -> "`spark_catalog`.`abc`"))
 
     // V2 non-session catalog is used.
     sql("CREATE NAMESPACE testcat.ns1")
@@ -2598,7 +2598,7 @@ class DataSourceV2SQLSuiteV1Filter
     checkError(
       exception = intercept[AnalysisException](sql("COMMENT ON NAMESPACE testcat.abc IS NULL")),
       errorClass = "SCHEMA_NOT_FOUND",
-      parameters = Map("schemaName" -> "`abc`"))
+      parameters = Map("schemaName" -> "`testcat`.`abc`"))
   }
 
   private def checkNamespaceComment(namespace: String, comment: String): Unit = {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceSetLocationSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceSetLocationSuiteBase.scala
index 5b78665d878ef..6427338a6c52e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceSetLocationSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceSetLocationSuiteBase.scala
@@ -63,7 +63,7 @@ trait AlterNamespaceSetLocationSuiteBase extends QueryTest with DDLCommandTestUt
     }
     checkError(e,
       errorClass = "SCHEMA_NOT_FOUND",
-      parameters = Map("schemaName" -> "`not_exist`"))
+      parameters = Map("schemaName" -> s"`$catalog`.`$ns`"))
   }
 
   // Hive catalog does not support "ALTER NAMESPACE ... SET LOCATION", thus
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceSetPropertiesSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceSetPropertiesSuiteBase.scala
index 7f5b3de4865c9..d2f2d75d86ce9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceSetPropertiesSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceSetPropertiesSuiteBase.scala
@@ -50,7 +50,7 @@ trait AlterNamespaceSetPropertiesSuiteBase extends QueryTest with DDLCommandTest
     }
     checkError(e,
       errorClass = "SCHEMA_NOT_FOUND",
-      parameters = Map("schemaName" -> s"`$ns`"))
+      parameters = Map("schemaName" -> s"`$catalog`.`$ns`"))
   }
 
   test("basic test") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceUnsetPropertiesSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceUnsetPropertiesSuiteBase.scala
index 1d43cc5938487..c00f3f99f41f9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceUnsetPropertiesSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceUnsetPropertiesSuiteBase.scala
@@ -57,7 +57,7 @@ trait AlterNamespaceUnsetPropertiesSuiteBase extends QueryTest with DDLCommandTe
     }
     checkError(e,
       errorClass = "SCHEMA_NOT_FOUND",
-      parameters = Map("schemaName" -> s"`$ns`"))
+      parameters = Map("schemaName" -> s"`$catalog`.`$ns`"))
   }
 
   test("basic test") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DescribeNamespaceSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DescribeNamespaceSuiteBase.scala
index 1309ba05b3f19..6945352564e1e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DescribeNamespaceSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DescribeNamespaceSuiteBase.scala
@@ -44,7 +44,7 @@ trait DescribeNamespaceSuiteBase extends QueryTest with DDLCommandTestUtils {
     }
     checkError(e,
       errorClass = "SCHEMA_NOT_FOUND",
-      parameters = Map("schemaName" -> "`db1`"))
+      parameters = Map("schemaName" -> s"`$catalog`.`$ns`"))
   }
 
   test("Keep the legacy output schema") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DropNamespaceSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DropNamespaceSuiteBase.scala
index 6eb4465124a69..2243517550b2c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DropNamespaceSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DropNamespaceSuiteBase.scala
@@ -38,7 +38,7 @@ trait DropNamespaceSuiteBase extends QueryTest with DDLCommandTestUtils {
   protected def isCasePreserving: Boolean = true
   protected def namespaceAlias: String = "namespace"
 
-  protected def checkNamespace(expected: Seq[String]) = {
+  protected def checkNamespace(expected: Seq[String]): Unit = {
     val df = spark.sql(s"SHOW NAMESPACES IN $catalog")
     assert(df.schema === new StructType().add("namespace", StringType, false))
     checkAnswer(df, expected.map(Row(_)))
@@ -65,7 +65,7 @@ trait DropNamespaceSuiteBase extends QueryTest with DDLCommandTestUtils {
     }
     checkError(e,
       errorClass = "SCHEMA_NOT_FOUND",
-      parameters = Map("schemaName" -> "`unknown`"))
+      parameters = Map("schemaName" -> s"`$catalog`.`unknown`"))
   }
 
   test("drop non-empty namespace with a non-cascading mode") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowTablesSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowTablesSuiteBase.scala
index d6b91bcf3eb8e..1890726a376ba 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowTablesSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowTablesSuiteBase.scala
@@ -169,7 +169,7 @@ trait ShowTablesSuiteBase extends QueryTest with DDLCommandTestUtils {
         sql(s"SHOW TABLES IN $catalog.nonexist")
       },
       errorClass = "SCHEMA_NOT_FOUND",
-      parameters = Map("schemaName" -> "`nonexist`"))
+      parameters = Map("schemaName" -> s"`$catalog`.`nonexist`"))
   }
 
   test("show table extended in a not existing namespace") {
@@ -178,7 +178,7 @@ trait ShowTablesSuiteBase extends QueryTest with DDLCommandTestUtils {
         sql(s"SHOW TABLE EXTENDED IN $catalog.nonexist LIKE '*tbl*'")
       },
       errorClass = "SCHEMA_NOT_FOUND",
-      parameters = Map("schemaName" -> "`nonexist`"))
+      parameters = Map("schemaName" -> s"`$catalog`.`nonexist`"))
   }
 
   test("show table extended with no matching table") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/DropNamespaceSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/DropNamespaceSuite.scala
index 647247cc833dd..cec72b8855291 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/DropNamespaceSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/DropNamespaceSuite.scala
@@ -40,7 +40,7 @@ trait DropNamespaceSuiteBase extends command.DropNamespaceSuiteBase
         sql(s"DROP NAMESPACE default")
       },
       errorClass = "UNSUPPORTED_FEATURE.DROP_DATABASE",
-      parameters = Map("database" -> "`default`")
+      parameters = Map("database" -> s"`$catalog`.`default`")
     )
   }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowNamespacesSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowNamespacesSuite.scala
index ee5ac09e00892..85a46cfb93233 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowNamespacesSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowNamespacesSuite.scala
@@ -50,7 +50,7 @@ trait ShowNamespacesSuiteBase extends command.ShowNamespacesSuiteBase {
     }
     checkError(e,
       errorClass = "SCHEMA_NOT_FOUND",
-      parameters = Map("schemaName" -> "`dummy`"))
+      parameters = Map("schemaName" -> s"`$catalog`.`dummy`"))
   }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalogSuite.scala
index 4de74af250006..50988e133005a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalogSuite.scala
@@ -28,7 +28,7 @@ import org.scalatest.BeforeAndAfter
 
 import org.apache.spark.SparkUnsupportedOperationException
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.analysis.{NamespaceAlreadyExistsException, NoSuchDatabaseException, NoSuchNamespaceException, NoSuchTableException, TableAlreadyExistsException}
+import org.apache.spark.sql.catalyst.analysis.{NamespaceAlreadyExistsException, NoSuchNamespaceException, NoSuchTableException, TableAlreadyExistsException}
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
 import org.apache.spark.sql.catalyst.util.quoteIdentifier
 import org.apache.spark.sql.connector.catalog.{CatalogV2Util, Column, Identifier, NamespaceChange, SupportsNamespaces, TableCatalog, TableChange, V1Table}
@@ -1039,7 +1039,7 @@ class V2SessionCatalogNamespaceSuite extends V2SessionCatalogBaseSuite {
 
     assert(catalog.namespaceExists(testNs) === false)
 
-    val exc = intercept[NoSuchDatabaseException] {
+    val exc = intercept[NoSuchNamespaceException] {
       catalog.createTable(testIdent, columns, emptyTrans, emptyProps)
     }
 
@@ -1156,7 +1156,7 @@ class V2SessionCatalogNamespaceSuite extends V2SessionCatalogBaseSuite {
 
     assert(catalog.namespaceExists(testNs) === false)
 
-    val exc = intercept[NoSuchDatabaseException] {
+    val exc = intercept[NoSuchNamespaceException] {
       catalog.alterNamespace(testNs, NamespaceChange.setProperty("property", "value"))
     }
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MultiDatabaseSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MultiDatabaseSuite.scala
index 3d5e2851fa7ba..55be6102a8535 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MultiDatabaseSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MultiDatabaseSuite.scala
@@ -273,7 +273,7 @@ class MultiDatabaseSuite extends QueryTest with SQLTestUtils with TestHiveSingle
       }
       checkError(e,
         errorClass = "SCHEMA_NOT_FOUND",
-        parameters = Map("schemaName" -> "`d:b`"))
+        parameters = Map("schemaName" -> "`spark_catalog`.`d:b`"))
     }
 
     {
@@ -282,7 +282,7 @@ class MultiDatabaseSuite extends QueryTest with SQLTestUtils with TestHiveSingle
       }
       checkError(e,
         errorClass = "SCHEMA_NOT_FOUND",
-        parameters = Map("schemaName" -> "`d:b`"))
+        parameters = Map("schemaName" -> "`spark_catalog`.`d:b`"))
     }
 
     withTempDir { dir =>
@@ -314,7 +314,7 @@ class MultiDatabaseSuite extends QueryTest with SQLTestUtils with TestHiveSingle
       }
       checkError(e,
        errorClass = "SCHEMA_NOT_FOUND",
-        parameters = Map("schemaName" -> "`d:b`"))
+        parameters = Map("schemaName" -> "`spark_catalog`.`d:b`"))
       }
     }
   }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index fd437e7dc954f..b959459eb3df8 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -1359,7 +1359,7 @@ class HiveDDLSuite
         sql("DROP DATABASE default")
       },
       errorClass = "UNSUPPORTED_FEATURE.DROP_DATABASE",
-      parameters = Map("database" -> "`default`")
+      parameters = Map("database" -> "`spark_catalog`.`default`")
     )
 
     // SQLConf.CASE_SENSITIVE does not affect the result
@@ -1373,7 +1373,7 @@ class HiveDDLSuite
         case _ => "_LEGACY_ERROR_TEMP_3065"
       },
       parameters = caseSensitive match {
-        case "false" => Map("database" -> "`default`")
+        case "false" => Map("database" -> "`spark_catalog`.`default`")
        case _ => Map(
           "clazz" -> "org.apache.hadoop.hive.ql.metadata.HiveException",
           "msg" -> "MetaException(message:Can not drop default database)")
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index 4d23ac0639b3e..5ccb7f0d1f84a 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -1394,7 +1394,7 @@ class HiveQuerySuite extends HiveComparisonTest with SQLTestUtils with BeforeAnd
         sql("USE not_existing_db")
       },
       errorClass = "SCHEMA_NOT_FOUND",
-      parameters = Map("schemaName" -> "`not_existing_db`"))
+      parameters = Map("schemaName" -> "`spark_catalog`.`not_existing_db`"))
 
     sql(s"USE $currentDatabase")
     assert(currentDatabase == sql("select current_database()").first().getString(0))
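Taken together, the end-user effect of the patch is that errors about missing or protected namespaces now identify the catalog that was consulted. Roughly (sketch; message wording abbreviated, the exact text comes from the SCHEMA_NOT_FOUND and UNSUPPORTED_FEATURE.DROP_DATABASE templates):

```scala
spark.sql("USE not_existing_db")
// org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException:
//   [SCHEMA_NOT_FOUND] ... `spark_catalog`.`not_existing_db` ...

spark.sql("DROP DATABASE default")
// org.apache.spark.sql.AnalysisException:
//   [UNSUPPORTED_FEATURE.DROP_DATABASE] ... `spark_catalog`.`default` ...
```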