@@ -198,7 +198,7 @@ statement
     | SHOW TABLES ((FROM | IN) multipartIdentifier)?
         (LIKE? pattern=STRING)? #showTables
     | SHOW TABLE EXTENDED ((FROM | IN) ns=multipartIdentifier)?
-        LIKE pattern=STRING partitionSpec? #showTable
+        LIKE pattern=STRING partitionSpec? #showTableExtended
     | SHOW TBLPROPERTIES table=multipartIdentifier
         ('(' key=tablePropertyKey ')')? #showTblProperties
     | SHOW COLUMNS (FROM | IN) table=multipartIdentifier
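
For reference, the renamed #showTableExtended alternative covers the same surface syntax as before; the snippet below is a hedged sketch of statements it matches, e.g. pasted into spark-shell (illustrative only, not part of the patch).

// Illustrative only: SQL shapes accepted by the showTableExtended alternative.
// Assumes a spark-shell session, i.e. a SparkSession already bound to the name spark.
// The statements only need to parse for this illustration; running them requires
// matching tables and partitions to exist.
spark.sql("SHOW TABLE EXTENDED LIKE '*test*'")                                  // pattern only
spark.sql("SHOW TABLE EXTENDED FROM ns1 LIKE '*test*'")                         // explicit namespace via FROM (or IN)
spark.sql("SHOW TABLE EXTENDED LIKE 'tbl' PARTITION (ds='2008-04-09', hr=11)")  // optional partition spec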

@@ -847,6 +847,8 @@ class Analyzer(override val catalogManager: CatalogManager)
     def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
       case s @ ShowTables(UnresolvedNamespace(Seq()), _) =>
         s.copy(namespace = ResolvedNamespace(currentCatalog, catalogManager.currentNamespace))
+      case s @ ShowTableExtended(UnresolvedNamespace(Seq()), _, _) =>
+        s.copy(namespace = ResolvedNamespace(currentCatalog, catalogManager.currentNamespace))
       case s @ ShowViews(UnresolvedNamespace(Seq()), _) =>
         s.copy(namespace = ResolvedNamespace(currentCatalog, catalogManager.currentNamespace))
       case UnresolvedNamespace(Seq()) =>
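
The added case mirrors the existing ShowTables/ShowViews handling: a ShowTableExtended parsed without a FROM/IN clause carries an empty UnresolvedNamespace, and this rule swaps in the session's current catalog and namespace. A minimal sketch of the plan shapes involved, assuming spark-catalyst on the classpath:

// Illustrative plan shapes only.
import org.apache.spark.sql.catalyst.analysis.UnresolvedNamespace
import org.apache.spark.sql.catalyst.plans.logical.ShowTableExtended

// As parsed from "SHOW TABLE EXTENDED LIKE '*test*'" (no FROM/IN clause):
val parsed = ShowTableExtended(UnresolvedNamespace(Seq.empty[String]), "*test*", None)
// After the case above fires, the empty namespace is replaced with
// ResolvedNamespace(currentCatalog, catalogManager.currentNamespace),
// e.g. spark_catalog plus the "default" database in a fresh session.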

@@ -3199,13 +3199,18 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logging
   }

   /**
-   * Create a [[ShowTableStatement]] command.
+   * Create a [[ShowTableExtended]] command.
    */
-  override def visitShowTable(ctx: ShowTableContext): LogicalPlan = withOrigin(ctx) {
-    ShowTableStatement(
-      Option(ctx.ns).map(visitMultipartIdentifier),
+  override def visitShowTableExtended(
+      ctx: ShowTableExtendedContext): LogicalPlan = withOrigin(ctx) {
+    val multiPart = Option(ctx.multipartIdentifier).map(visitMultipartIdentifier)
+    val partitionKeys = Option(ctx.partitionSpec).map { specCtx =>
+      UnresolvedPartitionSpec(visitNonOptionalPartitionSpec(specCtx), None)
+    }
+    ShowTableExtended(
+      UnresolvedNamespace(multiPart.getOrElse(Seq.empty[String])),
       string(ctx.pattern),
-      Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec))
+      partitionKeys)
   }

   /**
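
A minimal parsing sketch of what visitShowTableExtended produces; it mirrors the parser suite further down and assumes spark-catalyst on the classpath.

// Parse a statement and inspect the unresolved plan built by the visitor above.
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser

val plan = CatalystSqlParser.parsePlan(
  "SHOW TABLE EXTENDED IN ns1.ns2 LIKE '*test*' PARTITION (ds='2008-04-09')")
println(plan)
// Expected shape:
//   ShowTableExtended(
//     UnresolvedNamespace(Seq("ns1", "ns2")),
//     "*test*",
//     Some(UnresolvedPartitionSpec(Map("ds" -> "2008-04-09"), None)))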

@@ -385,15 +385,6 @@ case class InsertIntoStatement(
   override def children: Seq[LogicalPlan] = query :: Nil
 }

-/**
- * A SHOW TABLE EXTENDED statement, as parsed from SQL.
- */
-case class ShowTableStatement(
-    namespace: Option[Seq[String]],
-    pattern: String,
-    partitionSpec: Option[TablePartitionSpec])
-  extends ParsedStatement
-
 /**
  * A CREATE NAMESPACE statement, as parsed from SQL.
  */

@@ -25,7 +25,7 @@ import org.apache.spark.sql.catalyst.util.CharVarcharUtils
 import org.apache.spark.sql.connector.catalog._
 import org.apache.spark.sql.connector.catalog.TableChange.{AddColumn, ColumnChange}
 import org.apache.spark.sql.connector.expressions.Transform
-import org.apache.spark.sql.types.{DataType, MetadataBuilder, StringType, StructType}
+import org.apache.spark.sql.types.{BooleanType, DataType, MetadataBuilder, StringType, StructType}

 /**
  * Base trait for DataSourceV2 write commands
@@ -464,7 +464,7 @@ case class RenameTable(
     newIdent: Identifier) extends Command

 /**
- * The logical plan of the SHOW TABLE command.
+ * The logical plan of the SHOW TABLES command.
  */
 case class ShowTables(
     namespace: LogicalPlan,
@@ -476,6 +476,22 @@ case class ShowTables(
     AttributeReference("tableName", StringType, nullable = false)())
 }

+/**
+ * The logical plan of the SHOW TABLE EXTENDED command.
+ */
+case class ShowTableExtended(
+    namespace: LogicalPlan,
+    pattern: String,
+    partitionSpec: Option[PartitionSpec]) extends Command {
+  override def children: Seq[LogicalPlan] = namespace :: Nil
+
+  override val output: Seq[Attribute] = Seq(
+    AttributeReference("namespace", StringType, nullable = false)(),
+    AttributeReference("tableName", StringType, nullable = false)(),
+    AttributeReference("isTemporary", BooleanType, nullable = false)(),
+    AttributeReference("information", StringType, nullable = false)())
+}
+
 /**
  * The logical plan of the SHOW VIEWS command.
  *
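
The four output attributes declared above line up with what the v1 command already returns for SHOW TABLE EXTENDED. A hedged sketch of observing them, assuming a spark-shell session and a session-catalog table named people:

// Hedged sketch: the result should carry the columns declared in ShowTableExtended.output.
val df = spark.sql("SHOW TABLE EXTENDED LIKE 'people'")
df.schema.fieldNames.foreach(println)
// namespace
// tableName
// isTemporary
// information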

@@ -384,14 +384,20 @@ class ResolveSessionCatalog(
       }
       ShowTablesCommand(Some(ns.head), pattern)

-    case ShowTableStatement(ns, pattern, partitionsSpec) =>
-      val db = ns match {
-        case Some(ns) if ns.length != 1 =>
-          throw new AnalysisException(
-            s"The database name is not valid: ${ns.quoted}")
-        case _ => ns.map(_.head)
+    case ShowTableExtended(
+        SessionCatalogAndNamespace(_, ns),
+        pattern,
+        partitionSpec @ (None | Some(UnresolvedPartitionSpec(_, _)))) =>
+      assert(ns.nonEmpty)
+      if (ns.length != 1) {
+        throw new AnalysisException(
+          s"The database name is not valid: ${ns.quoted}")
       }
-      ShowTablesCommand(db, Some(pattern), true, partitionsSpec)
+      ShowTablesCommand(
+        databaseName = Some(ns.head),
+        tableIdentifierPattern = Some(pattern),
+        isExtended = true,
+        partitionSpec.map(_.asInstanceOf[UnresolvedPartitionSpec].spec))

     // ANALYZE TABLE works on permanent views if the views are cached.
     case AnalyzeTable(ResolvedV1TableOrViewIdentifier(ident), partitionSpec, noScan) =>
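
For the session catalog, the new case above rewrites the v2 ShowTableExtended plan back into the existing v1 command. A rough sketch of the resulting plan for SHOW TABLE EXTENDED FROM db LIKE 'p*' PARTITION (ds='2008-04-09'), assuming spark-sql on the classpath (the "db" and "p*" values are placeholders):

// Rough sketch of the v1 command produced by the case above.
import org.apache.spark.sql.execution.command.ShowTablesCommand

val v1Plan = ShowTablesCommand(
  Some("db"),                       // databaseName
  Some("p*"),                       // tableIdentifierPattern
  true,                             // isExtended
  Some(Map("ds" -> "2008-04-09")))  // partition spec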

@@ -291,6 +291,9 @@ class DataSourceV2Strategy(session: SparkSession) extends Strategy with PredicateHelper
     case r @ ShowTables(ResolvedNamespace(catalog, ns), pattern) =>
       ShowTablesExec(r.output, catalog.asTableCatalog, ns, pattern) :: Nil

+    case _: ShowTableExtended =>
+      throw new AnalysisException("SHOW TABLE EXTENDED is not supported for v2 tables.")
+
     case SetCatalogAndNamespace(catalogManager, catalogName, ns) =>
       SetCatalogAndNamespaceExec(catalogManager, catalogName, ns) :: Nil

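
The new strategy case makes the gap explicit: there is no v2 physical node for SHOW TABLE EXTENDED yet, so planning fails for v2 catalogs with the message the v2 test suite below asserts. A hedged sketch, assuming a spark-shell session with a v2 catalog registered as testcat:

// Hedged sketch: SHOW TABLE EXTENDED against a v2 catalog fails at planning time.
import org.apache.spark.sql.AnalysisException

try {
  spark.sql("SHOW TABLE EXTENDED IN testcat.ns LIKE '*'").collect()
} catch {
  case e: AnalysisException =>
    // Expected message: "SHOW TABLE EXTENDED is not supported for v2 tables."
    println(e.getMessage)
}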

@@ -17,9 +17,9 @@

 package org.apache.spark.sql.execution.command

-import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedNamespace}
+import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedNamespace, UnresolvedPartitionSpec}
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
-import org.apache.spark.sql.catalyst.plans.logical.{ShowTables, ShowTableStatement}
+import org.apache.spark.sql.catalyst.plans.logical.{ShowTableExtended, ShowTables}
 import org.apache.spark.sql.test.SharedSparkSession

 class ShowTablesParserSuite extends AnalysisTest with SharedSparkSession {
@@ -52,25 +52,32 @@ class ShowTablesParserSuite extends AnalysisTest with SharedSparkSession {
   test("show table extended") {
     comparePlans(
       parsePlan("SHOW TABLE EXTENDED LIKE '*test*'"),
-      ShowTableStatement(None, "*test*", None))
+      ShowTableExtended(UnresolvedNamespace(Seq.empty[String]), "*test*", None))
     comparePlans(
       parsePlan(s"SHOW TABLE EXTENDED FROM $catalog.ns1.ns2 LIKE '*test*'"),
-      ShowTableStatement(Some(Seq(catalog, "ns1", "ns2")), "*test*", None))
+      ShowTableExtended(UnresolvedNamespace(Seq(catalog, "ns1", "ns2")), "*test*", None))
     comparePlans(
       parsePlan(s"SHOW TABLE EXTENDED IN $catalog.ns1.ns2 LIKE '*test*'"),
-      ShowTableStatement(Some(Seq(catalog, "ns1", "ns2")), "*test*", None))
+      ShowTableExtended(UnresolvedNamespace(Seq(catalog, "ns1", "ns2")), "*test*", None))
     comparePlans(
       parsePlan("SHOW TABLE EXTENDED LIKE '*test*' PARTITION(ds='2008-04-09', hr=11)"),
-      ShowTableStatement(None, "*test*", Some(Map("ds" -> "2008-04-09", "hr" -> "11"))))
+      ShowTableExtended(
+        UnresolvedNamespace(Seq.empty[String]),
+        "*test*",
+        Some(UnresolvedPartitionSpec(Map("ds" -> "2008-04-09", "hr" -> "11")))))
     comparePlans(
       parsePlan(s"SHOW TABLE EXTENDED FROM $catalog.ns1.ns2 LIKE '*test*' " +
         "PARTITION(ds='2008-04-09')"),
-      ShowTableStatement(Some(Seq(catalog, "ns1", "ns2")), "*test*",
-        Some(Map("ds" -> "2008-04-09"))))
+      ShowTableExtended(
+        UnresolvedNamespace(Seq(catalog, "ns1", "ns2")),
+        "*test*",
+        Some(UnresolvedPartitionSpec(Map("ds" -> "2008-04-09")))))
     comparePlans(
       parsePlan(s"SHOW TABLE EXTENDED IN $catalog.ns1.ns2 LIKE '*test*' " +
         "PARTITION(ds='2008-04-09')"),
-      ShowTableStatement(Some(Seq(catalog, "ns1", "ns2")), "*test*",
-        Some(Map("ds" -> "2008-04-09"))))
+      ShowTableExtended(
+        UnresolvedNamespace(Seq(catalog, "ns1", "ns2")),
+        "*test*",
+        Some(UnresolvedPartitionSpec(Map("ds" -> "2008-04-09")))))
   }
 }

@@ -19,7 +19,6 @@ package org.apache.spark.sql.execution.command.v2

 import org.apache.spark.SparkConf
 import org.apache.spark.sql.{AnalysisException, Row}
-import org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
 import org.apache.spark.sql.connector.InMemoryTableCatalog
 import org.apache.spark.sql.execution.command
 import org.apache.spark.sql.test.SharedSparkSession
@@ -74,7 +73,7 @@ class ShowTablesSuite extends command.ShowTablesSuiteBase with SharedSparkSession
       val e = intercept[AnalysisException] {
         sql(sqlCommand)
       }
-      assert(e.message.contains(s"The database name is not valid: ${namespace}"))
+      assert(e.message.contains(s"SHOW TABLE EXTENDED is not supported for v2 tables"))
     }

     val namespace = s"$catalog.ns1.ns2"
@@ -101,10 +100,10 @@ class ShowTablesSuite extends command.ShowTablesSuiteBase with SharedSparkSession
     val table = "people"
     withTable(s"$catalog.$table") {
       sql(s"CREATE TABLE $catalog.$table (name STRING, id INT) $defaultUsing")
-      val errMsg = intercept[NoSuchDatabaseException] {
+      val errMsg = intercept[AnalysisException] {
        sql(s"SHOW TABLE EXTENDED FROM $catalog LIKE '*$table*'").collect()
       }.getMessage
-      assert(errMsg.contains(s"Database '$catalog' not found"))
+      assert(errMsg.contains("SHOW TABLE EXTENDED is not supported for v2 tables"))
     }
   }
 }