@@ -150,7 +150,7 @@ object AnalysisContext {
* [[UnresolvedRelation]]s into fully typed objects using information in a [[SessionCatalog]].
*/
class Analyzer(override val catalogManager: CatalogManager)
extends RuleExecutor[LogicalPlan] with CheckAnalysis with LookupCatalog with SQLConfHelper {
extends RuleExecutor[LogicalPlan] with CheckAnalysis with SQLConfHelper {

private val v1SessionCatalog: SessionCatalog = catalogManager.v1SessionCatalog

@@ -277,7 +277,7 @@ class Analyzer(override val catalogManager: CatalogManager)
TypeCoercion.typeCoercionRules ++
extendedResolutionRules : _*),
Batch("Post-Hoc Resolution", Once,
Seq(ResolveNoopDropTable) ++
Seq(ResolveCommandsWithIfExists) ++
postHocResolutionRules: _*),
Batch("Normalize Alter Table", Once, ResolveAlterTableChanges),
Batch("Remove Unresolved Hints", Once,
@@ -887,6 +887,11 @@ class Analyzer(override val catalogManager: CatalogManager)
u.failAnalysis(s"${ident.quoted} is a temp view. '$cmd' expects a table")
}
u
case u @ UnresolvedView(ident, _, _) =>
lookupTempView(ident).map { _ =>
ResolvedView(ident.asIdentifier, isTemp = true)
}
.getOrElse(u)
case u @ UnresolvedTableOrView(ident, cmd, allowTempView) =>
lookupTempView(ident)
.map { _ =>
@@ -1111,6 +1116,14 @@ class Analyzer(override val catalogManager: CatalogManager)
case table => table
}.getOrElse(u)

case u @ UnresolvedView(identifier, cmd, relationTypeMismatchHint) =>
lookupTableOrView(identifier).map {
case v: ResolvedView => v
case _ =>
u.failAnalysis(s"${identifier.quoted} is a table. '$cmd' expects a view." +
relationTypeMismatchHint.map(" " + _).getOrElse(""))
}.getOrElse(u)

case u @ UnresolvedTableOrView(identifier, _, _) =>
lookupTableOrView(identifier).getOrElse(u)
}
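Taken together, the hunks above give DROP VIEW the same two-phase resolution as the other v2 commands: ResolveTempViews resolves a temp view name to ResolvedView(isTemp = true), and the later table/view lookup rejects an identifier that resolves to a table. The following is a minimal sketch of the resulting behavior, not part of this PR; it assumes a local Spark build containing this change, and the table and view names are made up.

    // Sketch only: observable DROP VIEW behavior after this change.
    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder().master("local[*]").appName("drop-view-demo").getOrCreate()

    // A temp view is resolved by ResolveTempViews to ResolvedView(isTemp = true), so DROP VIEW succeeds.
    spark.range(3).createOrReplaceTempView("v")
    spark.sql("DROP VIEW v")

    // A table resolves to a table relation instead, so the analyzer reports the type mismatch
    // together with the relationTypeMismatchHint:
    // "t is a table. 'DROP VIEW' expects a view. Please use DROP TABLE instead."
    spark.sql("CREATE TABLE t (i INT) USING parquet")
    // spark.sql("DROP VIEW t")  // throws AnalysisException with the message above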
@@ -26,15 +26,15 @@ import org.apache.spark.sql.catalyst.optimizer.BooleanSimplification
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.util.{CharVarcharUtils, TypeUtils}
import org.apache.spark.sql.connector.catalog.{SupportsAtomicPartitionManagement, SupportsPartitionManagement, Table}
import org.apache.spark.sql.connector.catalog.{LookupCatalog, SupportsAtomicPartitionManagement, SupportsPartitionManagement, Table}
import org.apache.spark.sql.connector.catalog.TableChange.{AddColumn, After, ColumnPosition, DeleteColumn, RenameColumn, UpdateColumnComment, UpdateColumnNullability, UpdateColumnPosition, UpdateColumnType}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._

/**
* Throws user facing errors when passed invalid queries that fail to analyze.
*/
trait CheckAnalysis extends PredicateHelper {
trait CheckAnalysis extends PredicateHelper with LookupCatalog {

protected def isView(nameParts: Seq[String]): Boolean

@@ -104,6 +104,15 @@ trait CheckAnalysis extends PredicateHelper {
case u: UnresolvedTable =>
u.failAnalysis(s"Table not found for '${u.commandName}': ${u.multipartIdentifier.quoted}")

case u @ UnresolvedView(NonSessionCatalogAndIdentifier(catalog, ident), cmd, _) =>
u.failAnalysis(
s"Cannot specify catalog `${catalog.name}` for view ${ident.quoted} " +
"because view support in v2 catalog has not been implemented yet. " +
s"$cmd expects a view.")

case u: UnresolvedView =>

Contributor: Do we have a clear rule about when a hint should be used in an error message?

Contributor Author (@imback82, Dec 8, 2020): The rule I am thinking of is to add the hint when the expected "type" of relation is not found. For example, UnresolvedTable is resolved to ResolvedView, or UnresolvedView is resolved to ResolvedTable.

Contributor: Then probably we should make the name clearer: relationTypeMismatchHint

Contributor Author: That sounds good, thanks!

u.failAnalysis(s"View not found for '${u.commandName}': ${u.multipartIdentifier.quoted}")

case u: UnresolvedTableOrView =>
val viewStr = if (u.allowTempView) "view" else "permanent view"
u.failAnalysis(
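For reference, the two new checks above produce errors like the following. The first statement mirrors the DataSourceV2SQLSuite test added later in this diff and assumes a v2 catalog registered as testcat; the second uses a made-up identifier and falls through to the generic "View not found" branch.

    // Sketch only: error messages raised by the new UnresolvedView checks.
    spark.sql("DROP VIEW testcat.v")
    // AnalysisException: Cannot specify catalog `testcat` for view v because view support
    // in v2 catalog has not been implemented yet. DROP VIEW expects a view.

    spark.sql("DROP VIEW does_not_exist")
    // AnalysisException: View not found for 'DROP VIEW': does_not_exist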
@@ -187,11 +187,6 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
writeOptions = c.writeOptions,
orCreate = c.orCreate)

case DropViewStatement(NonSessionCatalogAndTable(catalog, viewName), _) =>
throw new AnalysisException(
s"Can not specify catalog `${catalog.name}` for view ${viewName.quoted} " +
s"because view support in catalog has not been implemented yet")

case c @ CreateNamespaceStatement(CatalogAndNamespace(catalog, ns), _, _)
if !isSessionCatalog(catalog) =>
CreateNamespace(catalog.asNamespaceCatalog, ns, c.ifNotExists, c.properties)
@@ -17,17 +17,19 @@

package org.apache.spark.sql.catalyst.analysis

import org.apache.spark.sql.catalyst.plans.logical.{DropTable, LogicalPlan, NoopDropTable}
import org.apache.spark.sql.catalyst.plans.logical.{DropTable, DropView, LogicalPlan, NoopCommand}
import org.apache.spark.sql.catalyst.rules.Rule

/**
* A rule for handling [[DropTable]] logical plan when the table or temp view is not resolved.
* If "ifExists" flag is set to true, the plan is resolved to [[NoopDropTable]],
* which is a no-op command.
* A rule for handling commands when the table or temp view is not resolved.
* These commands support a flag, "ifExists", so that they do not fail when a relation is not
* resolved. If the "ifExists" flag is set to true, the plan is resolved to [[NoopCommand]],
* which is a no-op command.
*/
object ResolveNoopDropTable extends Rule[LogicalPlan] {
object ResolveCommandsWithIfExists extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
case DropTable(u: UnresolvedTableOrView, ifExists, _) if ifExists =>
NoopDropTable(u.multipartIdentifier)
NoopCommand("DROP TABLE", u.multipartIdentifier)
case DropView(u: UnresolvedView, ifExists) if ifExists =>
NoopCommand("DROP VIEW", u.multipartIdentifier)
}
}
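A hand-built sketch of what this rule does, using the nodes introduced in this PR. The identifier is hypothetical, and the plan is constructed directly rather than produced by the parser and the earlier resolution rules.

    import org.apache.spark.sql.catalyst.analysis.{ResolveCommandsWithIfExists, UnresolvedView}
    import org.apache.spark.sql.catalyst.plans.logical.{DropView, NoopCommand}

    // DROP VIEW IF EXISTS db.missing_view, with the child still unresolved because lookup failed.
    val unresolvedDrop = DropView(
      UnresolvedView(Seq("db", "missing_view"), "DROP VIEW", Some("Please use DROP TABLE instead.")),
      ifExists = true)

    // Because ifExists = true, the rule rewrites the command into a no-op instead of letting
    // CheckAnalysis fail with "View not found".
    val rewritten = ResolveCommandsWithIfExists(unresolvedDrop)
    assert(rewritten == NoopCommand("DROP VIEW", Seq("db", "missing_view")))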
@@ -45,6 +45,19 @@ case class UnresolvedTable(
override def output: Seq[Attribute] = Nil
}

/**
* Holds the name of a view that has yet to be looked up in a catalog. It will be resolved to
* [[ResolvedView]] during analysis.
*/
case class UnresolvedView(
multipartIdentifier: Seq[String],
commandName: String,
relationTypeMismatchHint: Option[String] = None) extends LeafNode {
override lazy val resolved: Boolean = false

override def output: Seq[Attribute] = Nil
}

/**
* Holds the name of a table or view that has yet to be looked up in a catalog. It will
* be resolved to [[ResolvedTable]] or [[ResolvedView]] during analysis.
@@ -3155,11 +3155,14 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
}

/**
* Create a [[DropViewStatement]] command.
* Create a [[DropView]] command.
*/
override def visitDropView(ctx: DropViewContext): AnyRef = withOrigin(ctx) {
DropViewStatement(
visitMultipartIdentifier(ctx.multipartIdentifier()),
DropView(
UnresolvedView(
visitMultipartIdentifier(ctx.multipartIdentifier()),
"DROP VIEW",
Some("Please use DROP TABLE instead.")),
ctx.EXISTS != null)
}
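
The parser change can be exercised directly with CatalystSqlParser; the expected plan shown in the comment matches the DDLParserSuite cases added further down in this diff.

    import org.apache.spark.sql.catalyst.parser.CatalystSqlParser

    CatalystSqlParser.parsePlan("DROP VIEW IF EXISTS db.view")
    // => DropView(
    //      UnresolvedView(Seq("db", "view"), "DROP VIEW", Some("Please use DROP TABLE instead.")),
    //      ifExists = true)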

@@ -338,13 +338,6 @@ case class AlterViewAsStatement(
originalText: String,
query: LogicalPlan) extends ParsedStatement

/**
* A DROP VIEW statement, as parsed from SQL.
*/
case class DropViewStatement(
viewName: Seq[String],
ifExists: Boolean) extends ParsedStatement

/**
* An INSERT INTO statement, as parsed from SQL.
*
@@ -419,9 +419,11 @@ case class DropTable(
}

/**
* The logical plan for handling non-existing table for DROP TABLE command.
* The logical plan for a no-op command that handles a non-existing table or view.
*/
case class NoopDropTable(multipartIdentifier: Seq[String]) extends Command
case class NoopCommand(
commandName: String,
multipartIdentifier: Seq[String]) extends Command

/**
* The logical plan of the ALTER TABLE command.
@@ -708,3 +710,12 @@ case class ShowPartitions(
override val output: Seq[Attribute] = Seq(
AttributeReference("partition", StringType, nullable = false)())
}

/**
* The logical plan of the DROP VIEW command.
*/
case class DropView(
child: LogicalPlan,
ifExists: Boolean) extends Command {
override def children: Seq[LogicalPlan] = child :: Nil
}
@@ -20,7 +20,7 @@ package org.apache.spark.sql.catalyst.parser
import java.util.Locale

import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, GlobalTempView, LocalTempView, PersistedView, UnresolvedAttribute, UnresolvedFunc, UnresolvedNamespace, UnresolvedPartitionSpec, UnresolvedRelation, UnresolvedStar, UnresolvedTable, UnresolvedTableOrView}
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, GlobalTempView, LocalTempView, PersistedView, UnresolvedAttribute, UnresolvedFunc, UnresolvedNamespace, UnresolvedPartitionSpec, UnresolvedRelation, UnresolvedStar, UnresolvedTable, UnresolvedTableOrView, UnresolvedView}
import org.apache.spark.sql.catalyst.catalog.{ArchiveResource, BucketSpec, FileResource, FunctionResource, JarResource}
import org.apache.spark.sql.catalyst.expressions.{EqualTo, Literal}
import org.apache.spark.sql.catalyst.plans.logical._
@@ -721,13 +721,18 @@ class DDLParserSuite extends AnalysisTest {
}

test("drop view") {
val cmd = "DROP VIEW"
val hint = Some("Please use DROP TABLE instead.")
parseCompare(s"DROP VIEW testcat.db.view",
DropViewStatement(Seq("testcat", "db", "view"), ifExists = false))
parseCompare(s"DROP VIEW db.view", DropViewStatement(Seq("db", "view"), ifExists = false))
DropView(UnresolvedView(Seq("testcat", "db", "view"), cmd, hint), ifExists = false))
parseCompare(s"DROP VIEW db.view",
DropView(UnresolvedView(Seq("db", "view"), cmd, hint), ifExists = false))
parseCompare(s"DROP VIEW IF EXISTS db.view",
DropViewStatement(Seq("db", "view"), ifExists = true))
parseCompare(s"DROP VIEW view", DropViewStatement(Seq("view"), ifExists = false))
parseCompare(s"DROP VIEW IF EXISTS view", DropViewStatement(Seq("view"), ifExists = true))
DropView(UnresolvedView(Seq("db", "view"), cmd, hint), ifExists = true))
parseCompare(s"DROP VIEW view",
DropView(UnresolvedView(Seq("view"), cmd, hint), ifExists = false))
parseCompare(s"DROP VIEW IF EXISTS view",
DropView(UnresolvedView(Seq("view"), cmd, hint), ifExists = true))
}

private def testCreateOrReplaceDdl(
@@ -352,9 +352,8 @@ class ResolveSessionCatalog(
}
DropTableCommand(r.identifier.asTableIdentifier, ifExists, isView = false, purge = purge)

// v1 DROP TABLE supports temp view.
case DropViewStatement(TempViewOrV1Table(name), ifExists) =>
DropTableCommand(name.asTableIdentifier, ifExists, isView = true, purge = false)
case DropView(r: ResolvedView, ifExists) =>
DropTableCommand(r.identifier.asTableIdentifier, ifExists, isView = true, purge = false)

case c @ CreateNamespaceStatement(CatalogAndNamespace(catalog, ns), _, _)
if isSessionCatalog(catalog) =>
@@ -251,7 +251,7 @@ class DataSourceV2Strategy(session: SparkSession) extends Strategy with Predicat
case DropTable(r: ResolvedTable, ifExists, purge) =>
DropTableExec(r.catalog, r.identifier, ifExists, purge, invalidateCache(r)) :: Nil

case _: NoopDropTable =>
case _: NoopCommand =>
LocalTableScanExec(Nil, Nil) :: Nil

case AlterTable(catalog, ident, _, changes) =>
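Because a NoopCommand is planned as an empty LocalTableScanExec, the IF EXISTS variants now succeed and return no rows when the relation does not exist. A small behavioral sketch, assuming a spark-shell style session where spark is available and neither relation exists:

    // Sketch only: both commands become NoopCommand during analysis and plan to an empty scan.
    spark.sql("DROP TABLE IF EXISTS no_such_table").collect()  // Array() -- no error
    spark.sql("DROP VIEW IF EXISTS no_such_view").collect()    // Array() -- no error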
@@ -2594,6 +2594,13 @@ class DataSourceV2SQLSuite
}
}

test("DROP VIEW is not supported for v2 catalogs") {
assertAnalysisError(
"DROP VIEW testcat.v",
"Cannot specify catalog `testcat` for view v because view support in v2 catalog " +
"has not been implemented yet. DROP VIEW expects a view.")
}

private def testNotSupportedV2Command(
sqlCommand: String,
sqlParams: String,
@@ -2612,13 +2619,6 @@
assert(e.message.contains(s"$sqlCommand is only supported with v1 tables"))
}

private def testV1CommandSupportingTempView(sqlCommand: String, sqlParams: String): Unit = {
val e = intercept[AnalysisException] {
sql(s"$sqlCommand $sqlParams")
}
assert(e.message.contains(s"$sqlCommand is only supported with temp views or v1 tables"))
}

private def assertAnalysisError(sqlStatement: String, expectedError: String): Unit = {
val errMsg = intercept[AnalysisException] {
sql(sqlStatement)
@@ -1363,12 +1363,11 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
createDatabase(catalog, "dbx")
createTable(catalog, tableIdent)
assert(catalog.listTables("dbx") == Seq(tableIdent))

val e = intercept[AnalysisException] {
sql("DROP VIEW dbx.tab1")
}
assert(
e.getMessage.contains("Cannot drop a table with DROP VIEW. Please use DROP TABLE instead"))
assert(e.getMessage.contains(
"dbx.tab1 is a table. 'DROP VIEW' expects a view. Please use DROP TABLE instead."))
}

protected def testSetProperties(isDatasourceTable: Boolean): Unit = {
@@ -78,6 +78,14 @@ class PlanResolutionSuite extends AnalysisTest {
V1Table(t)
}

private val view: V1Table = {
val t = mock(classOf[CatalogTable])
when(t.schema).thenReturn(new StructType().add("i", "int").add("s", "string"))
when(t.tableType).thenReturn(CatalogTableType.VIEW)
when(t.provider).thenReturn(Some(v1Format))
V1Table(t)
}

private val testCat: TableCatalog = {
val newCatalog = mock(classOf[TableCatalog])
when(newCatalog.loadTable(any())).thenAnswer((invocation: InvocationOnMock) => {
@@ -101,6 +109,7 @@
case "v2Table" => table
case "v2Table1" => table
case "v2TableWithAcceptAnySchemaCapability" => tableWithAcceptAnySchemaCapability
case "view" => view
case name => throw new NoSuchTableException(name)
}
})
@@ -148,7 +157,10 @@
manager
}

def parseAndResolve(query: String, withDefault: Boolean = false): LogicalPlan = {
def parseAndResolve(
query: String,
withDefault: Boolean = false,
checkAnalysis: Boolean = false): LogicalPlan = {
val catalogManager = if (withDefault) {
catalogManagerWithDefault
} else {
@@ -158,8 +170,13 @@
override val extendedResolutionRules: Seq[Rule[LogicalPlan]] = Seq(
new ResolveSessionCatalog(catalogManager, _ == Seq("v"), _ => false))
}
// We don't check analysis here, as we expect the plan to be unresolved such as `CreateTable`.
analyzer.execute(CatalystSqlParser.parsePlan(query))
// We don't check analysis here by default, as we expect the plan to be unresolved
// such as `CreateTable`.
val analyzed = analyzer.execute(CatalystSqlParser.parsePlan(query))
if (checkAnalysis) {
analyzer.checkAnalysis(analyzed)
}
analyzed
}

private def parseResolveCompare(query: String, expected: LogicalPlan): Unit =
@@ -677,6 +694,8 @@ class PlanResolutionSuite extends AnalysisTest {
val viewIdent1 = TableIdentifier("view", Option("db"))
val viewName2 = "view"
val viewIdent2 = TableIdentifier("view", Option("default"))
val tempViewName = "v"
val tempViewIdent = TableIdentifier("v")

parseResolveCompare(s"DROP VIEW $viewName1",
DropTableCommand(viewIdent1, ifExists = false, isView = true, purge = false))
@@ -686,11 +705,15 @@
DropTableCommand(viewIdent2, ifExists = false, isView = true, purge = false))
parseResolveCompare(s"DROP VIEW IF EXISTS $viewName2",
DropTableCommand(viewIdent2, ifExists = true, isView = true, purge = false))
parseResolveCompare(s"DROP VIEW $tempViewName",
DropTableCommand(tempViewIdent, ifExists = false, isView = true, purge = false))
parseResolveCompare(s"DROP VIEW IF EXISTS $tempViewName",
DropTableCommand(tempViewIdent, ifExists = true, isView = true, purge = false))
}

test("drop view in v2 catalog") {
intercept[AnalysisException] {
parseAndResolve("DROP VIEW testcat.db.view")
parseAndResolve("DROP VIEW testcat.db.view", checkAnalysis = true)
}.getMessage.toLowerCase(Locale.ROOT).contains(
"view support in catalog has not been implemented")
}
@@ -1048,7 +1048,8 @@ class HiveDDLSuite
val message = intercept[AnalysisException] {
sql("DROP VIEW tab1")
}.getMessage
assert(message.contains("Cannot drop a table with DROP VIEW. Please use DROP TABLE instead"))
assert(message.contains(
"tab1 is a table. 'DROP VIEW' expects a view. Please use DROP TABLE instead."))
}
}
