
Commit 428e82a

Try this way
1 parent aac9503 commit 428e82a

File tree

3 files changed, +18 -14 lines changed

sql/catalyst/src/main/java/org/apache/spark/sql/catalog/v2/utils/CatalogV2Util.scala

Lines changed: 2 additions & 1 deletion
@@ -24,7 +24,7 @@ import scala.collection.JavaConverters._
 
 import org.apache.spark.sql.catalog.v2.{CatalogPlugin, Identifier, NamespaceChange, TableChange}
 import org.apache.spark.sql.catalog.v2.TableChange.{AddColumn, DeleteColumn, RemoveProperty, RenameColumn, SetProperty, UpdateColumnComment, UpdateColumnType}
-import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
+import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchTableException}
 import org.apache.spark.sql.sources.v2.Table
 import org.apache.spark.sql.types.{ArrayType, MapType, StructField, StructType}
 
@@ -219,5 +219,6 @@ object CatalogV2Util {
       Option(catalog.asTableCatalog.loadTable(ident))
     } catch {
       case _: NoSuchTableException => None
+      case _: NoSuchDatabaseException => None
     }
   }
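
This hunk makes the lookup treat a missing database the same way as a missing table: both exceptions collapse to None instead of escaping to the caller. The following is a minimal, self-contained Scala sketch of that pattern; FakeCatalog and the two exception classes here are hypothetical stand-ins, not the Spark types, and the real change only adds the second catch case.

// Hypothetical stand-ins for the Spark exception and catalog types.
class NoSuchTableException(name: String) extends Exception(name)
class NoSuchDatabaseException(name: String) extends Exception(name)

class FakeCatalog(tables: Map[String, String]) {
  def loadTable(name: String): String =
    tables.getOrElse(name, throw new NoSuchTableException(name))
}

object LoadTableSketch {
  // A missing table and a missing database both surface as None to the caller.
  def loadTable(catalog: FakeCatalog, name: String): Option[String] =
    try {
      Option(catalog.loadTable(name))
    } catch {
      case _: NoSuchTableException => None
      case _: NoSuchDatabaseException => None
    }

  def main(args: Array[String]): Unit = {
    val catalog = new FakeCatalog(Map("t1" -> "schema-of-t1"))
    println(loadTable(catalog, "t1"))      // Some(schema-of-t1)
    println(loadTable(catalog, "missing")) // None
  }
}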

sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala

Lines changed: 6 additions & 5 deletions
@@ -25,10 +25,10 @@ import org.apache.spark.annotation.Stable
 import org.apache.spark.sql.catalog.v2.{CatalogPlugin, Identifier, TableCatalog}
 import org.apache.spark.sql.catalog.v2.expressions._
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases, NoSuchTableException, UnresolvedRelation}
+import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases, NoSuchDatabaseException, NoSuchNamespaceException, NoSuchTableException, UnresolvedRelation}
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.expressions.Literal
-import org.apache.spark.sql.catalyst.plans.logical.{AppendData, CreateTableAsSelect, InsertIntoTable, LogicalPlan, OverwriteByExpression, OverwritePartitionsDynamic, ReplaceTableAsSelect}
+import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.execution.SQLExecution
 import org.apache.spark.sql.execution.command.DDLUtils
 import org.apache.spark.sql.execution.datasources.{CreateTable, DataSource, DataSourceUtils, LogicalRelation}
@@ -37,6 +37,7 @@ import org.apache.spark.sql.internal.SQLConf.PartitionOverwriteMode
 import org.apache.spark.sql.sources.{BaseRelation, DataSourceRegister}
 import org.apache.spark.sql.sources.v2._
 import org.apache.spark.sql.sources.v2.TableCapability._
+import org.apache.spark.sql.sources.v2.internal.UnresolvedTable
 import org.apache.spark.sql.types.{IntegerType, StructType}
 import org.apache.spark.sql.util.CaseInsensitiveStringMap
 
@@ -537,11 +538,11 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
     val tableOpt = try Option(catalog.loadTable(ident)) catch {
       case _: NoSuchTableException => None
     }
-    if (tableOpt.exists(_.isInstanceOf[CatalogTableAsV2])) {
-      return saveAsTable(TableIdentifier(ident.name(), ident.namespace().headOption))
-    }
 
     val command = (mode, tableOpt) match {
+      case (_, Some(table: UnresolvedTable)) =>
+        return saveAsTable(TableIdentifier(ident.name(), ident.namespace().headOption))
+
       case (SaveMode.Append, Some(table)) =>
        AppendData.byName(DataSourceV2Relation.create(table), df.logicalPlan)
 
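
The interesting part is the last hunk: instead of an isInstanceOf check on CatalogTableAsV2 ahead of the match, the v1 fallback now lives inside the (mode, tableOpt) match as a case on the newly imported UnresolvedTable. A rough sketch of that dispatch shape, using simplified stand-in types rather than the Spark classes:

// Stand-in types; in Spark the real ones are Table, UnresolvedTable and SaveMode.
sealed trait Table
final case class UnresolvedTable(name: String) extends Table // v1 table surfaced through a v2 catalog
final case class ResolvedTable(name: String) extends Table   // genuine v2 table

sealed trait SaveMode
case object Append extends SaveMode
case object ErrorIfExists extends SaveMode

object WriteDispatchSketch {
  def writeCommand(mode: SaveMode, tableOpt: Option[Table]): String =
    (mode, tableOpt) match {
      // Fall back to the v1 saveAsTable path for any mode when the catalog
      // hands back an unresolved (session-catalog) table.
      case (_, Some(UnresolvedTable(name)))    => s"fall back to v1 saveAsTable($name)"
      case (Append, Some(ResolvedTable(name))) => s"AppendData to $name"
      case (_, Some(ResolvedTable(name)))      => s"handle existing $name according to the mode"
      case (_, None)                           => "CreateTableAsSelect for a new table"
    }

  def main(args: Array[String]): Unit = {
    println(writeCommand(Append, Some(UnresolvedTable("people"))))
    println(writeCommand(ErrorIfExists, None))
  }
}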

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceResolution.scala

Lines changed: 10 additions & 8 deletions
@@ -24,17 +24,17 @@ import scala.collection.mutable
 import org.apache.spark.sql.{AnalysisException, SaveMode}
 import org.apache.spark.sql.catalog.v2.{CatalogPlugin, Identifier, LookupCatalog, TableCatalog}
 import org.apache.spark.sql.catalog.v2.expressions.Transform
-import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.{AliasIdentifier, TableIdentifier}
 import org.apache.spark.sql.catalyst.analysis.{CastSupport, UnresolvedAttribute}
-import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogTable, CatalogTableType, CatalogUtils, UnresolvedCatalogRelation}
-import org.apache.spark.sql.catalyst.plans.logical.{CreateTableAsSelect, CreateV2Table, DropTable, LogicalPlan, ReplaceTable, ReplaceTableAsSelect}
-import org.apache.spark.sql.catalyst.plans.logical.sql.{AlterTableAddColumnsStatement, AlterTableSetLocationStatement, AlterTableSetPropertiesStatement, AlterTableUnsetPropertiesStatement, AlterViewSetPropertiesStatement, AlterViewUnsetPropertiesStatement, CreateTableAsSelectStatement, CreateTableStatement, DescribeColumnStatement, DescribeTableStatement, DropTableStatement, DropViewStatement, QualifiedColType, ReplaceTableAsSelectStatement, ReplaceTableStatement}
+import org.apache.spark.sql.catalyst.catalog._
+import org.apache.spark.sql.catalyst.plans.logical._
+import org.apache.spark.sql.catalyst.plans.logical.sql._
 import org.apache.spark.sql.catalyst.rules.Rule
-import org.apache.spark.sql.execution.command.{AlterTableAddColumnsCommand, AlterTableSetLocationCommand, AlterTableSetPropertiesCommand, AlterTableUnsetPropertiesCommand, DescribeColumnCommand, DescribeTableCommand, DropTableCommand}
+import org.apache.spark.sql.execution.command._
 import org.apache.spark.sql.execution.datasources.v2.{CatalogTableAsV2, DataSourceV2Relation}
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.sources.v2.TableProvider
-import org.apache.spark.sql.types.{HIVE_TYPE_STRING, HiveStringType, MetadataBuilder, StructField, StructType}
+import org.apache.spark.sql.types._
 import org.apache.spark.sql.util.SchemaUtils
 
 case class DataSourceResolution(
@@ -173,8 +173,10 @@ case class DataSourceResolution(
      // only top-level adds are supported using AlterTableAddColumnsCommand
      AlterTableAddColumnsCommand(table, newColumns.map(convertToStructField))
 
-    case DataSourceV2Relation(CatalogTableAsV2(catalogTable), _, _) =>
-      UnresolvedCatalogRelation(catalogTable)
+    case DataSourceV2Relation(CatalogTableAsV2(ct), _, _) =>
+      SubqueryAlias(
+        AliasIdentifier(ct.identifier.table, ct.identifier.database),
+        UnresolvedCatalogRelation(ct))
 
  }
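
The second hunk no longer returns the bare UnresolvedCatalogRelation; it wraps it in a SubqueryAlias that carries the table and database names, so later resolution can still refer to the relation by name. A simplified sketch of that idea, with stand-in case classes rather than Spark's SubqueryAlias, AliasIdentifier and UnresolvedCatalogRelation:

// Stand-in plan nodes; the real Spark classes live in catalyst.
final case class AliasIdentifier(identifier: String, database: Option[String])

sealed trait LogicalPlan
final case class UnresolvedCatalogRelation(table: String, database: Option[String]) extends LogicalPlan
final case class SubqueryAlias(name: AliasIdentifier, child: LogicalPlan) extends LogicalPlan

object AliasSketch {
  // Keep the table/database names on an alias node around the unresolved relation.
  def toV1Relation(table: String, database: Option[String]): LogicalPlan =
    SubqueryAlias(
      AliasIdentifier(table, database),
      UnresolvedCatalogRelation(table, database))

  def main(args: Array[String]): Unit =
    println(toV1Relation("events", Some("analytics")))
}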
