Skip to content

Commit 7f193ca

Browse files
committed
Address more review comments.
1 parent 97dc04c commit 7f193ca

3 files changed

Lines changed: 9 additions & 12 deletions

File tree

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceResolution.scala

Lines changed: 5 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -23,19 +23,17 @@ import scala.collection.mutable
2323

2424
import org.apache.spark.sql.{AnalysisException, SaveMode}
2525
import org.apache.spark.sql.catalog.v2.{CatalogPlugin, Identifier, LookupCatalog, TableCatalog}
26-
import org.apache.spark.sql.catalog.v2.expressions.{FieldReference, IdentityTransform, Transform}
26+
import org.apache.spark.sql.catalog.v2.expressions.Transform
2727
import org.apache.spark.sql.catalyst.TableIdentifier
28-
import org.apache.spark.sql.catalyst.analysis.{CastSupport, UnresolvedAttribute, UnresolvedRelation}
28+
import org.apache.spark.sql.catalyst.analysis.CastSupport
2929
import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogTable, CatalogTableType, CatalogUtils, UnresolvedCatalogRelation}
30-
import org.apache.spark.sql.catalyst.expressions.{Alias, And, Cast, EqualTo, Expression, Literal}
31-
import org.apache.spark.sql.catalyst.plans.logical.{AppendData, CreateTableAsSelect, CreateV2Table, DropTable, InsertIntoTable, LogicalPlan, OverwriteByExpression, OverwritePartitionsDynamic, Project, ReplaceTable, ReplaceTableAsSelect}
32-
import org.apache.spark.sql.catalyst.plans.logical.sql.{AlterTableAddColumnsStatement, AlterTableSetLocationStatement, AlterTableSetPropertiesStatement, AlterTableUnsetPropertiesStatement, AlterViewSetPropertiesStatement, AlterViewUnsetPropertiesStatement, CreateTableAsSelectStatement, CreateTableStatement, DropTableStatement, DropViewStatement, InsertIntoStatement, QualifiedColType, ReplaceTableAsSelectStatement, ReplaceTableStatement}
30+
import org.apache.spark.sql.catalyst.plans.logical.{CreateTableAsSelect, CreateV2Table, DropTable, LogicalPlan, ReplaceTable, ReplaceTableAsSelect}
31+
import org.apache.spark.sql.catalyst.plans.logical.sql.{AlterTableAddColumnsStatement, AlterTableSetLocationStatement, AlterTableSetPropertiesStatement, AlterTableUnsetPropertiesStatement, AlterViewSetPropertiesStatement, AlterViewUnsetPropertiesStatement, CreateTableAsSelectStatement, CreateTableStatement, DropTableStatement, DropViewStatement, QualifiedColType, ReplaceTableAsSelectStatement, ReplaceTableStatement}
3332
import org.apache.spark.sql.catalyst.rules.Rule
3433
import org.apache.spark.sql.execution.command.{AlterTableAddColumnsCommand, AlterTableSetLocationCommand, AlterTableSetPropertiesCommand, AlterTableUnsetPropertiesCommand, DropTableCommand}
3534
import org.apache.spark.sql.execution.datasources.v2.{CatalogTableAsV2, DataSourceV2Relation}
3635
import org.apache.spark.sql.internal.SQLConf
37-
import org.apache.spark.sql.internal.SQLConf.PartitionOverwriteMode
38-
import org.apache.spark.sql.sources.v2.{Table, TableProvider}
36+
import org.apache.spark.sql.sources.v2.TableProvider
3937
import org.apache.spark.sql.types.{HIVE_TYPE_STRING, HiveStringType, MetadataBuilder, StructField, StructType}
4038

4139
case class DataSourceResolution(
@@ -44,7 +42,6 @@ case class DataSourceResolution(
4442
extends Rule[LogicalPlan] with CastSupport {
4543

4644
import org.apache.spark.sql.catalog.v2.CatalogV2Implicits._
47-
import org.apache.spark.sql.catalog.v2.utils.CatalogV2Util._
4845
import lookup._
4946

5047
lazy val v2SessionCatalog: CatalogPlugin = lookup.sessionCatalog

sql/core/src/test/scala/org/apache/spark/sql/sources/v2/DataSourceV2SQLSuite.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1363,7 +1363,7 @@ class DataSourceV2SQLSuite extends QueryTest with SharedSQLContext with BeforeAn
13631363
val t1 = "testcat.ns1.ns2.tbl"
13641364
val t2 = "testcat2.db.tbl"
13651365
withTable(t1, t2) {
1366-
sql(s"CREATE TABLE $t1 USING foo AS TABLE source")
1366+
sql(s"CREATE TABLE $t1 USING foo AS SELECT * FROM source")
13671367
sql(s"CREATE TABLE $t2 (id bigint, data string) USING foo")
13681368
sql(s"INSERT INTO $t2 SELECT * FROM $t1")
13691369
checkAnswer(spark.table(t2), spark.table("source"))
@@ -1456,7 +1456,7 @@ class DataSourceV2SQLSuite extends QueryTest with SharedSQLContext with BeforeAn
14561456
test("InsertInto: overwrite non-partitioned table") {
14571457
val t1 = "testcat.ns1.ns2.tbl"
14581458
withTable(t1) {
1459-
sql(s"CREATE TABLE $t1 USING foo AS TABLE source")
1459+
sql(s"CREATE TABLE $t1 USING foo AS SELECT * FROM source")
14601460
sql(s"INSERT OVERWRITE TABLE $t1 SELECT * FROM source2")
14611461
checkAnswer(spark.table(t1), spark.table("source2"))
14621462
}

sql/core/src/test/scala/org/apache/spark/sql/sources/v2/TestInMemoryTableCatalog.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -230,7 +230,7 @@ class InMemoryTable(
230230
case _ =>
231231
throw new IllegalArgumentException(s"Unknown filter attribute: $attr")
232232
}
233-
case f @ _ =>
233+
case f =>
234234
throw new IllegalArgumentException(s"Unsupported filter type: $f")
235235
}
236236
}
@@ -248,7 +248,7 @@ class InMemoryTable(
248248

249249
private object TruncateAndAppend extends TestBatchWrite {
250250
override def commit(messages: Array[WriterCommitMessage]): Unit = dataMap.synchronized {
251-
dataMap = mutable.Map.empty
251+
dataMap.clear
252252
withData(messages.map(_.asInstanceOf[BufferedRows]))
253253
}
254254
}

0 commit comments

Comments (0)