
Commit 89febc8

Remove the ColumnarFileFormat type to put it in another PR
1 parent 8ead76e commit 89febc8

3 files changed: 1 addition, 55 deletions

sql/core/src/main/scala/org/apache/spark/sql/execution/DataSourceScanExec.scala

Lines changed: 1 addition & 14 deletions
@@ -301,20 +301,7 @@ case class FileSourceScanExec(
     } getOrElse {
       withOptPartitionCount
     }
-
-    val withOptColumnCount = relation.fileFormat match {
-      case columnar: ColumnarFileFormat =>
-        SparkSession
-          .getActiveSession
-          .map { sparkSession =>
-            val columnCount = columnar.columnCountForSchema(sparkSession, requiredSchema)
-            withSelectedBucketsCount + ("ColumnCount" -> columnCount.toString)
-          } getOrElse {
-            withSelectedBucketsCount
-          }
-      case _ => withSelectedBucketsCount
-    }
-    withOptColumnCount
+    withSelectedBucketsCount
   }

   private lazy val inputRDD: RDD[InternalRow] = {

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ColumnarFileFormat.scala

Lines changed: 0 additions & 32 deletions
This file was deleted.
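
The 32 deleted lines of this file are not shown in the diff. Based on its call sites in DataSourceScanExec.scala above and ParquetFileFormat.scala below, here is a minimal sketch of what the removed trait likely declared (the method signature is taken verbatim from the ParquetFileFormat override; the comments are inferred, not the original text):

    package org.apache.spark.sql.execution.datasources

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.types.StructType

    /**
     * Reconstructed sketch: a mixin for file formats that can report how many
     * physical columns a given read schema resolves to.
     */
    trait ColumnarFileFormat {
      /** Returns the number of physical columns read for the given read schema. */
      def columnCountForSchema(sparkSession: SparkSession, readSchema: StructType): Int
    }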

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala

Lines changed: 0 additions & 9 deletions
@@ -56,7 +56,6 @@ import org.apache.spark.util.{SerializableConfiguration, ThreadUtils}
 
 class ParquetFileFormat
   extends FileFormat
-  with ColumnarFileFormat
   with DataSourceRegister
   with Logging
   with Serializable {
@@ -74,14 +73,6 @@ class ParquetFileFormat
 
   override def equals(other: Any): Boolean = other.isInstanceOf[ParquetFileFormat]
 
-  override def columnCountForSchema(sparkSession: SparkSession, readSchema: StructType): Int = {
-    val converter = new SparkToParquetSchemaConverter(
-      sparkSession.sessionState.conf.writeLegacyParquetFormat,
-      sparkSession.sessionState.conf.parquetOutputTimestampType)
-    val parquetSchema = converter.convert(readSchema)
-    parquetSchema.getPaths.size
-  }
-
   override def prepareWrite(
       sparkSession: SparkSession,
       job: Job,
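
For context, the override removed here counted Parquet leaf columns: it converts the Catalyst read schema to a Parquet MessageType and returns the number of leaf paths, so a nested struct contributes one count per leaf field rather than one per top-level column. A minimal standalone sketch of that behavior (not part of this commit; it assumes SparkToParquetSchemaConverter's default constructor arguments are available, as in recent Spark versions):

    import org.apache.spark.sql.execution.datasources.parquet.SparkToParquetSchemaConverter
    import org.apache.spark.sql.types._

    object ColumnCountSketch {
      def main(args: Array[String]): Unit = {
        // Two top-level fields, but three Parquet leaf columns once `name` is flattened.
        val readSchema = StructType(Seq(
          StructField("id", LongType),
          StructField("name", StructType(Seq(
            StructField("first", StringType),
            StructField("last", StringType))))))

        // Same conversion the removed override performed, with default converter settings.
        val parquetSchema = new SparkToParquetSchemaConverter().convert(readSchema)
        println(parquetSchema.getPaths.size)  // 3: id, name.first, name.last
      }
    }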
