@@ -43,13 +43,13 @@ import org.apache.spark.{SPARK_VERSION_SHORT, SparkException}
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.{InternalRow, ScalaReflection}
 import org.apache.spark.sql.catalyst.expressions.{GenericInternalRow, UnsafeRow}
-import org.apache.spark.sql.catalyst.util.DateTimeUtils
+import org.apache.spark.sql.catalyst.util.{DateTimeUtils, IntervalUtils}
 import org.apache.spark.sql.execution.datasources.SQLHadoopMapReduceCommitProtocol
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types._
-import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
+import org.apache.spark.unsafe.types.UTF8String
 
 // Write support class for nested groups: ParquetWriter initializes GroupWriteSupport
 // with an empty configuration (it is after all not intended to be used in this way?)
@@ -751,7 +751,7 @@ class ParquetIOSuite extends QueryTest with ParquetTest with SharedSparkSession
       Decimal("1234.23456"),
       DateTimeUtils.fromJavaDate(java.sql.Date.valueOf("2015-01-01")),
       DateTimeUtils.fromJavaTimestamp(java.sql.Timestamp.valueOf("2015-01-01 23:50:59.123")),
-      CalendarInterval.fromString("interval 1 month 2 microsecond"))
+      IntervalUtils.fromString("interval 1 month 2 microsecond"))
 
     dataTypes.zip(constantValues).foreach { case (dt, v) =>
       val schema = StructType(StructField("pcol", dt) :: Nil)
0 commit comments