File tree Expand file tree Collapse file tree
sql/hive/src/test/scala/org/apache/spark/sql/parquet Expand file tree Collapse file tree Original file line number Diff line number Diff line change @@ -31,7 +31,7 @@ import org.apache.spark.sql.hive.test.TestHive._
3131case class ParquetData(intField: Int, stringField: String)
3232// The data that also includes the partitioning key
3333case class ParquetDataWithKey(p: Int, intField: Int, stringField: String)
34-
34+ case class ParquetDataWithKeyAndComplexTypes(p: Int, intField: Int, stringField: String)
3535
3636/**
3737 * A suite to test the automatic conversion of metastore tables with parquet data to use the
@@ -69,6 +69,22 @@ class ParquetMetastoreSuite extends ParquetTest {
6969 location '${partitionedTableDirWithKey.getCanonicalPath}'
7070 """ )
7171
72+ sql(s"""
73+ create external table partitioned_parquet_with_key_and_complextypes
74+ (
75+ intField INT,
76+ structField STRUCT<intStructField INT, stringStructField STRING>,
77+ arrayField ARRAY<INT>,
78+ stringField STRING
79+ )
80+ PARTITIONED BY (p int)
81+ ROW FORMAT SERDE 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
82+ STORED AS
83+ INPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
84+ OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
85+ location '${partitionedTableDirWithKey.getCanonicalPath}'
86+ """ )
87+
7288 sql(s"""
7389 create external table normal_parquet
7490 (
You can’t perform that action at this time.
0 commit comments