Skip to content

Commit 79d81d5

Browse files
Replacing field names for array and map in WriteSupport
1 parent f466ff0 commit 79d81d5

2 files changed

Lines changed: 10 additions & 8 deletions

File tree

sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableSupport.scala

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -187,13 +187,13 @@ private[parquet] class RowWriteSupport extends WriteSupport[Row] with Logging {
187187
val elementType = schema.elementType
188188
writer.startGroup()
189189
if (array.size > 0) {
190-
writer.startField("values", 0)
190+
writer.startField(CatalystConverter.ARRAY_ELEMENTS_SCHEMA_NAME, 0)
191191
var i = 0
192192
while(i < array.size) {
193193
writeValue(elementType, array(i))
194194
i = i + 1
195195
}
196-
writer.endField("values", 0)
196+
writer.endField(CatalystConverter.ARRAY_ELEMENTS_SCHEMA_NAME, 0)
197197
}
198198
writer.endGroup()
199199
}
@@ -202,20 +202,20 @@ private[parquet] class RowWriteSupport extends WriteSupport[Row] with Logging {
202202
private[parquet] def writeMap(schema: MapType, map: Map[_, _]): Unit = {
203203
writer.startGroup()
204204
if (map.size > 0) {
205-
writer.startField("map", 0)
205+
writer.startField(CatalystConverter.MAP_SCHEMA_NAME, 0)
206206
writer.startGroup()
207-
writer.startField("key", 0)
207+
writer.startField(CatalystConverter.MAP_KEY_SCHEMA_NAME, 0)
208208
for(key <- map.keys) {
209209
writeValue(schema.keyType, key)
210210
}
211-
writer.endField("key", 0)
212-
writer.startField("value", 1)
211+
writer.endField(CatalystConverter.MAP_KEY_SCHEMA_NAME, 0)
212+
writer.startField(CatalystConverter.MAP_VALUE_SCHEMA_NAME, 1)
213213
for(value <- map.values) {
214214
writeValue(schema.valueType, value)
215215
}
216-
writer.endField("value", 1)
216+
writer.endField(CatalystConverter.MAP_VALUE_SCHEMA_NAME, 1)
217217
writer.endGroup()
218-
writer.endField("map", 0)
218+
writer.endField(CatalystConverter.MAP_SCHEMA_NAME, 0)
219219
}
220220
writer.endGroup()
221221
}

sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -553,6 +553,8 @@ class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterA
553553
}
554554

555555
test("Writing out Addressbook and reading it back in") {
556+
// TODO: find out why CatalystConverter.ARRAY_ELEMENTS_SCHEMA_NAME
557+
// has no effect in this test case
556558
implicit def anyToRow(value: Any): Row = value.asInstanceOf[Row]
557559
val tmpdir = Utils.createTempDir()
558560
val result = TestSQLContext

0 commit comments

Comments (0)