Skip to content

Commit 63d1b57

Browse files
Cleaning up and Scalastyle
1 parent 88e6bdb commit 63d1b57

File tree

3 files changed

+2 −15 lines changed

3 files changed

+2 −15 lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -233,7 +233,7 @@ object StructType {
233233
StructType(attributes.map(a => StructField(a.name, a.dataType, a.nullable)))
234234
}
235235

236-
//def apply(fields: Seq[StructField]) = new StructType(fields.toIndexedSeq)
236+
// def apply(fields: Seq[StructField]) = new StructType(fields.toIndexedSeq)
237237
}
238238

239239
case class StructType(fields: Seq[StructField]) extends DataType {

sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableOperations.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -178,7 +178,6 @@ case class InsertIntoParquetTable(
178178
ParquetOutputFormat.setWriteSupportClass(job, writeSupport)
179179

180180
val conf = ContextUtil.getConfiguration(job)
181-
//conf.set(RowWriteSupport.PARQUET_ROW_SCHEMA, StructType.fromAttributes(relation.output).toString)
182181
RowWriteSupport.setSchema(relation.output, conf)
183182

184183
val fspath = new Path(relation.path)

sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableSupport.scala

Lines changed: 1 addition & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -111,24 +111,12 @@ private[parquet] object RowReadSupport {
111111
*/
112112
private[parquet] class RowWriteSupport extends WriteSupport[Row] with Logging {
113113

114-
/*def setSchema(schema: Seq[Attribute], configuration: Configuration) {
115-
configuration.set(
116-
RowWriteSupport.PARQUET_ROW_SCHEMA,
117-
StructType.fromAttributes(schema).toString)
118-
configuration.set(
119-
ParquetOutputFormat.WRITER_VERSION,
120-
ParquetProperties.WriterVersion.PARQUET_1_0.toString)
121-
} */
122-
123-
private[parquet] var schema: MessageType = null
124114
private[parquet] var writer: RecordConsumer = null
125115
private[parquet] var attributes: Seq[Attribute] = null
126116

127117
override def init(configuration: Configuration): WriteSupport.WriteContext = {
128-
//attributes = DataType(configuration.get(RowWriteSupport.PARQUET_ROW_SCHEMA))
129118
attributes = if (attributes == null) RowWriteSupport.getSchema(configuration) else attributes
130-
schema = if (schema == null) ParquetTypesConverter.convertFromAttributes(attributes) else schema
131-
// ParquetTypesConverter.convertToAttributes(schema)
119+
132120
log.debug(s"write support initialized for requested schema $attributes")
133121
ParquetRelation.enableLogForwarding()
134122
new WriteSupport.WriteContext(

0 commit comments

Comments (0)