-
Notifications
You must be signed in to change notification settings - Fork 29k
[SPARK-19228][SQL] Introduce tryParseDate method to process csv date,… #20140
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 1 commit
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -90,7 +90,10 @@ private[csv] object CSVInferSchema { | |
| // DecimalTypes have different precisions and scales, so we try to find the common type. | ||
| findTightestCommonType(typeSoFar, tryParseDecimal(field, options)).getOrElse(StringType) | ||
| case DoubleType => tryParseDouble(field, options) | ||
| case TimestampType => tryParseTimestamp(field, options) | ||
| case DateType => tryParseDate(field, options) | ||
| case TimestampType => | ||
| findTightestCommonType(typeSoFar, tryParseTimestamp(field, options)).getOrElse( | ||
| tryParseBoolean(field, options)) | ||
| case BooleanType => tryParseBoolean(field, options) | ||
| case StringType => StringType | ||
| case other: DataType => | ||
|
|
@@ -140,14 +143,23 @@ private[csv] object CSVInferSchema { | |
| private def tryParseDouble(field: String, options: CSVOptions): DataType = { | ||
| if ((allCatch opt field.toDouble).isDefined || isInfOrNan(field, options)) { | ||
| DoubleType | ||
| } else { | ||
| tryParseDate(field, options) | ||
| } | ||
| } | ||
|
|
||
| private def tryParseDate(field: String, options: CSVOptions): DataType = { | ||
| // This case infers a custom `dateFormat` is set. | ||
| if ((allCatch opt options.dateFormatter.parse(field)).isDefined) { | ||
| DateType | ||
| } else { | ||
| tryParseTimestamp(field, options) | ||
| } | ||
| } | ||
|
|
||
| private def tryParseTimestamp(field: String, options: CSVOptions): DataType = { | ||
| // This case infers a custom `dataFormat` is set. | ||
| if ((allCatch opt options.timestampFormat.parse(field)).isDefined) { | ||
| // This case infers a custom `timestampFormat` is set. | ||
| if ((allCatch opt options.timestampFormatter.parse(field)).isDefined) { | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Should we replace it with
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Probably, adding a configuration to control this behaviour looks preferable in this case. |
||
| TimestampType | ||
| } else if ((allCatch opt DateTimeUtils.stringToTime(field)).isDefined) { | ||
| // We keep this for backwards compatibility. | ||
|
|
@@ -216,6 +228,8 @@ private[csv] object CSVInferSchema { | |
| } else { | ||
| Some(DecimalType(range + scale, scale)) | ||
| } | ||
| // By design 'TimestampType' (8 bytes) is larger than 'DateType' (4 bytes). | ||
| case (t1: DateType, t2: TimestampType) => Some(TimestampType) | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I think we should handle the opposite case too |
||
|
|
||
| case _ => None | ||
| } | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -18,6 +18,7 @@ | |
| package org.apache.spark.sql.execution.datasources.csv | ||
|
|
||
| import java.nio.charset.StandardCharsets | ||
| import java.time.format.{DateTimeFormatter, ResolverStyle} | ||
| import java.util.{Locale, TimeZone} | ||
|
|
||
| import com.univocity.parsers.csv.{CsvParserSettings, CsvWriterSettings, UnescapedQuoteHandling} | ||
|
|
@@ -150,6 +151,16 @@ class CSVOptions( | |
|
|
||
| val isCommentSet = this.comment != '\u0000' | ||
|
|
||
| def dateFormatter: DateTimeFormatter = { | ||
|
||
| DateTimeFormatter.ofPattern(dateFormat.getPattern) | ||
| .withLocale(Locale.US).withZone(timeZone.toZoneId).withResolverStyle(ResolverStyle.SMART) | ||
| } | ||
|
|
||
| def timestampFormatter: DateTimeFormatter = { | ||
| DateTimeFormatter.ofPattern(timestampFormat.getPattern) | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Mind if I ask you to elaborate
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. `DateTimeFormatter` is a standard time library from Java 8. `FastDateFormat` can't properly parse dates and timestamps. I could create some test cases to prove it, but I would need a lot of time for that. Also, `FastDateFormat` does not conform to ISO 8601: https://en.wikipedia.org/wiki/ISO_8601 |
||
| .withLocale(Locale.US).withZone(timeZone.toZoneId).withResolverStyle(ResolverStyle.SMART) | ||
| } | ||
|
|
||
| def asWriterSettings: CsvWriterSettings = { | ||
| val writerSettings = new CsvWriterSettings() | ||
| val format = writerSettings.getFormat | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,4 @@ | ||
| timestamp,date | ||
| 26/08/2015 22:31:46.913,27/09/2015 | ||
| 27/10/2014 22:33:31.601,26/12/2016 | ||
| 28/01/2016 22:33:52.888,28/01/2017 |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Mind elaborating why we should find the wider type here?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Sorry, your question is not really clear to me.
We have to try to parse the value as DateType first, because a date can always be parsed both as a date and as a timestamp (the beginning of the day).
The current implementation of Spark ignores dates and always parses them as timestamps.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I mean, it wasn't clear why we need `findTightestCommonType`. I thought `case TimestampType => tryParseTimestamp(field, options)` would work.