-
Notifications
You must be signed in to change notification settings - Fork 29k
[SPARK-26379][SS] Fix issue on adding current_timestamp/current_date to streaming query #23609
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 2 commits
afb0dc4
8f30a75
d09845b
687c3e4
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -508,12 +508,20 @@ class MicroBatchExecution( | |
| cd.dataType, cd.timeZoneId) | ||
| } | ||
|
|
||
| // Pre-resolve new attributes to ensure all attributes are resolved before | ||
| // accessing schema of logical plan. Note that it only leverages the information | ||
| // of attributes, so we don't need to concern about the value of literals. | ||
|
|
||
| val newAttrPlanPreResolvedForSchema = newAttributePlan transformAllExpressions { | ||
|
||
| case cbt: CurrentBatchTimestamp => cbt.toLiteral | ||
| } | ||
|
|
||
| val triggerLogicalPlan = sink match { | ||
| case _: Sink => newAttributePlan | ||
| case s: StreamingWriteSupportProvider => | ||
| val writer = s.createStreamingWriteSupport( | ||
| s"$runId", | ||
| newAttributePlan.schema, | ||
| newAttrPlanPreResolvedForSchema.schema, | ||
| outputMode, | ||
| new DataSourceOptions(extraOptions.asJava)) | ||
| WriteToDataSourceV2(new MicroBatchWrite(currentBatchId, writer), newAttributePlan) | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -1079,6 +1079,42 @@ class StreamSuite extends StreamTest { | |
| assert(query.exception.isEmpty) | ||
| } | ||
| } | ||
|
|
||
| Seq(true, false).foreach { useV2Sink => | ||
|
||
| import org.apache.spark.sql.functions._ | ||
|
||
|
|
||
| val newTestName = "SPARK-26379 Structured Streaming - Exception on adding column to Dataset" + | ||
| s" - use v2 sink - $useV2Sink" | ||
|
|
||
| test(newTestName) { | ||
| val input = MemoryStream[Int] | ||
| val df = input.toDS().withColumn("cur_timestamp", lit(current_timestamp())) | ||
|
||
|
|
||
| def assertBatchOutputAndUpdateLastTimestamp( | ||
| rows: Seq[Row], | ||
| curTimestamp: Long, | ||
| expectedValue: Int): Long = { | ||
| assert(rows.size === 1) | ||
| val row = rows.head | ||
| assert(row.getInt(0) === expectedValue) | ||
| assert(row.getTimestamp(1).getTime > curTimestamp) | ||
| row.getTimestamp(1).getTime | ||
| } | ||
|
|
||
| var lastTimestamp = -1L | ||
|
||
| testStream(df, useV2Sink = useV2Sink) ( | ||
| AddData(input, 1), | ||
| CheckLastBatch { rows: Seq[Row] => | ||
| lastTimestamp = assertBatchOutputAndUpdateLastTimestamp(rows, lastTimestamp, 1) | ||
| }, | ||
| Execute { _ => Thread.sleep(3 * 1000) }, | ||
| AddData(input, 2), | ||
| CheckLastBatch { rows: Seq[Row] => | ||
| lastTimestamp = assertBatchOutputAndUpdateLastTimestamp(rows, lastTimestamp, 2) | ||
| } | ||
| ) | ||
| } | ||
| } | ||
| } | ||
|
|
||
| abstract class FakeSource extends StreamSourceProvider { | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The root cause is that
`CurrentBatchTimestamp` is a `TimeZoneAwareExpression`, which is unresolved without a `timeZoneId`.