Skip to content

Commit 4c1a012

Browse files
committed
Change 2.2.0 to 2.3.0
1 parent 7e424f0 commit 4c1a012

7 files changed

Lines changed: 20 additions & 19 deletions

File tree

python/pyspark/sql/readwriter.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -142,7 +142,7 @@ def options(self, **options):
142142
self.option(k, options[k])
143143
return self
144144

145-
@since(2.2)
145+
@since(2.3)
146146
def unsetOption(self, key):
147147
"""Un-sets the option given to the key for the underlying data source.
148148
"""
@@ -583,7 +583,7 @@ def options(self, **options):
583583
self.option(k, options[k])
584584
return self
585585

586-
@since(2.2)
586+
@since(2.3)
587587
def unsetOption(self, key):
588588
"""Un-sets the option given to the key for the underlying data source.
589589
"""

python/pyspark/sql/streaming.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -374,11 +374,11 @@ def options(self, **options):
374374
self.option(k, options[k])
375375
return self
376376

377-
@since(2.2)
377+
@since(2.3)
378378
def unsetOption(self, key):
379379
"""Un-sets the option given to the key for the underlying data source.
380380
381-
.. note:: Experimental.
381+
.. note:: Evolving.
382382
"""
383383
self._jreader = self._jreader.unsetOption(key)
384384
return self
@@ -748,11 +748,11 @@ def options(self, **options):
748748
self.option(k, options[k])
749749
return self
750750

751-
@since(2.2)
751+
@since(2.3)
752752
def unsetOption(self, key):
753753
"""Un-sets the option given to the key for the underlying data source.
754754
755-
.. note:: Experimental.
755+
.. note:: Evolving.
756756
"""
757757
self._jwrite = self._jwrite.unsetOption(key)
758758
return self

sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -122,7 +122,7 @@ class DataFrameReader private[sql](sparkSession: SparkSession) extends Logging {
122122
/**
123123
* (Scala-specific) Adds an input option for the underlying data source.
124124
*
125-
* @since 2.2.0
125+
* @since 2.3.0
126126
*/
127127
def option(key: String, value: Seq[String]): DataFrameReader = {
128128
option(key, compact(render(value)))
@@ -131,7 +131,7 @@ class DataFrameReader private[sql](sparkSession: SparkSession) extends Logging {
131131
/**
132132
* Adds an input option for the underlying data source.
133133
*
134-
* @since 2.2.0
134+
* @since 2.3.0
135135
*/
136136
def option(key: String, value: Array[String]): DataFrameReader = option(key, value.toSeq)
137137

@@ -170,7 +170,7 @@ class DataFrameReader private[sql](sparkSession: SparkSession) extends Logging {
170170
/**
171171
* Un-sets an input option for the underlying data source.
172172
*
173-
* @since 2.2.0
173+
* @since 2.3.0
174174
*/
175175
def unsetOption(key: String): DataFrameReader = {
176176
this.extraOptions.remove(key)

sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -131,7 +131,7 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
131131
/**
132132
* (Scala-specific) Adds an output option for the underlying data source.
133133
*
134-
* @since 2.2.0
134+
* @since 2.3.0
135135
*/
136136
def option(key: String, value: Seq[String]): DataFrameWriter[T] = {
137137
option(key, compact(render(value)))
@@ -140,7 +140,7 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
140140
/**
141141
* Adds an output option for the underlying data source.
142142
*
143-
* @since 2.2.0
143+
* @since 2.3.0
144144
*/
145145
def option(key: String, value: Array[String]): DataFrameWriter[T] = option(key, value.toSeq)
146146

@@ -179,7 +179,7 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
179179
/**
180180
* Un-sets an output option for the underlying data source.
181181
*
182-
* @since 2.2.0
182+
* @since 2.3.0
183183
*/
184184
def unsetOption(key: String): DataFrameWriter[T] = {
185185
this.extraOptions.remove(key)

sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamReader.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -114,7 +114,7 @@ final class DataStreamReader private[sql](sparkSession: SparkSession) extends Lo
114114
/**
115115
* (Scala-specific) Adds an input option for the underlying data source.
116116
*
117-
* @since 2.2.0
117+
* @since 2.3.0
118118
*/
119119
def option(key: String, value: Seq[String]): DataStreamReader = {
120120
option(key, compact(render(value)))
@@ -123,7 +123,7 @@ final class DataStreamReader private[sql](sparkSession: SparkSession) extends Lo
123123
/**
124124
* Adds an input option for the underlying data source.
125125
*
126-
* @since 2.2.0
126+
* @since 2.3.0
127127
*/
128128
def option(key: String, value: Array[String]): DataStreamReader = option(key, value.toSeq)
129129

@@ -162,7 +162,7 @@ final class DataStreamReader private[sql](sparkSession: SparkSession) extends Lo
162162
/**
163163
* Un-sets an input option for the underlying data source.
164164
*
165-
* @since 2.2.0
165+
* @since 2.3.0
166166
*/
167167
def unsetOption(key: String): DataStreamReader = {
168168
this.extraOptions.remove(key)

sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamWriter.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -186,7 +186,7 @@ final class DataStreamWriter[T] private[sql](ds: Dataset[T]) {
186186
/**
187187
* (Scala-specific) Adds an output option for the underlying data source.
188188
*
189-
* @since 2.2.0
189+
* @since 2.3.0
190190
*/
191191
def option(key: String, value: Seq[String]): DataStreamWriter[T] = {
192192
option(key, compact(render(value)))
@@ -195,7 +195,7 @@ final class DataStreamWriter[T] private[sql](ds: Dataset[T]) {
195195
/**
196196
* Adds an output option for the underlying data source.
197197
*
198-
* @since 2.2.0
198+
* @since 2.3.0
199199
*/
200200
def option(key: String, value: Array[String]): DataStreamWriter[T] = option(key, value.toSeq)
201201

@@ -234,7 +234,7 @@ final class DataStreamWriter[T] private[sql](ds: Dataset[T]) {
234234
/**
235235
 * Un-sets an output option for the underlying data source.
236236
*
237-
* @since 2.2.0
237+
* @since 2.3.0
238238
*/
239239
def unsetOption(key: String): DataStreamWriter[T] = {
240240
this.extraOptions.remove(key)

sql/hive/src/test/scala/org/apache/spark/sql/sources/SimpleTextHadoopFsRelationSuite.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -65,8 +65,9 @@ class SimpleTextHadoopFsRelationSuite extends HadoopFsRelationTest with Predicat
6565

6666
test("test hadoop conf option propagation") {
6767
withTempPath { file =>
68-
// Test write side
6968
val nullVal: String = null
69+
70+
// Test write side
7071
val df = spark.range(10).selectExpr("cast(id as string)")
7172
df.write
7273
.option("some-random-write-option", "hahah-WRITE")

0 commit comments

Comments
 (0)