Skip to content
Closed
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ statement
| op=(ADD | LIST) identifier .*? #manageResource
| SET ROLE .*? #failNativeCommand
| SET .*? #setConfiguration
| RESET #resetConfiguration
| RESET .*? #resetConfiguration
| unsupportedHiveNativeCommands .*? #failNativeCommand
;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -82,11 +82,13 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder(conf) {
* Example SQL :
* {{{
* RESET;
* RESET key;
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

what about

RESET `special#$!`?

* }}}
*/
override def visitResetConfiguration(
    ctx: ResetConfigurationContext): LogicalPlan = withOrigin(ctx) {
  // Everything after the RESET keyword is treated as the (optional) key to reset.
  // Note: removed a stray dead `ResetCommand` expression that preceded this logic;
  // its value was discarded and it had no effect.
  val raw = remainder(ctx.RESET.getSymbol)
  // An empty remainder means "RESET" alone: reset all configurations.
  if (raw.nonEmpty) ResetCommand(Some(raw.trim)) else ResetCommand(None)
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -150,12 +150,19 @@ object SetCommand {
* This command is for resetting SQLConf to the default values. Command that runs
* {{{
* reset;
* reset key;
* }}}
*/
case object ResetCommand extends RunnableCommand with Logging {
case class ResetCommand(key: Option[String]) extends RunnableCommand with Logging {

  /**
   * Resets SQL configuration. With no key ("RESET"), restores every SQLConf
   * entry to its default; with a key ("RESET key"), unsets only that property.
   */
  override def run(sparkSession: SparkSession): Seq[Row] = {
    key match {
      // "RESET" with no key: clear the whole session conf back to defaults.
      case None =>
        sparkSession.sessionState.conf.clear()
      // "RESET key": clears only the given property.
      case Some(k) =>
        sparkSession.conf.unset(k)
    }
    Seq.empty[Row]
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -301,4 +301,10 @@ class SparkSqlParserSuite extends PlanTest {
"SELECT a || b || c FROM t",
Project(UnresolvedAlias(concat) :: Nil, UnresolvedRelation(TableIdentifier("t"))))
}

test("reset") {
assertEqual("reset", ResetCommand(None))
assertEqual("reset spark.test.property", ResetCommand(Some("spark.test.property")))
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

can we add a test for

reset `#a!`

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

OK, I have now added one, plus a SQL-parse test for resetting a key containing special characters.

assertEqual("reset #$a!", ResetCommand(Some("#$a!")))
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

can we check hive's behavior? I think special chars are not allowed in config name and parser should throw exception for this case.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hive 2.1.1 exhibits the same behavior. I think this is because it is hard to define what a "normal" property name is, since users can define properties however they wish.
Hive case:

hive (temp)> set #$!@=1024;
hive (temp)> set #$!@;
#$!@=1024
hive (temp)>

}
}
Original file line number Diff line number Diff line change
Expand Up @@ -114,50 +114,55 @@ class SQLConfSuite extends QueryTest with SharedSQLContext {
}
}

test("reset - public conf") {
spark.sessionState.conf.clear()
val original = spark.conf.get(SQLConf.GROUP_BY_ORDINAL)
try {
assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === true)
sql(s"set ${SQLConf.GROUP_BY_ORDINAL.key}=false")
assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === false)
assert(sql(s"set").where(s"key = '${SQLConf.GROUP_BY_ORDINAL.key}'").count() == 1)
sql(s"reset")
assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === true)
assert(sql(s"set").where(s"key = '${SQLConf.GROUP_BY_ORDINAL.key}'").count() == 0)
} finally {
sql(s"set ${SQLConf.GROUP_BY_ORDINAL}=$original")
Seq("reset", s"reset ${SQLConf.GROUP_BY_ORDINAL.key}").foreach { resetCmd =>
test(s"reset - public conf $resetCmd") {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nit: seems better to make the test name s"$resetCmd - public conf"

Copy link
Author

@ericsahit ericsahit Jun 26, 2017

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

done, sounds like a more reasonable name

spark.sessionState.conf.clear()
val original = spark.conf.get(SQLConf.GROUP_BY_ORDINAL)
try {
assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === true)
sql(s"set ${SQLConf.GROUP_BY_ORDINAL.key}=false")
assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === false)
assert(sql(s"set").where(s"key = '${SQLConf.GROUP_BY_ORDINAL.key}'").count() == 1)
sql(resetCmd)
assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL) === true)
assert(sql(s"set").where(s"key = '${SQLConf.GROUP_BY_ORDINAL.key}'").count() == 0)
} finally {
sql(s"set ${SQLConf.GROUP_BY_ORDINAL}=$original")
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

not introduced in this PR, but we don't need to do this as we call spark.sessionState.conf.clear() at the beginning.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Would you mean it is a duplicated clean action? I suppose the purpose is to prevent impact of other tests..

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think this is OK. After clear(), original gets the default value of the config, so here it wants to keep the default value unchanged.

}
}
}
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Let us combine the three newly added test cases with the existing ones. For example,

  Seq("reset", s"reset ${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}").foreach { cmd =>
    test(s"$cmd - internal conf") {
      ...
        sql(cmd)
      ...
  }

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

After done this, building threw SQLConfSuite is not a test, maybe this style does not work. @gatorsmile


test("reset - internal conf") {
spark.sessionState.conf.clear()
val original = spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS)
try {
assert(spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) === 100)
sql(s"set ${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}=10")
assert(spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) === 10)
assert(sql(s"set").where(s"key = '${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}'").count() == 1)
sql(s"reset")
assert(spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) === 100)
assert(sql(s"set").where(s"key = '${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}'").count() == 0)
} finally {
sql(s"set ${SQLConf.OPTIMIZER_MAX_ITERATIONS}=$original")
// Exercise both forms: "RESET" (clear everything) and "RESET <key>" (clear one property).
Seq("reset", s"reset ${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}").foreach { resetCmd =>
  // Reviewer-suggested naming: command first, then the conf category.
  test(s"$resetCmd - internal conf") {
    spark.sessionState.conf.clear()
    // Capture the default so we can restore it even if an assertion fails.
    val original = spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS)
    try {
      assert(spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) === 100)
      sql(s"set ${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}=10")
      assert(spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) === 10)
      assert(sql(s"set").where(s"key = '${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}'").count() == 1)
      sql(resetCmd)
      assert(spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) === 100)
      assert(sql(s"set").where(s"key = '${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}'").count() == 0)
    } finally {
      // Fix: the original interpolated the ConfigEntry object itself (its toString),
      // not its key, so the restore set a garbage property. Use `.key` explicitly.
      sql(s"set ${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}=$original")
    }
  }
}

test("reset - user-defined conf") {
spark.sessionState.conf.clear()
val userDefinedConf = "x.y.z.reset"
try {
assert(spark.conf.getOption(userDefinedConf).isEmpty)
sql(s"set $userDefinedConf=false")
assert(spark.conf.get(userDefinedConf) === "false")
assert(sql(s"set").where(s"key = '$userDefinedConf'").count() == 1)
sql(s"reset")
assert(spark.conf.getOption(userDefinedConf).isEmpty)
} finally {
spark.conf.unset(userDefinedConf)
// Exercise both forms against a user-defined (non-built-in) property.
Seq("reset", s"reset $testKey").foreach { resetCmd =>
  // Reviewer-suggested naming: command first, then the conf category,
  // consistent with the public/internal conf tests.
  test(s"$resetCmd - user-defined conf") {
    spark.sessionState.conf.clear()
    try {
      // A user-defined key has no default, so RESET should remove it entirely.
      assert(spark.conf.getOption(testKey).isEmpty)
      sql(s"set $testKey=false")
      assert(spark.conf.get(testKey) === "false")
      assert(sql(s"set").where(s"key = '$testKey'").count() == 1)
      sql(resetCmd)
      assert(spark.conf.getOption(testKey).isEmpty)
    } finally {
      // Always clean up so other tests are unaffected, even on assertion failure.
      spark.conf.unset(testKey)
    }
  }
}

Expand Down