Skip to content

Commit c049fa4

Browse files
committed
[SPARK-11191][SQL][FOLLOW-UP] Cleans up unnecessary anonymous HiveFunctionRegistry
According to the discussion in PR #9664, the anonymous `HiveFunctionRegistry` in `HiveContext` can be removed now.

Author: Cheng Lian <[email protected]>

Closes #9737 from liancheng/spark-11191.follow-up.

(cherry picked from commit fa13301)
Signed-off-by: Cheng Lian <[email protected]>
1 parent 62ad81a commit c049fa4

2 files changed

Lines changed: 6 additions & 11 deletions

File tree

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala

Lines changed: 1 addition & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -454,15 +454,7 @@ class HiveContext private[hive](
454454
// Note that HiveUDFs will be overridden by functions registered in this context.
455455
@transient
456456
override protected[sql] lazy val functionRegistry: FunctionRegistry =
457-
new HiveFunctionRegistry(FunctionRegistry.builtin.copy(), this) {
458-
override def lookupFunction(name: String, children: Seq[Expression]): Expression = {
459-
// Hive Registry need current database to lookup function
460-
// TODO: the current database of executionHive should be consistent with metadataHive
461-
executionHive.withHiveState {
462-
super.lookupFunction(name, children)
463-
}
464-
}
465-
}
457+
new HiveFunctionRegistry(FunctionRegistry.builtin.copy(), this.executionHive)
466458

467459
// The Hive UDF current_database() is foldable, will be evaluated by optimizer, but the optimizer
468460
// can't access the SessionState of metadataHive.

sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -43,16 +43,19 @@ import org.apache.spark.sql.catalyst.plans.logical._
4343
import org.apache.spark.sql.catalyst.rules.Rule
4444
import org.apache.spark.sql.catalyst.util.ArrayData
4545
import org.apache.spark.sql.hive.HiveShim._
46+
import org.apache.spark.sql.hive.client.ClientWrapper
4647
import org.apache.spark.sql.types._
4748

4849

4950
private[hive] class HiveFunctionRegistry(
5051
underlying: analysis.FunctionRegistry,
51-
hiveContext: HiveContext)
52+
executionHive: ClientWrapper)
5253
extends analysis.FunctionRegistry with HiveInspectors {
5354

5455
def getFunctionInfo(name: String): FunctionInfo = {
55-
hiveContext.executionHive.withHiveState {
56+
// Hive Registry need current database to lookup function
57+
// TODO: the current database of executionHive should be consistent with metadataHive
58+
executionHive.withHiveState {
5659
FunctionRegistry.getFunctionInfo(name)
5760
}
5861
}

0 commit comments

Comments (0)