@@ -56,6 +56,10 @@ singleTableIdentifier
: tableIdentifier EOF
;

singleFunctionIdentifier
: functionIdentifier EOF
;

singleDataType
: dataType EOF
;
@@ -493,6 +497,10 @@ tableIdentifier
: (db=identifier '.')? table=identifier
;

functionIdentifier
: (db=identifier '.')? function=identifier
;

namedExpression
: expression (AS? (identifier | identifierList))?
;
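
The two new rules mirror the existing singleTableIdentifier/tableIdentifier pair: an optional database qualifier followed by the function name, with the single-rule entry point anchored by EOF. A minimal sketch of the input shapes this is meant to admit (the strings are invented for illustration; quoting behaviour comes from the grammar's existing identifier rule):

// Shapes the new functionIdentifier rule is expected to accept (illustration only):
val accepted = Seq(
  "myFunc",           // unqualified: db is absent
  "mydb.myFunc",      // qualified: db = mydb
  "`my db`.`my fn`"   // back-quoted identifiers, handled by the existing identifier rule
)
// A three-part name such as "catalog.db.func" does not match the rule, so the
// EOF-anchored singleFunctionIdentifier entry point should reject it.
accepted.foreach(println)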
@@ -75,6 +75,11 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
visitTableIdentifier(ctx.tableIdentifier)
}

override def visitSingleFunctionIdentifier(
ctx: SingleFunctionIdentifierContext): FunctionIdentifier = withOrigin(ctx) {
visitFunctionIdentifier(ctx.functionIdentifier)
}

override def visitSingleDataType(ctx: SingleDataTypeContext): DataType = withOrigin(ctx) {
visitSparkDataType(ctx.dataType)
}
@@ -759,6 +764,14 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
TableIdentifier(ctx.table.getText, Option(ctx.db).map(_.getText))
}

/**
* Create a [[FunctionIdentifier]] from a 'functionName' or 'databaseName'.'functionName' pattern.
*/
override def visitFunctionIdentifier(
ctx: FunctionIdentifierContext): FunctionIdentifier = withOrigin(ctx) {
FunctionIdentifier(ctx.function.getText, Option(ctx.db).map(_.getText))
}

/* ********************************************************************************************
* Expression parsing
* ******************************************************************************************** */
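
Like visitTableIdentifier earlier in this file, the new visitor maps the optional db capture onto the database field of the existing FunctionIdentifier class in org.apache.spark.sql.catalyst. A hedged sketch of the values it is expected to produce (names invented, not test cases from this change):

import org.apache.spark.sql.catalyst.FunctionIdentifier

// Expected mapping for the two input shapes:
val unqualified = FunctionIdentifier("myFunc", None)          // from "myFunc"
val qualified   = FunctionIdentifier("myFunc", Some("mydb"))  // from "mydb.myFunc"

assert(unqualified.database.isEmpty)
assert(qualified.database.contains("mydb"))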
@@ -22,7 +22,7 @@ import org.antlr.v4.runtime.misc.ParseCancellationException

import org.apache.spark.internal.Logging
import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.trees.Origin
@@ -49,6 +49,11 @@ abstract class AbstractSqlParser extends ParserInterface with Logging {
astBuilder.visitSingleTableIdentifier(parser.singleTableIdentifier())
}

/** Creates FunctionIdentifier for a given SQL string. */
def parseFunctionIdentifier(sqlText: String): FunctionIdentifier = parse(sqlText) { parser =>
astBuilder.visitSingleFunctionIdentifier(parser.singleFunctionIdentifier())
}

/**
* Creates StructType for a given SQL string, which is a comma separated list of field
* definitions which will preserve the correct Hive metadata.
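
End to end, the parseFunctionIdentifier method added above can be exercised through CatalystSqlParser, the existing concrete AbstractSqlParser in catalyst. A minimal usage sketch (the function names are invented):

import org.apache.spark.sql.catalyst.FunctionIdentifier
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser

val simple    = CatalystSqlParser.parseFunctionIdentifier("myFunc")
val qualified = CatalystSqlParser.parseFunctionIdentifier("mydb.myFunc")

assert(simple == FunctionIdentifier("myFunc", None))
assert(qualified == FunctionIdentifier("myFunc", Some("mydb")))

// Input that is not a (possibly database-qualified) identifier, e.g. "a.b.c",
// should raise a ParseException, as parseTableIdentifier already does.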
@@ -17,7 +17,7 @@

package org.apache.spark.sql.catalyst.parser

-import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.types.StructType
@@ -35,6 +35,9 @@ trait ParserInterface {
/** Creates TableIdentifier for a given SQL string. */
def parseTableIdentifier(sqlText: String): TableIdentifier

/** Creates FunctionIdentifier for a given SQL string. */
def parseFunctionIdentifier(sqlText: String): FunctionIdentifier

/**
* Creates StructType for a given SQL string, which is a comma separated list of field
* definitions which will preserve the correct Hive metadata.
@@ -591,8 +591,13 @@ class SparkSession private(
@transient lazy val catalog: Catalog = new CatalogImpl(self)

/**
- * Returns the specified table as a `DataFrame`.
+ * Returns the specified table/view as a `DataFrame`.
*
* @param tableName is either a qualified or unqualified name that designates a table or view.
* If a database is specified, it identifies the table/view from the database.
* Otherwise, it first attempts to find a temporary view with the given name
* and then match the table/view from the current database.
* Note that, the global temporary view database is also valid here.
* @since 2.0.0
*/
def table(tableName: String): DataFrame = {
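
A short, hedged example of the lookup order the updated scaladoc describes; the database, table, and view names are invented, and the last step assumes a Spark version that already has createGlobalTempView:

import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder()
  .master("local[2]")
  .appName("table-lookup-example")
  .getOrCreate()
import spark.implicits._

// A temporary view shadows a same-named table when the name is unqualified.
Seq((1, "a")).toDF("id", "tag").createOrReplaceTempView("people")

// A persistent table in an explicit database.
spark.sql("CREATE DATABASE IF NOT EXISTS mydb")
spark.sql("CREATE TABLE IF NOT EXISTS mydb.people (id INT, tag STRING) USING parquet")

spark.table("people").show()       // unqualified: the temporary view wins
spark.table("mydb.people").show()  // qualified: the table in database mydb

// Global temporary views live in the special global_temp database.
Seq((2, "b")).toDF("id", "tag").createGlobalTempView("people_global")
spark.table("global_temp.people_global").show()

spark.stop()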