diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestTimeTravelTable.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestTimeTravelTable.scala
index ce0f17c3f569c..4a871a82dd2ef 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestTimeTravelTable.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestTimeTravelTable.scala
@@ -238,4 +238,16 @@ class TestTimeTravelTable extends HoodieSparkSqlTestBase {
       }
     }
   }
+
+  test("Test unsupported syntax can be parsed") {
+    if (HoodieSparkUtils.gteqSpark3_2) {
+      checkAnswer("select 1 distribute by 1")(Seq(1))
+      withTempDir { dir =>
+        val path = dir.toURI.getPath
+        spark.sql(s"insert overwrite local directory '$path' using parquet select 1")
+        // Requires Hive support to be enabled, so it is not exercised here.
+        // spark.sql(s"insert overwrite local directory '$path' stored as orc select 1")
+      }
+    }
+  }
 }
diff --git a/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/parser/HoodieSpark3_2ExtendedSqlParser.scala b/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/parser/HoodieSpark3_2ExtendedSqlParser.scala
index 59ef8dfe0969b..2b8931ace3b8a 100644
--- a/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/parser/HoodieSpark3_2ExtendedSqlParser.scala
+++ b/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/parser/HoodieSpark3_2ExtendedSqlParser.scala
@@ -32,6 +32,8 @@ import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
 import org.apache.spark.sql.types._
 import org.apache.spark.sql.{AnalysisException, SparkSession}
 
+import scala.util.control.NonFatal
+
 class HoodieSpark3_2ExtendedSqlParser(session: SparkSession, delegate: ParserInterface)
   extends ParserInterface with Logging {
 
@@ -39,9 +41,14 @@ class HoodieSpark3_2ExtendedSqlParser(session: SparkSession, delegate: ParserInt
   private lazy val builder = new HoodieSpark3_2ExtendedSqlAstBuilder(conf, delegate)
 
   override def parsePlan(sqlText: String): LogicalPlan = parse(sqlText) { parser =>
-    builder.visit(parser.singleStatement()) match {
-      case plan: LogicalPlan => plan
-      case _=> delegate.parsePlan(sqlText)
+    try {
+      builder.visit(parser.singleStatement()) match {
+        case plan: LogicalPlan => plan
+        case _ => delegate.parsePlan(sqlText)
+      }
+    } catch {
+      case NonFatal(_) =>
+        delegate.parsePlan(sqlText)
     }
   }
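
For readers skimming the patch: the parser change follows a "try the extended grammar, otherwise delegate" pattern, so any statement the Hudi-extended grammar cannot handle (DISTRIBUTE BY, INSERT OVERWRITE [LOCAL] DIRECTORY, and so on) is re-parsed by Spark's own parser instead of failing. The sketch below is a minimal, self-contained model of that fallback; MiniParser, ExtendedParser, and FallbackDemo are illustrative names, not the real Spark/Hudi API, which implements org.apache.spark.sql.catalyst.parser.ParserInterface.

```scala
import scala.util.control.NonFatal

// Simplified stand-in for ParserInterface: a single method producing string "plans".
trait MiniParser {
  def parsePlan(sqlText: String): String
}

// Extended parser that only understands its extra syntax (here, a toy
// "timestamp as of" clause) and throws on everything else.
class ExtendedParser(delegate: MiniParser) extends MiniParser {

  private def parseWithExtendedGrammar(sqlText: String): String =
    if (sqlText.toLowerCase.contains("timestamp as of")) s"TimeTravelPlan($sqlText)"
    else throw new IllegalArgumentException(s"unsupported syntax: $sqlText")

  override def parsePlan(sqlText: String): String =
    try {
      parseWithExtendedGrammar(sqlText)
    } catch {
      // Any non-fatal parse failure falls back to the underlying parser,
      // mirroring the NonFatal(_) => delegate.parsePlan(sqlText) branch in the patch.
      case NonFatal(_) => delegate.parsePlan(sqlText)
    }
}

object FallbackDemo extends App {
  // SAM conversion: a plain function stands in for the delegate "Spark" parser.
  val sparkLikeParser: MiniParser = (sql: String) => s"SparkPlan($sql)"
  val parser = new ExtendedParser(sparkLikeParser)

  println(parser.parsePlan("select * from t timestamp as of '2022-01-01'")) // handled by the extension
  println(parser.parsePlan("select 1 distribute by 1"))                     // falls back to the delegate
}
```

The trade-off of catching NonFatal broadly is that genuine syntax errors are also retried against the delegate, which then reports them; the upside, exercised by the test added above, is that syntax the extension does not cover keeps parsing when the extended parser is installed.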