@@ -551,9 +551,9 @@ object SQLConf {
.intConf
.createWithDefault(100)

- val WHOLESTAGE_FALLBACK = buildConf("spark.sql.codegen.fallback")
+ val CODEGEN_FALLBACK = buildConf("spark.sql.codegen.fallback")
.internal()
.doc("When true, whole stage codegen could be temporary disabled for the part of query that" +
.doc("When true, (whole stage) codegen could be temporary disabled for the part of query that" +
" fail to compile generated code")
.booleanConf
.createWithDefault(true)
@@ -1041,7 +1041,7 @@ class SQLConf extends Serializable with Logging {

def wholeStageMaxNumFields: Int = getConf(WHOLESTAGE_MAX_NUM_FIELDS)

- def wholeStageFallback: Boolean = getConf(WHOLESTAGE_FALLBACK)
+ def codegenFallback: Boolean = getConf(CODEGEN_FALLBACK)

def maxCaseBranchesForCodegen: Int = getConf(MAX_CASES_BRANCHES)

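The key string itself is unchanged by the rename, so the flag is still toggled the same way at runtime. A minimal usage sketch, assuming an already-running SparkSession named spark (the session name is not part of this diff):

// Usage sketch (assumed SparkSession `spark`); the key string is unchanged by the rename.
spark.conf.set("spark.sql.codegen.fallback", "false") // fail fast on codegen compile errors
println(spark.conf.get("spark.sql.codegen.fallback")) // prints: false
spark.conf.set("spark.sql.codegen.fallback", "true")  // restore the default fallback behavior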
@@ -54,6 +54,9 @@ abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Serializable {
@transient
final val sqlContext = SparkSession.getActiveSession.map(_.sqlContext).orNull

+ // whether we should fallback when hitting compilation errors caused by codegen
+ private val codeGenFallBack = sqlContext == null || sqlContext.conf.codegenFallback

@kiszk (Member) commented on Aug 27, 2017:

Is it better to add !Utils.isTesting && or to drop !Utils.isTesting && from WholeStageCodegenExec to make these conditions consistent?

@gatorsmile (Member, Author) replied on Aug 27, 2017:

Originally, I did it the way you suggested. However, with that approach I would need to remove the test case, so I think we should just keep using codegenFallback to control it.

(Member) replied:

I see.


protected def sparkContext = sqlContext.sparkContext

// sqlContext will be null when we are being deserialized on the slaves. In this instance
@@ -370,8 +373,7 @@ abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Serializable {
try {
GeneratePredicate.generate(expression, inputSchema)
} catch {
- case e @ (_: JaninoRuntimeException | _: CompileException)
-   if sqlContext == null || sqlContext.conf.wholeStageFallback =>

@gatorsmile (Member, Author) commented:

Because sqlContext is always null when this runs on executors, the old check always returned true there.

(Member) commented:

Removing the null check here makes sense, although it means existing Spark jobs that previously switched to the non-codegen version even with sqlContext.conf.wholeStageFallback = false will now start failing at runtime (perhaps rightly so). It might be worth calling this out in the 2.3 release notes and/or the migration guide.

+ case _ @ (_: JaninoRuntimeException | _: CompileException) if codeGenFallBack =>
genInterpretedPredicate(expression, inputSchema)
}
}
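The hunk above keeps the compile-or-interpret pattern but guards the fallback with the precomputed codeGenFallBack flag instead of a per-call null check on sqlContext. A minimal, self-contained sketch of that pattern, using stand-in names (FakeCompileException, compileFast, interpretSlow) rather than Spark's real codegen API:

// Sketch of the compile-then-fall-back pattern with assumed stand-in names;
// the "compiler" throws for long inputs to mimic a Janino CompileException.
object FallbackSketch {
  final class FakeCompileException(msg: String) extends RuntimeException(msg)

  private def compileFast(expr: String): Int => Boolean = {
    if (expr.length > 20) throw new FakeCompileException("generated code grows too large")
    n => n > 0 // pretend this closure is the compiled predicate
  }

  private def interpretSlow(expr: String): Int => Boolean =
    n => n > 0 // interpreted evaluation: slower, but never fails to compile

  def newPredicate(expr: String, fallbackEnabled: Boolean): Int => Boolean =
    try compileFast(expr) catch {
      case _: FakeCompileException if fallbackEnabled => interpretSlow(expr)
    }

  def main(args: Array[String]): Unit = {
    println(newPredicate("x" * 100, fallbackEnabled = true)(5)) // true: fell back quietly
    // with fallbackEnabled = false the compile error would propagate to the caller
  }
}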
@@ -382,7 +382,7 @@ case class WholeStageCodegenExec(child: SparkPlan) extends UnaryExecNode with CodegenSupport {
try {
CodeGenerator.compile(cleanedSource)
} catch {
- case e: Exception if !Utils.isTesting && sqlContext.conf.wholeStageFallback =>
+ case _: Exception if !Utils.isTesting && sqlContext.conf.codegenFallback =>
// We should already saw the error message
logWarning(s"Whole-stage codegen disabled for this plan:\n $treeString")
return child.execute()
@@ -422,7 +422,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSQLContext {
v
}
withSQLConf(
-   (SQLConf.WHOLESTAGE_FALLBACK.key, codegenFallback.toString),
+   (SQLConf.CODEGEN_FALLBACK.key, codegenFallback.toString),
(SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key, wholeStage.toString)) {
val df = spark.range(0, 4, 1, 4).withColumn("c", c)
val rows = df.collect()
@@ -2011,7 +2011,17 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {

val filter = (0 until N)
.foldLeft(lit(false))((e, index) => e.or(df.col(df.columns(index)) =!= "string"))
- df.filter(filter).count
+
+ withSQLConf(SQLConf.CODEGEN_FALLBACK.key -> "true") {
+   df.filter(filter).count()
+ }
+
+ withSQLConf(SQLConf.CODEGEN_FALLBACK.key -> "false") {
+   val e = intercept[SparkException] {
+     df.filter(filter).count()
+   }.getMessage
+   assert(e.contains("grows beyond 64 KB"))
+ }
}

test("SPARK-20897: cached self-join should not fail") {
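The new assertions above flip the flag with withSQLConf, which applies the override only for the enclosed block and then restores the previous value. A generic sketch of that save-set-restore idea, using a hypothetical helper rather than Spark's actual SQLTestUtils withSQLConf implementation:

// Hypothetical save-set-restore helper illustrating what a withSQLConf-style
// utility does: apply overrides, run the body, then restore the prior values.
object WithConfSketch {
  import scala.collection.mutable

  def withTempConf[T](conf: mutable.Map[String, String])(pairs: (String, String)*)(body: => T): T = {
    val previous = pairs.map { case (k, _) => k -> conf.get(k) } // remember old values
    pairs.foreach { case (k, v) => conf(k) = v }                 // apply the overrides
    try body finally previous.foreach {
      case (k, Some(old)) => conf(k) = old                       // restore the old value
      case (k, None)      => conf.remove(k)                      // or drop the key entirely
    }
  }

  def main(args: Array[String]): Unit = {
    val conf = mutable.Map("spark.sql.codegen.fallback" -> "true")
    withTempConf(conf)("spark.sql.codegen.fallback" -> "false") {
      println(conf("spark.sql.codegen.fallback"))                // false inside the block
    }
    println(conf("spark.sql.codegen.fallback"))                  // true again afterwards
  }
}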
@@ -24,6 +24,7 @@ import org.scalatest.concurrent.Eventually

import org.apache.spark.{DebugFilesystem, SparkConf}
import org.apache.spark.sql.{SparkSession, SQLContext}
+ import org.apache.spark.sql.internal.SQLConf

/**
* Helper trait for SQL test suites where all tests share a single [[TestSparkSession]].
@@ -34,6 +35,7 @@ trait SharedSQLContext extends SQLTestUtils with BeforeAndAfterEach with Eventually {
new SparkConf()
.set("spark.hadoop.fs.file.impl", classOf[DebugFilesystem].getName)
.set("spark.unsafe.exceptionOnMemoryLeak", "true")
+ .set(SQLConf.CODEGEN_FALLBACK.key, "false")
}

/**
@@ -51,6 +51,7 @@ object TestHive
"TestSQLContext",
new SparkConf()
.set("spark.sql.test", "")
+ .set(SQLConf.CODEGEN_FALLBACK.key, "false")

@gatorsmile (Member, Author) commented:

Set it to false to ensure it does not hide actual bugs in our expression codegen that cause compilation failures.

.set("spark.sql.hive.metastore.barrierPrefixes",
"org.apache.spark.sql.hive.execution.PairSerDe")
.set("spark.sql.warehouse.dir", TestHiveContext.makeWarehouseDir().toURI.getPath)