Commit 5fe5894

fix serialization suite
1 parent 81711c4 commit 5fe5894

2 files changed: 7 additions & 2 deletions


sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala

Lines changed: 3 additions & 0 deletions
@@ -156,6 +156,7 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
 
   val describedTable = "DESCRIBE (\\w+)".r
 
+  @transient
   val vs = new VariableSubstitution()
 
   // we should substitute variables in hql to pass the text to parseSql() as a parameter.
@@ -203,6 +204,7 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
    * A list of test tables and the DDL required to initialize them. A test table is loaded on
    * demand when a query are run against it.
    */
+  @transient
   lazy val testTables = new mutable.HashMap[String, TestTable]()
 
   def registerTestTable(testTable: TestTable): Unit = {
@@ -212,6 +214,7 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
   // The test tables that are defined in the Hive QTestUtil.
   // /itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
   // https://github.com/apache/hive/blob/branch-0.13/data/scripts/q_test_init.sql
+  @transient
   val hiveQTestUtilTables = Seq(
     TestTable("src",
       "CREATE TABLE src (key INT, value STRING)".cmd,

sql/hive/src/test/scala/org/apache/spark/sql/hive/SerializationSuite.scala

Lines changed: 4 additions & 2 deletions
@@ -26,8 +26,10 @@ import org.apache.spark.sql.hive.test.TestHive
 class SerializationSuite extends FunSuite {
 
   test("[SPARK-5840] HiveContext should be serializable") {
-    val hiveContext = new HiveContext(TestHive.sparkContext)
+    val hiveContext = TestHive
     hiveContext.hiveconf
-    new JavaSerializer(new SparkConf()).newInstance().serialize(hiveContext)
+    val serializer = new JavaSerializer(new SparkConf()).newInstance()
+    val bytes = serializer.serialize(hiveContext)
+    val deSer = serializer.deserialize[AnyRef](bytes)
   }
 }
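
The test now exercises a full Java-serialization round trip rather than only serializing. For context, a minimal self-contained sketch of the pattern the patch relies on, marking non-serializable members @transient so JavaSerializer can round-trip the enclosing object, could look like the following. The Container class, its fields, and RoundTripSketch are illustrative stand-ins, not part of this commit.

import org.apache.spark.SparkConf
import org.apache.spark.serializer.JavaSerializer

// Illustrative stand-in for TestHiveContext: the expensive, non-serializable
// member is excluded from serialization via @transient and, being a lazy val,
// is rebuilt on first access after deserialization.
class Container extends Serializable {
  @transient lazy val heavyState = new Object()
  val name = "container"
}

object RoundTripSketch {
  def main(args: Array[String]): Unit = {
    val serializer = new JavaSerializer(new SparkConf()).newInstance()
    val bytes = serializer.serialize(new Container)          // serialize to a ByteBuffer
    val restored = serializer.deserialize[Container](bytes)  // deserialize it back
    println(restored.name)                                   // non-transient state survives
  }
}

In the suite above, hiveContext.hiveconf is presumably invoked first so the lazy Hive state is materialized before the round trip, i.e. serialization is tested against a fully initialized context.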
