1818package org .apache .spark .sql .sources
1919
2020import java .io .File
21+ import java .net .URI
2122
2223import org .apache .spark .sql .{AnalysisException , QueryTest }
2324import org .apache .spark .sql .catalyst .expressions .UnsafeProjection
@@ -65,6 +66,11 @@ class BucketedWriteSuite extends QueryTest with SQLTestUtils with TestHiveSingle
6566
// A 50-row test DataFrame with columns: i in [0, 5), j in [0, 13), and k = the row index as a string.
private val df = (0 until 50).map { n => (n % 5, n % 13, n.toString) }.toDF("i", "j", "k")
6768
/**
 * Locates the on-disk directory of the test table "bucketed_table" by asking the
 * catalog for its default warehouse file path (rather than hard-coding
 * `<warehousePath>/bucketed_table`, which can diverge from the catalog's layout).
 */
def tableDir: File = {
  val ident = hiveContext.sqlParser.parseTableIdentifier("bucketed_table")
  val defaultPath = hiveContext.catalog.hiveDefaultTableFilePath(ident)
  new File(URI.create(defaultPath))
}
73+
6874 /**
6975 * A helper method to check the bucket write functionality in low level, i.e. check the written
7076 * bucket files to see if the data are correct. User should pass in a data dir that these bucket
@@ -82,7 +88,7 @@ class BucketedWriteSuite extends QueryTest with SQLTestUtils with TestHiveSingle
8288 dataDir.isDirectory
8389 throw new NullPointerException (s """
8490 |dataDir.listFiles() is NULL
85- |== Directory(isDirectory= ${dataDir.isDirectory}) ==
91+ |== Directory(isDirectory= ${dataDir.isDirectory} exists= ${dataDir.exists} ) ==
8692 | $dataDir
8793 |== Hive Warehouse(isDirectory= ${hiveContext.warehousePath.isDirectory}) ==
8894 | ${hiveContext.warehousePath}
@@ -139,7 +145,6 @@ class BucketedWriteSuite extends QueryTest with SQLTestUtils with TestHiveSingle
139145 .bucketBy(8 , " j" , " k" )
140146 .saveAsTable(" bucketed_table" )
141147
142- val tableDir = new File (hiveContext.warehousePath, " bucketed_table" )
143148 for (i <- 0 until 5 ) {
144149 testBucketing(new File (tableDir, s " i= $i" ), source, 8 , Seq (" j" , " k" ))
145150 }
@@ -157,7 +162,6 @@ class BucketedWriteSuite extends QueryTest with SQLTestUtils with TestHiveSingle
157162 .sortBy(" k" )
158163 .saveAsTable(" bucketed_table" )
159164
160- val tableDir = new File (hiveContext.warehousePath, " bucketed_table" )
161165 for (i <- 0 until 5 ) {
162166 testBucketing(new File (tableDir, s " i= $i" ), source, 8 , Seq (" j" ), Seq (" k" ))
163167 }
@@ -173,7 +177,6 @@ class BucketedWriteSuite extends QueryTest with SQLTestUtils with TestHiveSingle
173177 .bucketBy(8 , " i" , " j" )
174178 .saveAsTable(" bucketed_table" )
175179
176- val tableDir = new File (hiveContext.warehousePath, " bucketed_table" )
177180 testBucketing(tableDir, source, 8 , Seq (" i" , " j" ))
178181 }
179182 }
@@ -188,7 +191,6 @@ class BucketedWriteSuite extends QueryTest with SQLTestUtils with TestHiveSingle
188191 .sortBy(" k" )
189192 .saveAsTable(" bucketed_table" )
190193
191- val tableDir = new File (hiveContext.warehousePath, " bucketed_table" )
192194 testBucketing(tableDir, source, 8 , Seq (" i" , " j" ), Seq (" k" ))
193195 }
194196 }
0 commit comments