HiveContext.scala
@@ -118,12 +118,12 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
 
   def refreshTable(tableName: String): Unit = {
     // TODO: Database support...
-    catalog.refreshTable("default", tableName)
+    catalog.refreshTable(Seq("default", tableName))
   }
 
   protected[hive] def invalidateTable(tableName: String): Unit = {
     // TODO: Database support...
-    catalog.invalidateTable("default", tableName)
+    catalog.invalidateTable(Seq("default", tableName))
   }
 
   /**
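
Note: the user-facing HiveContext.refreshTable(tableName) and invalidateTable(tableName) signatures are unchanged; only the calls into the catalog change shape, packing the database and table name into a single Seq[String] identifier. A minimal, hypothetical call-site sketch (assumes an existing SparkContext `sc`; the table name is made up):

// Hypothetical usage; this patch does not change the public API.
val hiveContext = new HiveContext(sc)
hiveContext.refreshTable("my_table")
// ...which now reaches the catalog as:
//   catalog.refreshTable(Seq("default", "my_table"))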
HiveMetastoreCatalog.scala
@@ -52,18 +52,13 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with
   /** Connection to hive metastore. Usages should lock on `this`. */
   protected[hive] val client = Hive.get(hive.hiveconf)
 
-  // TODO: Use this everywhere instead of tuples or databaseName, tableName,.
-  /** A fully qualified identifier for a table (i.e., database.tableName) */
-  case class QualifiedTableName(database: String, name: String) {
-    def toLowerCase = QualifiedTableName(database.toLowerCase, name.toLowerCase)
-  }
-
   /** A cache of Spark SQL data source tables that have been accessed. */
-  protected[hive] val cachedDataSourceTables: LoadingCache[QualifiedTableName, LogicalPlan] = {
-    val cacheLoader = new CacheLoader[QualifiedTableName, LogicalPlan]() {
-      override def load(in: QualifiedTableName): LogicalPlan = {
+  protected[hive] val cachedDataSourceTables: LoadingCache[Seq[String], LogicalPlan] = {
+    val cacheLoader = new CacheLoader[Seq[String], LogicalPlan]() {
+      override def load(in: Seq[String]): LogicalPlan = {
         logDebug(s"Creating new cached data source for $in")
-        val table = client.getTable(in.database, in.name)
-
+        val table = client.getTable(in(0), in(1))
         val schemaString = table.getProperty("spark.sql.sources.schema")
         val userSpecifiedSchema =
           if (schemaString == null) {
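
Why Seq[String] works as the Guava cache key: Scala sequences have structural equals/hashCode, so a key built at refresh time matches a structurally equal key built later at lookup time. A self-contained sketch of the same pattern (object and value names are illustrative, not from the patch):

import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache}

object SeqKeyCacheDemo {
  // The loader stands in for client.getTable(in(0), in(1)) plus plan construction.
  private val loader = new CacheLoader[Seq[String], String]() {
    override def load(in: Seq[String]): String = s"plan for ${in(0)}.${in(1)}"
  }

  val cache: LoadingCache[Seq[String], String] =
    CacheBuilder.newBuilder().maximumSize(1000).build(loader)

  def main(args: Array[String]): Unit = {
    println(cache(Seq("default", "src")))   // miss: runs the loader, caches the result
    println(cache(Seq("default", "src")))   // hit: a new but structurally equal key
    cache.invalidate(Seq("default", "src")) // drops the entry
  }
}

One trade-off versus the removed QualifiedTableName: the compiler no longer enforces that a key has exactly two parts, so the in(0)/in(1) accesses rely on callers passing well-formed identifiers.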
@@ -89,12 +84,12 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with
     CacheBuilder.newBuilder().maximumSize(1000).build(cacheLoader)
   }
 
-  def refreshTable(databaseName: String, tableName: String): Unit = {
-    cachedDataSourceTables.refresh(QualifiedTableName(databaseName, tableName).toLowerCase)
+  def refreshTable(tableIdentifier: Seq[String]): Unit = {
+    cachedDataSourceTables.refresh(tableIdentifier.map(_.toLowerCase()))
  }
 
-  def invalidateTable(databaseName: String, tableName: String): Unit = {
-    cachedDataSourceTables.invalidate(QualifiedTableName(databaseName, tableName).toLowerCase)
+  def invalidateTable(tableIdentifier: Seq[String]): Unit = {
+    cachedDataSourceTables.invalidate(tableIdentifier.map(_.toLowerCase()))
   }
 
   val caseSensitive: Boolean = false
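
refreshTable and invalidateTable now take the multi-part identifier directly and lowercase each part before touching the cache, keeping keys consistent with the case-insensitive catalog (val caseSensitive: Boolean = false above). Illustrative calls (table names are made up):

catalog.refreshTable(Seq("Default", "My_Table"))
// internally: cachedDataSourceTables.refresh(Seq("default", "my_table"))
catalog.invalidateTable(Seq("DEFAULT", "MY_TABLE"))
// internally: cachedDataSourceTables.invalidate(Seq("default", "my_table"))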
@@ -144,7 +139,7 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with
     val table = client.getTable(databaseName, tblName)
 
     if (table.getProperty("spark.sql.sources.provider") != null) {
-      cachedDataSourceTables(QualifiedTableName(databaseName, tblName).toLowerCase)
+      cachedDataSourceTables(Seq(databaseName, tblName))
     } else if (table.isView) {
       // if the unresolved relation is from hive view
       // parse the text into logic node.
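
A small interop detail at this call site: Guava's LoadingCache extends com.google.common.base.Function, so Scala's apply sugar compiles and cachedDataSourceTables(key) behaves like getUnchecked(key), running the loader on a miss. Equivalent forms (key values are illustrative):

// Both load and cache the plan on a miss:
val plan1 = cachedDataSourceTables(Seq("default", "src"))
val plan2 = cachedDataSourceTables.getUnchecked(Seq("default", "src"))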