Instance Constructors
- new HiveWarehouseSession()
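In practice a HiveWarehouseSession is rarely instantiated directly with new; the Hive Warehouse Connector provides a builder that derives one from an active SparkSession. A minimal Scala sketch, assuming the Hortonworks HWC entry point com.hortonworks.hwc.HiveWarehouseSession.session(spark).build() and a cluster where the connector JAR and the HiveServer2 JDBC URL are already configured:

    import org.apache.spark.sql.SparkSession
    // Assumed HWC builder entry point; the package name depends on the connector distribution.
    import com.hortonworks.hwc.HiveWarehouseSession

    val spark = SparkSession.builder()
      .appName("hwc-example")
      .enableHiveSupport()
      .getOrCreate()

    // Build a concrete HiveWarehouseSession bound to `spark`.
    val hive = HiveWarehouseSession.session(spark).build()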
Abstract Value Members
- abstract def cleanUpStreamingMeta(checkpointLocation: String, queryId: String, tableName: String): Unit
- abstract def close(): Unit
- abstract def commitTxn(): Unit
- abstract def createDatabase(database: String, ifNotExists: Boolean): Unit
- abstract def describeTable(table: String): Dataset[Row]
- abstract def dropDatabase(database: String, ifExists: Boolean, cascade: Boolean): Unit
- abstract def dropTable(table: String, ifExists: Boolean, purge: Boolean): Unit
- abstract def execute(sql: String): Dataset[Row]
- abstract def executeQuery(sql: String, numSplitsToDemand: Int): Dataset[Row]
- abstract def executeQuery(sql: String, useSplitsEqualToSparkCores: Boolean): Dataset[Row]
- abstract def executeQuery(sql: String): Dataset[Row]
- abstract def executeUpdate(sql: String, propagateException: Boolean): Boolean
- abstract def executeUpdate(sql: String): Boolean
- abstract def mergeBuilder(): MergeBuilder
- abstract def q(sql: String): Dataset[Row]
- abstract def session(): SparkSession
- abstract def setDatabase(name: String): Unit
- abstract def showDatabases(): Dataset[Row]
- abstract def showTables(): Dataset[Row]
- abstract def sql(queryToFetchData: String): Dataset[Row]
- abstract def table(table: String): Dataset[Row]
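The abstract members above are the surface a caller programs against: catalog inspection (showDatabases, showTables, describeTable, setDatabase), reads returning Dataset[Row] (execute, executeQuery, q, sql, table), DDL/DML via executeUpdate, createDatabase, dropDatabase, dropTable, plus commitTxn, mergeBuilder, cleanUpStreamingMeta, session, and close. The sketch below exercises the common ones; it assumes hive was built as shown under Instance Constructors, and the database and table names (sales_db, web_sales, sandbox) are placeholders, not names from this documentation:

    // Catalog inspection
    hive.showDatabases().show()
    hive.setDatabase("sales_db")                        // placeholder database name
    hive.showTables().show()
    hive.describeTable("web_sales").show()              // placeholder table name

    // Run a query and work with the result as a Dataset[Row]
    val recent = hive.executeQuery(
      "SELECT ws_order_number, ws_net_paid FROM web_sales LIMIT 10")
    recent.show()

    // Load an entire table
    val sales = hive.table("web_sales")
    println(sales.count())

    // DDL / DML; executeUpdate returns a Boolean per the signature above
    hive.executeUpdate("CREATE DATABASE IF NOT EXISTS sandbox")
    hive.createDatabase("sandbox2", true)               // ifNotExists = true
    hive.dropTable("sandbox.tmp_results", true, false)  // ifExists = true, purge = false

    // The underlying SparkSession is reachable from the warehouse session
    val ss: SparkSession = hive.session()

    // Release connector resources when done
    hive.close()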
Concrete Value Members
- final def !=(arg0: Any): Boolean
- final def ##(): Int
- final def ==(arg0: Any): Boolean
- final def asInstanceOf[T0]: T0
- def clone(): AnyRef
- final def eq(arg0: AnyRef): Boolean
- def equals(arg0: Any): Boolean
- def finalize(): Unit
- final def getClass(): Class[_]
- def hashCode(): Int
- final def isInstanceOf[T0]: Boolean
- final def ne(arg0: AnyRef): Boolean
- final def notify(): Unit
- final def notifyAll(): Unit
- final def synchronized[T0](arg0: ⇒ T0): T0
- def toString(): String
- final def wait(): Unit
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit
Inherited from AnyRef
Inherited from Any