org.apache.hadoop.hive.ql.metadata
Class Hive

java.lang.Object
  org.apache.hadoop.hive.ql.metadata.Hive
public class Hive
The Hive class contains information about this instance of Hive. An instance of Hive represents a set of data in a file system (usually HDFS) organized for easy query processing.
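For orientation, here is a minimal sketch of how a client might obtain and release the per-thread Hive handle described on this page. It assumes a default-constructed HiveConf picks up hive-site.xml from the classpath; the class and method names come from this page, everything else is illustrative.

```java
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;

public class HiveHandleExample {
  public static void main(String[] args) throws HiveException {
    // Assumption: a no-arg HiveConf reads hive-site.xml from the classpath.
    HiveConf conf = new HiveConf();

    // get(HiveConf) returns the Hive object bound to the current thread.
    Hive hive = Hive.get(conf);
    System.out.println("Current database: " + hive.getCurrentDatabase());

    // Release the thread-local metastore connection when done.
    Hive.closeCurrent();
  }
}
```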
Method Summary
void |
alterDatabase(String dbName,
Database db)
|
void |
alterIndex(String dbName,
String baseTblName,
String idxName,
Index newIdx)
Updates the existing index metadata with the new metadata. |
void |
alterPartition(String tblName,
Partition newPart)
Updates the existing partition metadata with the new metadata. |
void |
alterTable(String tblName,
Table newTbl)
Updates the existing table metadata with the new metadata. |
static void |
closeCurrent()
|
protected static void |
copyFiles(org.apache.hadoop.fs.Path srcf,
org.apache.hadoop.fs.Path destf,
org.apache.hadoop.fs.FileSystem fs)
|
void |
createDatabase(Database db)
Create a Database. |
void |
createDatabase(Database db,
boolean ifNotExist)
Create a database |
void |
createIndex(String tableName,
String indexName,
String indexHandlerClass,
List<String> indexedCols,
String indexTblName,
boolean deferredRebuild,
String inputFormat,
String outputFormat,
String serde,
String storageHandler,
String location,
Map<String,String> idxProps,
Map<String,String> tblProps,
Map<String,String> serdeProps,
String collItemDelim,
String fieldDelim,
String fieldEscape,
String lineDelim,
String mapKeyDelim,
String indexComment)
|
Partition |
createPartition(Table tbl,
Map<String,String> partSpec)
Creates a partition. |
Partition |
createPartition(Table tbl,
Map<String,String> partSpec,
org.apache.hadoop.fs.Path location)
Creates a partition |
void |
createRole(String roleName,
String ownerName)
|
void |
createTable(String tableName,
List<String> columns,
List<String> partCols,
Class<? extends org.apache.hadoop.mapred.InputFormat> fileInputFormat,
Class<?> fileOutputFormat)
Creates the table metadata and the directory for the table data. |
void |
createTable(String tableName,
List<String> columns,
List<String> partCols,
Class<? extends org.apache.hadoop.mapred.InputFormat> fileInputFormat,
Class<?> fileOutputFormat,
int bucketCount,
List<String> bucketCols)
Creates the table metadata and the directory for the table data. |
void |
createTable(Table tbl)
Creates the table with the given object. |
void |
createTable(Table tbl,
boolean ifNotExists)
Creates the table with the given object. |
boolean |
databaseExists(String dbName)
Query metadata to see if a database with the given name already exists. |
void |
dropDatabase(String name)
Drop a database. |
void |
dropDatabase(String name,
boolean deleteData,
boolean ignoreUnknownDb)
Drop a database |
boolean |
dropIndex(String db_name,
String tbl_name,
String index_name,
boolean deleteData)
|
boolean |
dropPartition(String db_name,
String tbl_name,
List<String> part_vals,
boolean deleteData)
|
void |
dropRole(String roleName)
|
void |
dropTable(String tableName)
Drops table along with the data in it. |
void |
dropTable(String dbName,
String tableName)
Drops table along with the data in it. |
void |
dropTable(String dbName,
String tableName,
boolean deleteData,
boolean ignoreUnknownTab)
Drops the table. |
PrincipalPrivilegeSet |
get_privilege_set(HiveObjectType objectType,
String db_name,
String table_name,
List<String> part_values,
String column_name,
String user_name,
List<String> group_names)
|
static Hive |
get()
|
static Hive |
get(HiveConf c)
Gets hive object for the current thread. |
static Hive |
get(HiveConf c,
boolean needsRefresh)
Get a connection to the metastore. |
List<String> |
getAllDatabases()
Get all existing database names. |
List<String> |
getAllRoleNames()
Get all existing role names. |
List<String> |
getAllTables()
Get all table names for the current database. |
List<String> |
getAllTables(String dbName)
Get all table names for the specified database. |
HiveConf |
getConf()
|
String |
getCurrentDatabase()
Get the name of the current database |
Database |
getDatabase(String dbName)
Get the database by name. |
List<String> |
getDatabasesByPattern(String databasePattern)
Get all existing databases that match the given pattern. |
static List<FieldSchema> |
getFieldsFromDeserializer(String name,
Deserializer serde)
|
Index |
getIndex(String baseTableName,
String indexName)
|
Index |
getIndex(String dbName,
String baseTableName,
String indexName)
|
List<Index> |
getIndexes(String dbName,
String tblName,
short max)
|
Partition |
getPartition(Table tbl,
Map<String,String> partSpec,
boolean forceCreate)
|
Partition |
getPartition(Table tbl,
Map<String,String> partSpec,
boolean forceCreate,
String partPath)
Returns partition metadata |
List<String> |
getPartitionNames(String dbName,
String tblName,
Map<String,String> partSpec,
short max)
|
List<String> |
getPartitionNames(String dbName,
String tblName,
short max)
|
List<Partition> |
getPartitions(Table tbl)
Get all the partitions that the table has. |
List<Partition> |
getPartitions(Table tbl,
Map<String,String> partialPartSpec)
Get all the partitions of the table that match the given partial specification. |
List<Partition> |
getPartitionsByNames(Table tbl,
Map<String,String> partialPartSpec)
Get all the partitions of the table that match the given partial specification. |
Table |
getTable(String tableName)
Returns metadata for the table named tableName in the current database. |
Table |
getTable(String dbName,
String tableName)
Returns metadata of the table |
Table |
getTable(String dbName,
String tableName,
boolean throwException)
Returns metadata of the table |
List<String> |
getTablesByPattern(String tablePattern)
Returns all existing tables from the default database which match the given pattern. |
List<String> |
getTablesByPattern(String dbName,
String tablePattern)
Returns all existing tables from the specified database which match the given pattern. |
List<String> |
getTablesForDb(String database,
String tablePattern)
Returns all existing tables from the given database which match the given pattern. |
boolean |
grantPrivileges(PrivilegeBag privileges)
|
boolean |
grantRole(String roleName,
String userName,
PrincipalType principalType,
String grantor,
PrincipalType grantorType,
boolean grantOption)
|
List<Role> |
listRoles(String userName,
PrincipalType principalType)
|
ArrayList<LinkedHashMap<String,String>> |
loadDynamicPartitions(org.apache.hadoop.fs.Path loadPath,
String tableName,
Map<String,String> partSpec,
boolean replace,
int numDP,
boolean holdDDLTime)
Given a source directory name of the load path, load all dynamically generated partitions into the specified table and return a list of strings that represent the dynamic partition paths. |
void |
loadPartition(org.apache.hadoop.fs.Path loadPath,
String tableName,
Map<String,String> partSpec,
boolean replace,
boolean holdDDLTime)
Load a directory into a Hive Table Partition - Alters existing content of the partition with the contents of loadPath. |
void |
loadTable(org.apache.hadoop.fs.Path loadPath,
String tableName,
boolean replace,
boolean holdDDLTime)
Load a directory into a Hive Table. |
protected static void |
replaceFiles(org.apache.hadoop.fs.Path srcf,
org.apache.hadoop.fs.Path destf,
org.apache.hadoop.fs.Path oldPath,
org.apache.hadoop.conf.Configuration conf)
Replaces files in the partition with new data set specified by srcf. |
boolean |
revokePrivileges(PrivilegeBag privileges)
|
boolean |
revokeRole(String roleName,
String userName,
PrincipalType principalType)
|
void |
setCurrentDatabase(String currentDatabase)
Set the name of the current database |
List<HiveObjectPrivilege> |
showPrivilegeGrant(HiveObjectType objectType,
String principalName,
PrincipalType principalType,
String dbName,
String tableName,
List<String> partValues,
String columnName)
|
List<Role> |
showRoleGrant(String principalName,
PrincipalType principalType)
|
Methods inherited from class java.lang.Object
clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
Method Detail
public static Hive get(HiveConf c) throws HiveException
c
- new Hive Configuration
HiveException
public static Hive get(HiveConf c, boolean needsRefresh) throws HiveException
c
- new conf
needsRefresh
- if true then creates a new one
HiveException
public static Hive get() throws HiveException
HiveException
public static void closeCurrent()
public void createDatabase(Database db, boolean ifNotExist) throws AlreadyExistsException, HiveException
db
-
ifNotExist
- if true, will ignore AlreadyExistsException
AlreadyExistsException
HiveException
public void createDatabase(Database db) throws AlreadyExistsException, HiveException
db
-
AlreadyExistsException
HiveException
public void dropDatabase(String name) throws HiveException, NoSuchObjectException
name
-
NoSuchObjectException
HiveException
HiveMetaStoreClient.dropDatabase(java.lang.String)
public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb) throws HiveException, NoSuchObjectException
name
-
deleteData
-
ignoreUnknownDb
- if true, will ignore NoSuchObjectException
HiveException
NoSuchObjectException
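A sketch of the database lifecycle calls above (createDatabase, databaseExists, dropDatabase). The Database bean is the Thrift-generated org.apache.hadoop.hive.metastore.api.Database; its no-arg constructor and setter names are assumptions, as is the sample database name.

```java
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.metadata.Hive;

public class DatabaseLifecycleExample {
  public static void main(String[] args) throws Exception {
    Hive hive = Hive.get(new HiveConf());

    // Assumption: Database is a plain Thrift bean with setter methods.
    Database db = new Database();
    db.setName("sandbox_db");                          // hypothetical database name
    db.setDescription("scratch area for ad-hoc tables");

    // ifNotExist = true swallows AlreadyExistsException on re-runs.
    hive.createDatabase(db, true);
    System.out.println("exists? " + hive.databaseExists("sandbox_db"));

    // deleteData = true removes the data; ignoreUnknownDb = true makes this idempotent.
    hive.dropDatabase("sandbox_db", true, true);
  }
}
```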
public void createTable(String tableName, List<String> columns, List<String> partCols, Class<? extends org.apache.hadoop.mapred.InputFormat> fileInputFormat, Class<?> fileOutputFormat) throws HiveException
tableName
- name of the table
columns
- list of fields of the table
partCols
- partition keys of the table
fileInputFormat
- Class of the input format of the table data file
fileOutputFormat
- Class of the output format of the table data file
HiveException
- thrown if the args are invalid or if the metadata or the data directory couldn't be created
public void createTable(String tableName, List<String> columns, List<String> partCols, Class<? extends org.apache.hadoop.mapred.InputFormat> fileInputFormat, Class<?> fileOutputFormat, int bucketCount, List<String> bucketCols) throws HiveException
tableName
- name of the table
columns
- list of fields of the table
partCols
- partition keys of the table
fileInputFormat
- Class of the input format of the table data file
fileOutputFormat
- Class of the output format of the table data file
bucketCount
- number of buckets that each partition (or the table itself) should
be divided into
HiveException
- thrown if the args are invalid or if the metadata or the data directory couldn't be created
public void alterTable(String tblName, Table newTbl) throws InvalidOperationException, HiveException
tblName
- name of the existing table
newTbl
- new name of the table; could be the old name
InvalidOperationException
- if the changes in metadata are not acceptable
org.apache.thrift.TException
HiveException
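A sketch of an alterTable round trip: fetch the existing Table, tweak it, and write it back. Table.setProperty and the property key used here are assumptions, as are the database and table names; only getTable and alterTable are documented on this page.

```java
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Table;

public class AlterTableExample {
  public static void main(String[] args) throws Exception {
    Hive hive = Hive.get(new HiveConf());

    // Fetch current metadata for a (hypothetical) table in the default database.
    Table tbl = hive.getTable("default", "web_logs");

    // Assumption: Table exposes a setProperty(String, String) mutator for table parameters.
    tbl.setProperty("comment", "refreshed nightly");

    // Write the modified metadata back under the same table name.
    hive.alterTable("web_logs", tbl);
  }
}
```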
public void alterIndex(String dbName, String baseTblName, String idxName, Index newIdx) throws InvalidOperationException, HiveException
idxName
- name of the existing index
newIdx
- new name of the index; could be the old name
InvalidOperationException
- if the changes in metadata are not acceptable
org.apache.thrift.TException
HiveException
public void alterPartition(String tblName, Partition newPart) throws InvalidOperationException, HiveException
tblName
- name of the existing table
newPart
- new partition metadata
InvalidOperationException
- if the changes in metadata are not acceptable
org.apache.thrift.TException
HiveException
public void alterDatabase(String dbName, Database db) throws HiveException
HiveException
public void createTable(Table tbl) throws HiveException
tbl
- a table object
HiveException
public void createTable(Table tbl, boolean ifNotExists) throws HiveException
tbl
- a table object
ifNotExists
- if true, ignore AlreadyExistsException
HiveException
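The column-list overload of createTable documented earlier in this section can build a simple table without assembling a Table object by hand. A minimal sketch, assuming TextInputFormat and HiveIgnoreKeyTextOutputFormat are acceptable file formats for the table; the table and column names are made up.

```java
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.mapred.TextInputFormat;

public class CreateTableExample {
  public static void main(String[] args) throws Exception {
    Hive hive = Hive.get(new HiveConf());

    List<String> columns = Arrays.asList("user_id", "url");  // hypothetical columns
    List<String> partCols = Arrays.asList("dt");             // partitioned by date

    // Creates the table metadata and its data directory in the current database.
    hive.createTable("web_logs", columns, partCols,
        TextInputFormat.class, HiveIgnoreKeyTextOutputFormat.class);
  }
}
```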
public void createIndex(String tableName, String indexName, String indexHandlerClass, List<String> indexedCols, String indexTblName, boolean deferredRebuild, String inputFormat, String outputFormat, String serde, String storageHandler, String location, Map<String,String> idxProps, Map<String,String> tblProps, Map<String,String> serdeProps, String collItemDelim, String fieldDelim, String fieldEscape, String lineDelim, String mapKeyDelim, String indexComment) throws HiveException
tableName
- table name
indexName
- index name
indexHandlerClass
- index handler class
indexedCols
- index columns
indexTblName
- index table's name
deferredRebuild
- whether building the index table's data is deferred
inputFormat
- input format
outputFormat
- output format
serde
-
storageHandler
- index table's storage handler
location
- location
idxProps
- index properties
serdeProps
- serde properties
collItemDelim
-
fieldDelim
-
fieldEscape
-
lineDelim
-
mapKeyDelim
-
HiveException
public Index getIndex(String baseTableName, String indexName) throws HiveException
HiveException
public Index getIndex(String dbName, String baseTableName, String indexName) throws HiveException
HiveException
public boolean dropIndex(String db_name, String tbl_name, String index_name, boolean deleteData) throws HiveException
HiveException
public void dropTable(String tableName) throws HiveException
Drops table along with the data in it. If the table doesn't exist then it is a no-op.
tableName
- table to drop
HiveException
- thrown if the drop fails
public void dropTable(String dbName, String tableName) throws HiveException
Drops table along with the data in it. If the table doesn't exist then it is a no-op.
dbName
- database where the table lives
tableName
- table to drop
HiveException
- thrown if the drop fails
public void dropTable(String dbName, String tableName, boolean deleteData, boolean ignoreUnknownTab) throws HiveException
tableName
-
deleteData
- deletes the underlying data along with metadata
ignoreUnknownTab
- an exception is thrown if this is false and the table doesn't exist
HiveException
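A sketch of dropping a table idempotently with the flags documented above; the database and table names are placeholders.

```java
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;

public class DropTableExample {
  public static void main(String[] args) throws Exception {
    Hive hive = Hive.get(new HiveConf());

    // deleteData = true removes the table's files as well as its metadata;
    // ignoreUnknownTab = true keeps this from failing when the table is already gone.
    hive.dropTable("default", "web_logs", true, true);
  }
}
```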
public HiveConf getConf()
public Table getTable(String tableName) throws HiveException
tableName
- the name of the table
HiveException
- if there's an internal error or if the table doesn't exist
public Table getTable(String dbName, String tableName) throws HiveException
dbName
- the name of the database
tableName
- the name of the table
HiveException
- if there's an internal error or if the table doesn't exist
public Table getTable(String dbName, String tableName, boolean throwException) throws HiveException
dbName
- the name of the database
tableName
- the name of the table
throwException
- controls whether an exception is thrown or a null value is returned
HiveException
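A sketch contrasting the two- and three-argument getTable overloads: with throwException set to false, a missing table yields null instead of an exception. Names are placeholders.

```java
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Table;

public class GetTableExample {
  public static void main(String[] args) throws Exception {
    Hive hive = Hive.get(new HiveConf());

    // throwException = false: a missing table yields null rather than a HiveException.
    Table maybe = hive.getTable("default", "web_logs", false);
    if (maybe == null) {
      System.out.println("table default.web_logs does not exist");
    } else {
      System.out.println("found table: " + maybe.getTableName());
    }
  }
}
```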
public List<String> getAllTables() throws HiveException
HiveException
public List<String> getAllTables(String dbName) throws HiveException
dbName
-
HiveException
public List<String> getTablesByPattern(String tablePattern) throws HiveException
tablePattern
- java regex pattern
HiveException
public List<String> getTablesByPattern(String dbName, String tablePattern) throws HiveException
dbName
-
tablePattern
-
HiveException
public List<String> getTablesForDb(String database, String tablePattern) throws HiveException
database
- the database nametablePattern
- java regex pattern
HiveException
public List<String> getAllDatabases() throws HiveException
HiveException
public List<String> getDatabasesByPattern(String databasePattern) throws HiveException
databasePattern
- java regex pattern
HiveException
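A sketch of browsing the catalog with the pattern-based listing methods above; the patterns themselves are only examples.

```java
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;

public class CatalogListingExample {
  public static void main(String[] args) throws Exception {
    Hive hive = Hive.get(new HiveConf());

    // All databases, then only those whose names start with "prod".
    System.out.println(hive.getAllDatabases());
    System.out.println(hive.getDatabasesByPattern("prod.*"));

    // Tables in the current database whose names mention "log".
    System.out.println(hive.getTablesByPattern(".*log.*"));
  }
}
```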
public boolean grantPrivileges(PrivilegeBag privileges) throws HiveException
HiveException
public boolean revokePrivileges(PrivilegeBag privileges) throws HiveException
userName
- principal name
isRole
- is the given principal name a role
isGroup
- is the given principal name a group
privileges
- a bag of privileges
HiveException
public boolean databaseExists(String dbName) throws HiveException
dbName
-
HiveException
public Database getDatabase(String dbName) throws HiveException
dbName
- the name of the database.
HiveException
public void loadPartition(org.apache.hadoop.fs.Path loadPath, String tableName, Map<String,String> partSpec, boolean replace, boolean holdDDLTime) throws HiveException
loadPath
- Directory containing files to load into Table
tableName
- name of table to be loaded.
partSpec
- defines which partition needs to be loaded
replace
- if true - replace files in the partition, otherwise add files to the partition
tmpDirPath
- The temporary directory.
HiveException
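A sketch of loading a directory of files into a single partition via loadPartition. The HDFS path, table name, and partition spec are placeholders; replace = true overwrites the partition's current contents, and holdDDLTime is simply passed as false here.

```java
import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;

public class LoadPartitionExample {
  public static void main(String[] args) throws Exception {
    Hive hive = Hive.get(new HiveConf());

    // Partition spec: column name -> value for the partition being loaded.
    Map<String, String> partSpec = new LinkedHashMap<String, String>();
    partSpec.put("dt", "2011-01-01");

    // replace = true overwrites the partition's existing files; holdDDLTime = false.
    hive.loadPartition(new Path("/tmp/staging/web_logs/dt=2011-01-01"),
        "web_logs", partSpec, true, false);
  }
}
```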
public ArrayList<LinkedHashMap<String,String>> loadDynamicPartitions(org.apache.hadoop.fs.Path loadPath, String tableName, Map<String,String> partSpec, boolean replace, int numDP, boolean holdDDLTime) throws HiveException
loadPath
-
tableName
-
partSpec
-
replace
-
tmpDirPath
-
numSp
- number of static partitions in the partition spec
HiveException
public void loadTable(org.apache.hadoop.fs.Path loadPath, String tableName, boolean replace, boolean holdDDLTime) throws HiveException
loadPath
- Directory containing files to load into Table
tableName
- name of table to be loaded.
replace
- if true - replace files in the table, otherwise add files to table
tmpDirPath
- The temporary directory.
HiveException
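For an unpartitioned table the analogous call is loadTable, sketched below with placeholder paths and names.

```java
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;

public class LoadTableExample {
  public static void main(String[] args) throws Exception {
    Hive hive = Hive.get(new HiveConf());

    // replace = false appends the staged files to whatever the table already holds.
    hive.loadTable(new Path("/tmp/staging/lookup_table"), "lookup_table", false, false);
  }
}
```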
public Partition createPartition(Table tbl, Map<String,String> partSpec) throws HiveException
tbl
- table for which partition needs to be created
partSpec
- partition keys and their values
HiveException
- if table doesn't exist or partition already exists
public Partition createPartition(Table tbl, Map<String,String> partSpec, org.apache.hadoop.fs.Path location) throws HiveException
tbl
- table for which partition needs to be created
partSpec
- partition keys and their values
location
- location of this partition
HiveException
- if table doesn't exist or partition already exists
public Partition getPartition(Table tbl, Map<String,String> partSpec, boolean forceCreate) throws HiveException
HiveException
public Partition getPartition(Table tbl, Map<String,String> partSpec, boolean forceCreate, String partPath) throws HiveException
tbl
- the partition's table
partSpec
- partition keys and values
forceCreate
- if this is true and the partition doesn't exist then a partition is created
HiveException
public boolean dropPartition(String db_name, String tbl_name, List<String> part_vals, boolean deleteData) throws HiveException
HiveException
public List<String> getPartitionNames(String dbName, String tblName, short max) throws HiveException
HiveException
public List<String> getPartitionNames(String dbName, String tblName, Map<String,String> partSpec, short max) throws HiveException
HiveException
public List<Partition> getPartitions(Table tbl) throws HiveException
tbl
- object for which partition is needed
HiveException
public List<Partition> getPartitions(Table tbl, Map<String,String> partialPartSpec) throws HiveException
tbl
- object for which partition is needed. Must be partitioned.
HiveException
public List<Partition> getPartitionsByNames(Table tbl, Map<String,String> partialPartSpec) throws HiveException
tbl
- object for which partition is needed. Must be partitioned.
HiveException
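A sketch of enumerating a table's partitions with getPartitions and getPartitionNames; the table is assumed to be partitioned, and all names are placeholders.

```java
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;

public class ListPartitionsExample {
  public static void main(String[] args) throws Exception {
    Hive hive = Hive.get(new HiveConf());

    Table tbl = hive.getTable("default", "web_logs");
    List<Partition> parts = hive.getPartitions(tbl);
    System.out.println("partition count: " + parts.size());

    // Name-only listing, capped at 100 entries via the short max argument.
    List<String> names = hive.getPartitionNames("default", "web_logs", (short) 100);
    System.out.println(names);
  }
}
```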
public String getCurrentDatabase()
public void setCurrentDatabase(String currentDatabase)
currentDatabase
-
public void createRole(String roleName, String ownerName) throws HiveException
HiveException
public void dropRole(String roleName) throws HiveException
HiveException
public List<String> getAllRoleNames() throws HiveException
HiveException
public List<Role> showRoleGrant(String principalName, PrincipalType principalType) throws HiveException
HiveException
public boolean grantRole(String roleName, String userName, PrincipalType principalType, String grantor, PrincipalType grantorType, boolean grantOption) throws HiveException
HiveException
public boolean revokeRole(String roleName, String userName, PrincipalType principalType) throws HiveException
HiveException
public List<Role> listRoles(String userName, PrincipalType principalType) throws HiveException
HiveException
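A sketch tying together the role APIs above: create a role, grant it to a user, and list the grants. PrincipalType is assumed to be the Thrift enum from the metastore API; user and role names are placeholders.

```java
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.ql.metadata.Hive;

public class RoleExample {
  public static void main(String[] args) throws Exception {
    Hive hive = Hive.get(new HiveConf());

    // Create a role owned by an (assumed) admin user.
    hive.createRole("analysts", "admin");

    // Grant it to a user; the grantor is recorded, grantOption = false.
    hive.grantRole("analysts", "alice", PrincipalType.USER,
        "admin", PrincipalType.USER, false);

    // List the roles granted to that user.
    System.out.println(hive.showRoleGrant("alice", PrincipalType.USER));
  }
}
```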
public PrincipalPrivilegeSet get_privilege_set(HiveObjectType objectType, String db_name, String table_name, List<String> part_values, String column_name, String user_name, List<String> group_names) throws HiveException
objectType
- hive object type
db_name
- database name
table_name
- table name
part_values
- partition values
column_name
- column name
user_name
- user name
group_names
- group names
HiveException
public List<HiveObjectPrivilege> showPrivilegeGrant(HiveObjectType objectType, String principalName, PrincipalType principalType, String dbName, String tableName, List<String> partValues, String columnName) throws HiveException
objectType
- hive object type
principalName
-
principalType
-
dbName
-
tableName
-
partValues
-
columnName
-
HiveException
protected static void copyFiles(org.apache.hadoop.fs.Path srcf, org.apache.hadoop.fs.Path destf, org.apache.hadoop.fs.FileSystem fs) throws HiveException
HiveException
protected static void replaceFiles(org.apache.hadoop.fs.Path srcf, org.apache.hadoop.fs.Path destf, org.apache.hadoop.fs.Path oldPath, org.apache.hadoop.conf.Configuration conf) throws HiveException
srcf
- Source directory to be renamed to tmppath. It should be a leaf directory where the final data files reside. However it could potentially contain subdirectories as well.
destf
- The directory where the final data needs to go
oldPath
- The directory of the old data location that needs to be cleaned up.
HiveException
public static List<FieldSchema> getFieldsFromDeserializer(String name, Deserializer serde) throws HiveException
HiveException
public List<Index> getIndexes(String dbName, String tblName, short max) throws HiveException
HiveException