java.lang.Object
  org.apache.hadoop.hive.ql.metadata.Table

public class Table
A Hive Table is a fundamental unit of data in Hive that shares a common schema/DDL.
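Example (illustrative sketch): the method below reads back identity-level metadata from a Table instance obtained elsewhere, e.g. through the Hive metadata layer; the wrapper class and method name are hypothetical.

```java
import java.net.URI;

import org.apache.hadoop.hive.ql.metadata.Table;

public class DescribeTableSketch {
  // Print identity-level metadata of a table obtained elsewhere.
  static void describe(Table tbl) {
    String name = tbl.getName();           // table name in the metadb
    String db = tbl.getDbName();           // database the table belongs to
    URI location = tbl.getDataLocation();  // where the table's files live
    System.out.println(db + "." + name + " -> " + location);
    System.out.println("owner: " + tbl.getOwner() + ", retention: " + tbl.getRetention());
  }
}
```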
Constructor Summary

| Modifier | Constructor and Description |
|---|---|
| protected | Table() : Table (only used internally). |
|  | Table(String name) |
|  | Table(String name, Properties schema, Deserializer deserializer, Class<? extends org.apache.hadoop.mapred.InputFormat<?,?>> inputFormatClass, Class<? extends org.apache.hadoop.mapred.OutputFormat<?,?>> outputFormatClass, URI dataLocation, Hive hive) : Create a TableMetaInfo object, presumably with the intent of saving it to the metastore. |
Method Summary

| Return Type | Method and Description |
|---|---|
| void | checkValidity() |
| protected void | copyFiles(org.apache.hadoop.fs.Path srcf) : Inserts files specified into the partition. |
| Map<String,String> | createSpec(Partition tp) : Creates a partition name -> value spec map object. |
| List<FieldSchema> | getAllCols() : Returns a list of all the columns of the table (data columns + partition columns, in that order). |
| List<String> | getBucketCols() |
| String | getBucketingDimensionId() |
| List<FieldSchema> | getCols() |
| URI | getDataLocation() |
| String | getDbName() |
| Deserializer | getDeserializer() |
| StructField | getField(String fld) |
| Vector<StructField> | getFields() |
| Class<? extends org.apache.hadoop.mapred.InputFormat> | getInputFormatClass() |
| String | getName() |
| int | getNumBuckets() |
| Class<? extends org.apache.hadoop.mapred.OutputFormat> | getOutputFormatClass() |
| String | getOwner() |
| Map<String,String> | getParameters() |
| List<FieldSchema> | getPartCols() |
| org.apache.hadoop.fs.Path | getPath() |
| String | getProperty(String name) |
| int | getRetention() |
| Properties | getSchema() |
| String | getSerdeParam(String param) |
| String | getSerializationLib() |
| List<Order> | getSortCols() |
| Table | getTTable() |
| protected void | initSerDe() |
| boolean | isPartitioned() |
| boolean | isPartitionKey(String colName) |
| boolean | isValidSpec(Map<String,String> spec) |
| void | reinitSerDe() |
| protected void | replaceFiles(org.apache.hadoop.fs.Path srcf, org.apache.hadoop.fs.Path tmpd) : Replaces files in the partition with the new data set specified by srcf. |
| void | setBucketCols(List<String> bucketCols) |
| void | setDataLocation(URI uri2) |
| void | setDeserializer(Deserializer deserializer) |
| void | setFields(List<FieldSchema> fields) |
| void | setInputFormatClass(Class<? extends org.apache.hadoop.mapred.InputFormat> inputFormatClass) |
| void | setInputFormatClass(String name) |
| void | setNumBuckets(int nb) |
| void | setOutputFormatClass(Class<? extends org.apache.hadoop.mapred.OutputFormat> outputFormatClass) |
| void | setOutputFormatClass(String name) |
| void | setOwner(String owner) |
| void | setPartCols(List<FieldSchema> partCols) |
| void | setProperty(String name, String value) |
| void | setRetention(int retention) |
| void | setSchema(Properties schema) |
| String | setSerdeParam(String param, String value) |
| void | setSerializationLib(String lib) |
| void | setSortCols(List<Order> sortOrder) |
| protected void | setTTable(Table table) |
| String | toString() |
Methods inherited from class java.lang.Object

clone, equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait
Constructor Detail

protected Table() throws HiveException

Throws:
    HiveException

public Table(String name, Properties schema, Deserializer deserializer, Class<? extends org.apache.hadoop.mapred.InputFormat<?,?>> inputFormatClass, Class<? extends org.apache.hadoop.mapred.OutputFormat<?,?>> outputFormatClass, URI dataLocation, Hive hive) throws HiveException

Parameters:
    name - the name of this table in the metadb
    schema - an object that represents the schema that this SerDe must know
    deserializer - a Class to be used for deserializing the data
    dataLocation - where the table lives (e.g., dfs://hadoop001.sf2p.facebook.com:9000/user/facebook/warehouse/example). NOTE: this should not be hardcoded, but is OK for now
Throws:
    HiveException - on internal error. Note: not possible now, but in the future we reserve the right to throw an exception

public Table(String name)
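Example (illustrative sketch): building a new table description with the public Table(String name) constructor and the setters documented below, then validating it with checkValidity(). The table name, column definitions, and URI are illustrative, and FieldSchema is assumed to be the metastore Thrift class with an all-args constructor.

```java
import java.net.URI;
import java.util.Arrays;

import org.apache.hadoop.hive.metastore.api.FieldSchema; // assumed metastore Thrift class
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;

public class CreateTableSketch {
  static Table buildExample() throws HiveException {
    Table tbl = new Table("page_views");  // hypothetical table name

    // Data columns; FieldSchema(name, type, comment) is assumed to be the Thrift all-args constructor.
    tbl.setFields(Arrays.asList(
        new FieldSchema("viewtime", "int", null),
        new FieldSchema("userid", "bigint", null)));

    // Partition columns are kept separately from the data columns.
    tbl.setPartCols(Arrays.asList(
        new FieldSchema("ds", "string", "date stamp")));

    tbl.setDataLocation(URI.create("hdfs://namenode:9000/user/hive/warehouse/page_views")); // illustrative URI
    tbl.setOwner("hive");
    tbl.setRetention(0);

    tbl.checkValidity();  // throws HiveException if the metadata is inconsistent
    return tbl;
  }
}
```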
Method Detail
public void reinitSerDe() throws HiveException

Throws:
    HiveException

protected void initSerDe() throws HiveException

Throws:
    HiveException

public void checkValidity() throws HiveException

Throws:
    HiveException

public void setInputFormatClass(Class<? extends org.apache.hadoop.mapred.InputFormat> inputFormatClass)

Parameters:
    inputFormatClass

public void setOutputFormatClass(Class<? extends org.apache.hadoop.mapred.OutputFormat> outputFormatClass)

Parameters:
    outputFormatClass
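Example (illustrative sketch): recording the storage formats on a table. TextInputFormat and TextOutputFormat from the old org.apache.hadoop.mapred API are used here only because they implement the required interfaces; they are not formats this class prescribes.

```java
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;

public class FormatClassSketch {
  static void useTextFormats(Table tbl) {
    // Record which classes read and write the table's files.
    tbl.setInputFormatClass(TextInputFormat.class);
    tbl.setOutputFormatClass(TextOutputFormat.class);

    // The getters hand the same Class objects back for later job setup.
    System.out.println(tbl.getInputFormatClass().getName());
    System.out.println(tbl.getOutputFormatClass().getName());
  }
}
```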
public final Properties getSchema()
public final org.apache.hadoop.fs.Path getPath()
public final String getName()
public final URI getDataLocation()
public final Deserializer getDeserializer()
public final Class<? extends org.apache.hadoop.mapred.InputFormat> getInputFormatClass()
public final Class<? extends org.apache.hadoop.mapred.OutputFormat> getOutputFormatClass()
public final boolean isValidSpec(Map<String,String> spec) throws HiveException

Throws:
    HiveException
public void setProperty(String name, String value)
public String getProperty(String name)
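Example (illustrative sketch): attaching and reading back a free-form table property; the "comment" key and its value are illustrative.

```java
import org.apache.hadoop.hive.ql.metadata.Table;

public class TablePropertySketch {
  static void tagTable(Table tbl) {
    // Attach an arbitrary key/value pair to the table's metadata.
    tbl.setProperty("comment", "clickstream data, loaded nightly"); // illustrative key and value

    // Read it back with the matching getter.
    System.out.println("comment = " + tbl.getProperty("comment"));
  }
}
```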
public Vector<StructField> getFields()
public StructField getField(String fld)
public void setSchema(Properties schema)
Parameters:
    schema - the schema to set

public void setDeserializer(Deserializer deserializer)

Parameters:
    deserializer - the deserializer to set

public String toString()

Overrides:
    toString in class Object
public List<FieldSchema> getPartCols()
public boolean isPartitionKey(String colName)
public String getBucketingDimensionId()
public Table getTTable()
protected void setTTable(Table table)
Parameters:
    table - the tTable to set

public void setDataLocation(URI uri2)
public void setBucketCols(List<String> bucketCols) throws HiveException

Throws:
    HiveException

public void setSortCols(List<Order> sortOrder) throws HiveException

Throws:
    HiveException
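Example (illustrative sketch): declaring bucketing and sort order. Order is assumed to be the metastore Thrift class with an Order(col, order) constructor, and 1 is assumed to mean ascending; the column names and bucket count are illustrative.

```java
import java.util.Arrays;

import org.apache.hadoop.hive.metastore.api.Order; // assumed metastore Thrift class
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;

public class BucketingSketch {
  static void clusterByUser(Table tbl) throws HiveException {
    // Cluster the table's rows on userid into 32 buckets (names and count are illustrative).
    tbl.setBucketCols(Arrays.asList("userid"));
    tbl.setNumBuckets(32);

    // Keep each bucket sorted by viewtime; 1 is assumed to mean ascending order.
    tbl.setSortCols(Arrays.asList(new Order("viewtime", 1)));
  }
}
```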
public List<FieldSchema> getCols()
public List<FieldSchema> getAllCols()
public void setPartCols(List<FieldSchema> partCols)
public String getDbName()
public int getNumBuckets()
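Example (illustrative sketch): distinguishing the column accessors; getAllCols() returns the data columns followed by the partition columns. The "ds" column name is illustrative.

```java
import java.util.List;

import org.apache.hadoop.hive.metastore.api.FieldSchema; // assumed metastore Thrift class
import org.apache.hadoop.hive.ql.metadata.Table;

public class ColumnSketch {
  static void listColumns(Table tbl) {
    List<FieldSchema> data = tbl.getCols();       // data columns only
    List<FieldSchema> parts = tbl.getPartCols();  // partition columns only
    List<FieldSchema> all = tbl.getAllCols();     // data columns followed by partition columns

    System.out.println(data.size() + " data + " + parts.size() + " partition = " + all.size());

    // isPartitionKey answers whether a column name refers to one of the partition columns.
    System.out.println("is 'ds' a partition key? " + tbl.isPartitionKey("ds"));
  }
}
```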
protected void replaceFiles(org.apache.hadoop.fs.Path srcf, org.apache.hadoop.fs.Path tmpd) throws HiveException

Parameters:
    srcf - Files to be replaced. Leaf directories or globbed file paths
    tmpd - Temporary directory
Throws:
    HiveException

protected void copyFiles(org.apache.hadoop.fs.Path srcf) throws HiveException

Parameters:
    srcf - Files to be moved. Leaf directories or globbed file paths
Throws:
    HiveException
public void setInputFormatClass(String name) throws HiveException

Throws:
    HiveException

public void setOutputFormatClass(String name) throws HiveException

Throws:
    HiveException
public boolean isPartitioned()
public void setFields(List<FieldSchema> fields)
public void setNumBuckets(int nb)
public String getOwner()

See Also:
    Table.getOwner()

public Map<String,String> getParameters()

See Also:
    Table.getParameters()

public int getRetention()

See Also:
    Table.getRetention()

public void setOwner(String owner)

Parameters:
    owner
See Also:
    Table.setOwner(java.lang.String)

public void setRetention(int retention)

Parameters:
    retention
See Also:
    Table.setRetention(int)
public void setSerializationLib(String lib)
public String getSerializationLib()
public String getSerdeParam(String param)
public String setSerdeParam(String param, String value)
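Example (illustrative sketch): configuring the table's SerDe. The serialization library class name and the "field.delim" parameter key are illustrative choices, not values required by this class; substitute the SerDe your table actually uses.

```java
import org.apache.hadoop.hive.ql.metadata.Table;

public class SerDeSketch {
  static void configureSerDe(Table tbl) {
    // Record which SerDe class (de)serializes the table's rows; the class name is illustrative.
    tbl.setSerializationLib("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe");

    // SerDe-specific key/value parameters; "field.delim" is an illustrative key.
    tbl.setSerdeParam("field.delim", "\t");

    System.out.println(tbl.getSerializationLib() + " delim=" + tbl.getSerdeParam("field.delim"));
  }
}
```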
public List<String> getBucketCols()
public List<Order> getSortCols()
public Map<String,String> createSpec(Partition tp)

Parameters:
    tp - Use the information from this partition.
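Example (illustrative sketch): turning a Partition into its name -> value spec and checking it against the table, under the assumption that isValidSpec(Map) compares the spec with the table's partition columns. The Partition instance is obtained elsewhere.

```java
import java.util.Map;

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;

public class PartitionSpecSketch {
  static void roundTrip(Table tbl, Partition p) throws HiveException {
    // Build the partition-column -> value map that identifies this partition of the table.
    Map<String, String> spec = tbl.createSpec(p);

    // Check the spec back against the table (isValidSpec throws HiveException on internal errors).
    System.out.println("valid spec? " + tbl.isValidSpec(spec));
  }
}
```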