|
||||||||||
PREV NEXT | FRAMES NO FRAMES |
Packages that use TableDesc | |
---|---|
org.apache.hadoop.hive.ql.exec | Hive QL execution tasks, operators, functions and other handlers. |
org.apache.hadoop.hive.ql.exec.persistence | |
org.apache.hadoop.hive.ql.io | |
org.apache.hadoop.hive.ql.io.rcfile.merge | |
org.apache.hadoop.hive.ql.metadata | |
org.apache.hadoop.hive.ql.optimizer | |
org.apache.hadoop.hive.ql.parse | |
org.apache.hadoop.hive.ql.plan |
Uses of TableDesc in org.apache.hadoop.hive.ql.exec |
---|
Fields in org.apache.hadoop.hive.ql.exec declared as TableDesc | |
---|---|
static TableDesc |
Utilities.defaultTd
|
protected TableDesc[] |
CommonJoinOperator.spillTableDesc
|
Methods in org.apache.hadoop.hive.ql.exec that return TableDesc | |
---|---|
TableDesc |
FetchOperator.getCurrTbl()
|
static TableDesc |
JoinUtil.getSpillTableDesc(Byte alias,
TableDesc[] spillTableDesc,
JoinDesc conf,
boolean noFilter)
|
TableDesc |
TableScanOperator.getTableDesc()
|
static TableDesc |
Utilities.getTableDesc(String cols,
String colTypes)
|
static TableDesc |
Utilities.getTableDesc(Table tbl)
|
TableDesc |
FetchTask.getTblDesc()
Return the tableDesc of the fetchWork. |
static TableDesc[] |
JoinUtil.initSpillTables(JoinDesc conf,
boolean noFilter)
|
Methods in org.apache.hadoop.hive.ql.exec with parameters of type TableDesc | |
---|---|
static void |
Utilities.copyTableJobPropertiesToConf(TableDesc tbl,
JobConf job)
Copies the storage handler properties configured for a table descriptor to a runtime job configuration. |
static PartitionDesc |
Utilities.getPartitionDescFromTableDesc(TableDesc tblDesc,
Partition part)
|
static RowContainer<List<Object>> |
JoinUtil.getRowContainer(Configuration hconf,
List<ObjectInspector> structFieldObjectInspectors,
Byte alias,
int containerSize,
TableDesc[] spillTableDesc,
JoinDesc conf,
boolean noFilter,
Reporter reporter)
|
static SerDe |
JoinUtil.getSpillSerDe(byte alias,
TableDesc[] spillTableDesc,
JoinDesc conf,
boolean noFilter)
|
static TableDesc |
JoinUtil.getSpillTableDesc(Byte alias,
TableDesc[] spillTableDesc,
JoinDesc conf,
boolean noFilter)
|
void |
FetchOperator.setCurrTbl(TableDesc currTbl)
|
void |
TableScanOperator.setTableDesc(TableDesc tableDesc)
|
Uses of TableDesc in org.apache.hadoop.hive.ql.exec.persistence |
---|
Methods in org.apache.hadoop.hive.ql.exec.persistence that return TableDesc | |
---|---|
static TableDesc |
PTFRowContainer.createTableDesc(StructObjectInspector oI)
|
Methods in org.apache.hadoop.hive.ql.exec.persistence with parameters of type TableDesc | |
---|---|
void |
RowContainer.setTableDesc(TableDesc tblDesc)
|
Uses of TableDesc in org.apache.hadoop.hive.ql.io |
---|
Methods in org.apache.hadoop.hive.ql.io with parameters of type TableDesc | |
---|---|
static FileSinkOperator.RecordWriter |
HiveFileFormatUtils.getHiveRecordWriter(JobConf jc,
TableDesc tableInfo,
Class<? extends Writable> outputClass,
FileSinkDesc conf,
Path outPath,
Reporter reporter)
|
Uses of TableDesc in org.apache.hadoop.hive.ql.io.rcfile.merge |
---|
Methods in org.apache.hadoop.hive.ql.io.rcfile.merge with parameters of type TableDesc | |
---|---|
void |
MergeWork.resolveDynamicPartitionStoredAsSubDirsMerge(HiveConf conf,
Path path,
TableDesc tblDesc,
ArrayList<String> aliases,
PartitionDesc partDesc)
|
Uses of TableDesc in org.apache.hadoop.hive.ql.metadata |
---|
Methods in org.apache.hadoop.hive.ql.metadata with parameters of type TableDesc | |
---|---|
void |
DefaultStorageHandler.configureInputJobProperties(TableDesc tableDesc,
Map<String,String> jobProperties)
|
void |
HiveStorageHandler.configureInputJobProperties(TableDesc tableDesc,
Map<String,String> jobProperties)
This method is called to allow the StorageHandlers the chance to populate the JobContext.getConfiguration() with properties that may be needed by the handler's bundled artifacts (i.e., InputFormat, SerDe, etc.). |
void |
DefaultStorageHandler.configureJobConf(TableDesc tableDesc,
JobConf jobConf)
|
void |
HiveStorageHandler.configureJobConf(TableDesc tableDesc,
JobConf jobConf)
Called just before submitting MapReduce job. |
void |
DefaultStorageHandler.configureOutputJobProperties(TableDesc tableDesc,
Map<String,String> jobProperties)
|
void |
HiveStorageHandler.configureOutputJobProperties(TableDesc tableDesc,
Map<String,String> jobProperties)
This method is called to allow the StorageHandlers the chance to populate the JobContext.getConfiguration() with properties that may be needed by the handler's bundled artifacts (i.e., InputFormat, SerDe, etc.). |
void |
DefaultStorageHandler.configureTableJobProperties(TableDesc tableDesc,
Map<String,String> jobProperties)
|
void |
HiveStorageHandler.configureTableJobProperties(TableDesc tableDesc,
Map<String,String> jobProperties)
Deprecated. |
Uses of TableDesc in org.apache.hadoop.hive.ql.optimizer |
---|
Methods in org.apache.hadoop.hive.ql.optimizer that return types with arguments of type TableDesc | |
---|---|
List<TableDesc> |
GenMRProcContext.GenMRUnionCtx.getTTDesc()
|
Methods in org.apache.hadoop.hive.ql.optimizer with parameters of type TableDesc | |
---|---|
void |
GenMRProcContext.GenMRUnionCtx.addTTDesc(TableDesc tt_desc)
|
static void |
GenMapRedUtils.setTaskPlan(String path,
String alias,
Operator<? extends OperatorDesc> topOp,
MapWork plan,
boolean local,
TableDesc tt_desc)
Set the current task in the mapredWork. |
Uses of TableDesc in org.apache.hadoop.hive.ql.parse |
---|
Methods in org.apache.hadoop.hive.ql.parse that return TableDesc | |
---|---|
TableDesc |
ParseContext.getFetchTabledesc()
|
Methods in org.apache.hadoop.hive.ql.parse with parameters of type TableDesc | |
---|---|
void |
ParseContext.setFetchTabledesc(TableDesc fetchTabledesc)
|
Uses of TableDesc in org.apache.hadoop.hive.ql.plan |
---|
Methods in org.apache.hadoop.hive.ql.plan that return TableDesc | |
---|---|
static TableDesc |
PlanUtils.getDefaultQueryOutputTableDesc(String cols,
String colTypes,
String fileFormat)
|
static TableDesc |
PlanUtils.getDefaultTableDesc(CreateTableDesc localDirectoryDesc,
String cols,
String colTypes)
|
static TableDesc |
PlanUtils.getDefaultTableDesc(String separatorCode)
Generate the table descriptor of MetadataTypedColumnsetSerDe with the separatorCode. |
static TableDesc |
PlanUtils.getDefaultTableDesc(String separatorCode,
String columns)
Generate the table descriptor of MetadataTypedColumnsetSerDe with the separatorCode and column names (comma separated string). |
static TableDesc |
PlanUtils.getDefaultTableDesc(String separatorCode,
String columns,
boolean lastColumnTakesRestOfTheLine)
Generate the table descriptor of MetadataTypedColumnsetSerDe with the separatorCode and column names (comma separated string), and whether the last column should take the rest of the line. |
static TableDesc |
PlanUtils.getDefaultTableDesc(String separatorCode,
String columns,
String columnTypes,
boolean lastColumnTakesRestOfTheLine)
Generate the table descriptor of MetadataTypedColumnsetSerDe with the separatorCode and column names (comma separated string), and whether the last column should take the rest of the line. |
static TableDesc |
PlanUtils.getIntermediateFileTableDesc(List<FieldSchema> fieldSchemas)
Generate the table descriptor for intermediate files. |
TableDesc |
ReduceWork.getKeyDesc()
|
TableDesc |
ReduceSinkDesc.getKeySerializeInfo()
|
TableDesc |
JoinDesc.getKeyTableDesc()
|
TableDesc |
HashTableSinkDesc.getKeyTableDesc()
|
TableDesc |
HashTableSinkDesc.getKeyTblDesc()
|
TableDesc |
MapJoinDesc.getKeyTblDesc()
|
static TableDesc |
PlanUtils.getMapJoinKeyTableDesc(List<FieldSchema> fieldSchemas)
Generate the table descriptor for Map-side join key. |
static TableDesc |
PlanUtils.getMapJoinValueTableDesc(List<FieldSchema> fieldSchemas)
Generate the table descriptor for Map-side join value. |
static TableDesc |
PlanUtils.getReduceKeyTableDesc(List<FieldSchema> fieldSchemas,
String order)
Generate the table descriptor for reduce key. |
static TableDesc |
PlanUtils.getReduceValueTableDesc(List<FieldSchema> fieldSchemas)
Generate the table descriptor for reduce value. |
TableDesc |
ScriptDesc.getScriptErrInfo()
|
TableDesc |
ScriptDesc.getScriptInputInfo()
|
TableDesc |
ScriptDesc.getScriptOutputInfo()
|
TableDesc |
LoadTableDesc.getTable()
|
TableDesc |
PartitionDesc.getTableDesc()
|
static TableDesc |
PlanUtils.getTableDesc(Class<? extends Deserializer> serdeClass,
String separatorCode,
String columns)
Generate the table descriptor of given serde with the separatorCode and column names (comma separated string). |
static TableDesc |
PlanUtils.getTableDesc(Class<? extends Deserializer> serdeClass,
String separatorCode,
String columns,
boolean lastColumnTakesRestOfTheLine)
Generate the table descriptor of the serde specified with the separatorCode and column names (comma separated string), and whether the last column should take the rest of the line. |
static TableDesc |
PlanUtils.getTableDesc(Class<? extends Deserializer> serdeClass,
String separatorCode,
String columns,
String columnTypes,
boolean lastColumnTakesRestOfTheLine)
|
static TableDesc |
PlanUtils.getTableDesc(Class<? extends Deserializer> serdeClass,
String separatorCode,
String columns,
String columnTypes,
boolean lastColumnTakesRestOfTheLine,
boolean useDelimitedJSON)
|
static TableDesc |
PlanUtils.getTableDesc(Class<? extends Deserializer> serdeClass,
String separatorCode,
String columns,
String columnTypes,
boolean lastColumnTakesRestOfTheLine,
boolean useDelimitedJSON,
String fileFormat)
|
static TableDesc |
PlanUtils.getTableDesc(CreateTableDesc crtTblDesc,
String cols,
String colTypes)
Generate a table descriptor from a createTableDesc. |
TableDesc |
FileSinkDesc.getTableInfo()
|
TableDesc |
HashTableDummyDesc.getTbl()
|
TableDesc |
FetchWork.getTblDesc()
|
TableDesc |
ReduceSinkDesc.getValueSerializeInfo()
|
Methods in org.apache.hadoop.hive.ql.plan that return types with arguments of type TableDesc | |
---|---|
List<TableDesc> |
DemuxDesc.getKeysSerializeInfos()
|
Map<Byte,TableDesc> |
JoinDesc.getSkewKeysValuesTables()
|
Map<Byte,TableDesc> |
HashTableSinkDesc.getSkewKeysValuesTables()
|
List<TableDesc> |
ReduceWork.getTagToValueDesc()
|
List<TableDesc> |
MapJoinDesc.getValueFilteredTblDescs()
|
List<TableDesc> |
DemuxDesc.getValuesSerializeInfos()
|
List<TableDesc> |
HashTableSinkDesc.getValueTblDescs()
|
List<TableDesc> |
MapJoinDesc.getValueTblDescs()
|
List<TableDesc> |
HashTableSinkDesc.getValueTblFilteredDescs()
|
Methods in org.apache.hadoop.hive.ql.plan with parameters of type TableDesc | |
---|---|
static void |
PlanUtils.configureInputJobPropertiesForStorageHandler(TableDesc tableDesc)
Loads the storage handler (if one exists) for the given table and invokes HiveStorageHandler.configureInputJobProperties(TableDesc, java.util.Map) . |
static void |
PlanUtils.configureJobConf(TableDesc tableDesc,
JobConf jobConf)
|
static void |
PlanUtils.configureOutputJobPropertiesForStorageHandler(TableDesc tableDesc)
Loads the storage handler (if one exists) for the given table and invokes HiveStorageHandler.configureOutputJobProperties(TableDesc, java.util.Map) . |
void |
MapWork.resolveDynamicPartitionStoredAsSubDirsMerge(HiveConf conf,
Path path,
TableDesc tblDesc,
ArrayList<String> aliases,
PartitionDesc partDesc)
|
void |
ReduceWork.setKeyDesc(TableDesc keyDesc)
If the plan has a reducer and correspondingly a reduce-sink, then store the TableDesc pointing to keySerializeInfo of the ReduceSink |
void |
ReduceSinkDesc.setKeySerializeInfo(TableDesc keySerializeInfo)
|
void |
JoinDesc.setKeyTableDesc(TableDesc keyTblDesc)
|
void |
HashTableSinkDesc.setKeyTableDesc(TableDesc keyTableDesc)
|
void |
HashTableSinkDesc.setKeyTblDesc(TableDesc keyTblDesc)
|
void |
MapJoinDesc.setKeyTblDesc(TableDesc keyTblDesc)
|
void |
ScriptDesc.setScriptErrInfo(TableDesc scriptErrInfo)
|
void |
ScriptDesc.setScriptInputInfo(TableDesc scriptInputInfo)
|
void |
ScriptDesc.setScriptOutputInfo(TableDesc scriptOutputInfo)
|
void |
LoadTableDesc.setTable(TableDesc table)
|
void |
PartitionDesc.setTableDesc(TableDesc tableDesc)
|
void |
FileSinkDesc.setTableInfo(TableDesc tableInfo)
|
void |
HashTableDummyDesc.setTbl(TableDesc tbl)
|
void |
FetchWork.setTblDesc(TableDesc tblDesc)
|
void |
ReduceSinkDesc.setValueSerializeInfo(TableDesc valueSerializeInfo)
|
Method parameters in org.apache.hadoop.hive.ql.plan with type arguments of type TableDesc | |
---|---|
void |
DemuxDesc.setKeysSerializeInfos(List<TableDesc> keysSerializeInfos)
|
void |
JoinDesc.setSkewKeysValuesTables(Map<Byte,TableDesc> skewKeysValuesTables)
|
void |
HashTableSinkDesc.setSkewKeysValuesTables(Map<Byte,TableDesc> skewKeysValuesTables)
|
void |
ReduceWork.setTagToValueDesc(List<TableDesc> tagToValueDesc)
|
void |
MapJoinDesc.setValueFilteredTblDescs(List<TableDesc> valueFilteredTblDescs)
|
void |
DemuxDesc.setValuesSerializeInfos(List<TableDesc> valuesSerializeInfos)
|
void |
HashTableSinkDesc.setValueTblDescs(List<TableDesc> valueTblDescs)
|
void |
MapJoinDesc.setValueTblDescs(List<TableDesc> valueTblDescs)
|
void |
HashTableSinkDesc.setValueTblFilteredDescs(List<TableDesc> valueTblFilteredDescs)
|
Constructors in org.apache.hadoop.hive.ql.plan with parameters of type TableDesc | |
---|---|
FetchWork(List<String> partDir,
List<PartitionDesc> partDesc,
TableDesc tblDesc)
|
|
FetchWork(List<String> partDir,
List<PartitionDesc> partDesc,
TableDesc tblDesc,
int limit)
|
|
FetchWork(String tblDir,
TableDesc tblDesc)
|
|
FetchWork(String tblDir,
TableDesc tblDesc,
int limit)
|
|
FileSinkDesc(String dirName,
TableDesc tableInfo,
boolean compressed)
|
|
FileSinkDesc(String dirName,
TableDesc tableInfo,
boolean compressed,
int destTableId,
boolean multiFileSpray,
boolean canBeMerged,
int numFiles,
int totalFiles,
ArrayList<ExprNodeDesc> partitionCols,
DynamicPartitionCtx dpCtx)
|
|
LoadTableDesc(String sourceDir,
String tmpDir,
TableDesc table,
DynamicPartitionCtx dpCtx)
|
|
LoadTableDesc(String sourceDir,
String tmpDir,
TableDesc table,
Map<String,String> partitionSpec)
|
|
LoadTableDesc(String sourceDir,
String tmpDir,
TableDesc table,
Map<String,String> partitionSpec,
boolean replace)
|
|
MapJoinDesc(Map<Byte,List<ExprNodeDesc>> keys,
TableDesc keyTblDesc,
Map<Byte,List<ExprNodeDesc>> values,
List<TableDesc> valueTblDescs,
List<TableDesc> valueFilteredTblDescs,
List<String> outputColumnNames,
int posBigTable,
JoinCondDesc[] conds,
Map<Byte,List<ExprNodeDesc>> filters,
boolean noOuterJoin,
String dumpFilePrefix)
|
|
PartitionDesc(Partition part,
TableDesc tblDesc)
|
|
PartitionDesc(TableDesc table,
LinkedHashMap<String,String> partSpec)
|
|
PartitionDesc(TableDesc table,
LinkedHashMap<String,String> partSpec,
Class<? extends Deserializer> serdeClass,
Class<? extends InputFormat> inputFileFormatClass,
Class<?> outputFormat,
Properties properties,
String serdeClassName)
|
|
ReduceSinkDesc(ArrayList<ExprNodeDesc> keyCols,
int numDistributionKeys,
ArrayList<ExprNodeDesc> valueCols,
ArrayList<String> outputKeyColumnNames,
List<List<Integer>> distinctColumnIndices,
ArrayList<String> outputValueColumnNames,
int tag,
ArrayList<ExprNodeDesc> partitionCols,
int numReducers,
TableDesc keySerializeInfo,
TableDesc valueSerializeInfo)
|
|
ScriptDesc(String scriptCmd,
TableDesc scriptInputInfo,
Class<? extends RecordWriter> inRecordWriterClass,
TableDesc scriptOutputInfo,
Class<? extends RecordReader> outRecordReaderClass,
Class<? extends RecordReader> errRecordReaderClass,
TableDesc scriptErrInfo)
|
Constructor parameters in org.apache.hadoop.hive.ql.plan with type arguments of type TableDesc | |
---|---|
DemuxDesc(Map<Integer,Integer> newTagToOldTag,
Map<Integer,Integer> newTagToChildIndex,
Map<Integer,Integer> childIndexToOriginalNumParents,
List<TableDesc> keysSerializeInfos,
List<TableDesc> valuesSerializeInfos)
|
|
DemuxDesc(Map<Integer,Integer> newTagToOldTag,
Map<Integer,Integer> newTagToChildIndex,
Map<Integer,Integer> childIndexToOriginalNumParents,
List<TableDesc> keysSerializeInfos,
List<TableDesc> valuesSerializeInfos)
|
|
MapJoinDesc(Map<Byte,List<ExprNodeDesc>> keys,
TableDesc keyTblDesc,
Map<Byte,List<ExprNodeDesc>> values,
List<TableDesc> valueTblDescs,
List<TableDesc> valueFilteredTblDescs,
List<String> outputColumnNames,
int posBigTable,
JoinCondDesc[] conds,
Map<Byte,List<ExprNodeDesc>> filters,
boolean noOuterJoin,
String dumpFilePrefix)
|
|
MapJoinDesc(Map<Byte,List<ExprNodeDesc>> keys,
TableDesc keyTblDesc,
Map<Byte,List<ExprNodeDesc>> values,
List<TableDesc> valueTblDescs,
List<TableDesc> valueFilteredTblDescs,
List<String> outputColumnNames,
int posBigTable,
JoinCondDesc[] conds,
Map<Byte,List<ExprNodeDesc>> filters,
boolean noOuterJoin,
String dumpFilePrefix)
|
|
||||||||||
PREV NEXT | FRAMES NO FRAMES |