Modifier and Type | Method and Description |
---|---|
Map<LoadTableDesc,WriteEntity> |
Context.getLoadTableOutputMap() |
Map<WriteEntity,List<HiveLockObj>> |
Context.getOutputLockObjects() |
HashSet<WriteEntity> |
QueryPlan.getOutputs() |
Modifier and Type | Method and Description |
---|---|
void |
QueryPlan.setOutputs(HashSet<WriteEntity> outputs) |
Modifier and Type | Method and Description |
---|---|
Set<WriteEntity> |
HookContext.getOutputs() |
Modifier and Type | Method and Description |
---|---|
void |
PostExecutePrinter.run(QueryState queryState,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
LineageInfo linfo,
org.apache.hadoop.security.UserGroupInformation ugi) |
void |
PreExecutePrinter.run(QueryState queryState,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
org.apache.hadoop.security.UserGroupInformation ugi) |
void |
PostExecute.run(SessionState sess,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
LineageInfo lInfo,
org.apache.hadoop.security.UserGroupInformation ugi)
Deprecated.
|
void |
PreExecute.run(SessionState sess,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
org.apache.hadoop.security.UserGroupInformation ugi)
Deprecated.
|
void |
UpdateInputAccessTimeHook.PreExec.run(SessionState sess,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
org.apache.hadoop.security.UserGroupInformation ugi) |
void |
EnforceReadOnlyTables.run(SessionState sess,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
org.apache.hadoop.security.UserGroupInformation ugi,
boolean isExplain) |
void |
HookContext.setOutputs(Set<WriteEntity> outputs) |
Modifier and Type | Method and Description |
---|---|
List<Task<?>> |
TableBasedIndexHandler.generateIndexBuildTaskList(Table baseTbl,
Index index,
List<Partition> indexTblPartitions,
List<Partition> baseTblPartitions,
Table indexTbl,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
List<Task<?>> |
HiveIndexHandler.generateIndexBuildTaskList(Table baseTbl,
Index index,
List<Partition> indexTblPartitions,
List<Partition> baseTblPartitions,
Table indexTbl,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs)
Requests that the handler generate a plan for building the index; the plan
should read the base table and write out the index representation.
|
protected Task<?> |
TableBasedIndexHandler.getIndexBuilderMapRedTask(Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
Index index,
boolean partitioned,
PartitionDesc indexTblPartDesc,
String indexTableName,
PartitionDesc baseTablePartDesc,
String baseTableName,
String dbName) |
protected Task<?> |
AggregateIndexHandler.getIndexBuilderMapRedTask(Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
Index index,
boolean partitioned,
PartitionDesc indexTblPartDesc,
String indexTableName,
PartitionDesc baseTablePartDesc,
String baseTableName,
String dbName) |
protected Task<?> |
TableBasedIndexHandler.getIndexBuilderMapRedTask(Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
List<FieldSchema> indexField,
boolean partitioned,
PartitionDesc indexTblPartDesc,
String indexTableName,
PartitionDesc baseTablePartDesc,
String baseTableName,
String dbName) |
Modifier and Type | Method and Description |
---|---|
protected Task<?> |
BitmapIndexHandler.getIndexBuilderMapRedTask(Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
List<FieldSchema> indexField,
boolean partitioned,
PartitionDesc indexTblPartDesc,
String indexTableName,
PartitionDesc baseTablePartDesc,
String baseTableName,
String dbName) |
Modifier and Type | Method and Description |
---|---|
protected Task<?> |
CompactIndexHandler.getIndexBuilderMapRedTask(Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
List<FieldSchema> indexField,
boolean partitioned,
PartitionDesc indexTblPartDesc,
String indexTableName,
PartitionDesc baseTablePartDesc,
String baseTableName,
String dbName) |
Modifier and Type | Method and Description |
---|---|
Set<WriteEntity> |
GenMRProcContext.getOutputs()
Get the output set.
|
Modifier and Type | Method and Description |
---|---|
static Task<?> |
IndexUtils.createRootTask(HiveConf builderConf,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
StringBuilder command,
LinkedHashMap<String,String> partSpec,
String indexTableName,
String dbName) |
Constructor and Description |
---|
GenMRProcContext(HiveConf conf,
HashMap<Operator<? extends OperatorDesc>,Task<? extends Serializable>> opTaskMap,
ParseContext parseCtx,
List<Task<MoveWork>> mvTask,
List<Task<? extends Serializable>> rootTasks,
LinkedHashMap<Operator<? extends OperatorDesc>,GenMRProcContext.GenMapRedCtx> mapCurrCtx,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Modifier and Type | Field and Description |
---|---|
protected HashSet<WriteEntity> |
BaseSemanticAnalyzer.outputs
List of WriteEntities that are passed to the hooks.
|
Set<WriteEntity> |
GenTezProcContext.outputs |
Set<WriteEntity> |
OptimizeTezProcContext.outputs |
Modifier and Type | Method and Description |
---|---|
protected WriteEntity |
BaseSemanticAnalyzer.toWriteEntity(org.apache.hadoop.fs.Path location) |
static WriteEntity |
BaseSemanticAnalyzer.toWriteEntity(org.apache.hadoop.fs.Path location,
HiveConf conf) |
protected WriteEntity |
BaseSemanticAnalyzer.toWriteEntity(String location) |
Modifier and Type | Method and Description |
---|---|
HashSet<WriteEntity> |
BaseSemanticAnalyzer.getAllOutputs() |
HashSet<WriteEntity> |
SemanticAnalyzer.getAllOutputs() |
Set<WriteEntity> |
HiveSemanticAnalyzerHookContextImpl.getOutputs() |
Set<WriteEntity> |
HiveSemanticAnalyzerHookContext.getOutputs() |
HashSet<WriteEntity> |
BaseSemanticAnalyzer.getOutputs() |
HashSet<WriteEntity> |
EximUtil.SemanticAnalyzerWrapperContext.getOutputs() |
Modifier and Type | Method and Description |
---|---|
void |
TaskCompiler.compile(ParseContext pCtx,
List<Task<? extends Serializable>> rootTasks,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
protected abstract void |
TaskCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
protected void |
TezCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
protected void |
MapReduceCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
protected void |
TaskCompiler.optimizeOperatorPlan(ParseContext pCtxSet,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
protected void |
TezCompiler.optimizeOperatorPlan(ParseContext pCtx,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
static void |
ExportSemanticAnalyzer.prepareExport(ASTNode ast,
URI toURI,
BaseSemanticAnalyzer.TableSpec ts,
ReplicationSpec replicationSpec,
Hive db,
HiveConf conf,
Context ctx,
List<Task<? extends Serializable>> rootTasks,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs,
org.slf4j.Logger LOG) |
Constructor and Description |
---|
GenTezProcContext(HiveConf conf,
ParseContext parseContext,
List<Task<MoveWork>> moveTask,
List<Task<? extends Serializable>> rootTasks,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
OptimizeTezProcContext(HiveConf conf,
ParseContext parseContext,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
SemanticAnalyzerWrapperContext(HiveConf conf,
Hive db,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs,
List<Task<? extends Serializable>> tasks,
org.slf4j.Logger LOG,
Context ctx) |
Modifier and Type | Method and Description |
---|---|
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createCreateRoleTask(ASTNode node,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createCreateRoleTask(ASTNode ast,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createDropRoleTask(ASTNode node,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createDropRoleTask(ASTNode ast,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createGrantRoleTask(ASTNode node,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createGrantRoleTask(ASTNode ast,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createGrantTask(ASTNode node,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createGrantTask(ASTNode ast,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createRevokeRoleTask(ASTNode node,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createRevokeRoleTask(ASTNode ast,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createRevokeTask(ASTNode node,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createRevokeTask(ASTNode ast,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createSetRoleTask(String roleName,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createSetRoleTask(String roleName,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createShowCurrentRoleTask(HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs,
org.apache.hadoop.fs.Path resFile) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createShowCurrentRoleTask(HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs,
org.apache.hadoop.fs.Path resFile) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createShowGrantTask(ASTNode node,
org.apache.hadoop.fs.Path resultFile,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createShowGrantTask(ASTNode ast,
org.apache.hadoop.fs.Path resultFile,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createShowRoleGrantTask(ASTNode node,
org.apache.hadoop.fs.Path resultFile,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createShowRoleGrantTask(ASTNode ast,
org.apache.hadoop.fs.Path resultFile,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createShowRolePrincipalsTask(ASTNode ast,
org.apache.hadoop.fs.Path resFile,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createShowRolePrincipalsTask(ASTNode ast,
org.apache.hadoop.fs.Path resFile,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createShowRolesTask(ASTNode ast,
org.apache.hadoop.fs.Path resFile,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createShowRolesTask(ASTNode ast,
org.apache.hadoop.fs.Path resFile,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Modifier and Type | Field and Description |
---|---|
Set<WriteEntity> |
GenSparkProcContext.outputs |
Modifier and Type | Method and Description |
---|---|
Set<WriteEntity> |
OptimizeSparkProcContext.getOutputs() |
Modifier and Type | Method and Description |
---|---|
protected void |
SparkCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs)
TODO: need to turn on rules that are commented out and add more if necessary.
|
protected void |
SparkCompiler.optimizeOperatorPlan(ParseContext pCtx,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Constructor and Description |
---|
GenSparkProcContext(HiveConf conf,
ParseContext parseContext,
List<Task<MoveWork>> moveTask,
List<Task<? extends Serializable>> rootTasks,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
Map<String,TableScanOperator> topOps) |
OptimizeSparkProcContext(HiveConf conf,
ParseContext parseContext,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
Modifier and Type | Field and Description |
---|---|
protected HashSet<WriteEntity> |
MoveWork.outputs
List of WriteEntities that are passed to the hooks.
|
protected HashSet<WriteEntity> |
DDLWork.outputs
List of WriteEntities that are passed to the hooks.
|
Modifier and Type | Method and Description |
---|---|
HashSet<WriteEntity> |
MoveWork.getOutputs() |
HashSet<WriteEntity> |
DDLWork.getOutputs() |
Modifier and Type | Method and Description |
---|---|
void |
MoveWork.setOutputs(HashSet<WriteEntity> outputs) |
void |
DDLWork.setOutputs(HashSet<WriteEntity> outputs) |
Copyright © 2021 The Apache Software Foundation. All rights reserved.