Modifier and Type | Method and Description |
---|---|
Task<? extends Serializable> |
DriverContext.getRunnable(int maxthreads) |
Modifier and Type | Method and Description |
---|---|
ArrayList<Task<? extends Serializable>> |
QueryPlan.getRootTasks() |
Modifier and Type | Method and Description |
---|---|
boolean |
DriverContext.addToRunnable(Task<? extends Serializable> tsk) |
static boolean |
DriverContext.isLaunchable(Task<? extends Serializable> tsk)
Checks if a task can be launched.
|
void |
DriverContext.remove(Task<? extends Serializable> task) |
<T extends Serializable> |
QueryDisplay.TaskDisplay.updateStatus(Task<T> tTask) |
<T extends Serializable> |
QueryDisplay.updateTaskStatus(Task<T> tTask) |
Modifier and Type | Method and Description |
---|---|
void |
HookRunner.runPostAnalyzeHooks(HiveSemanticAnalyzerHookContext hookCtx,
List<Task<? extends Serializable>> allRootTasks) |
void |
QueryPlan.setRootTasks(ArrayList<Task<? extends Serializable>> rootTasks) |
Constructor and Description |
---|
TaskDisplay(Task task) |
Modifier and Type | Class and Description |
---|---|
class |
ColumnStatsUpdateTask
ColumnStatsUpdateTask implementation.
|
class |
ConditionalTask
Conditional Task implementation.
|
class |
CopyTask
CopyTask implementation.
|
class |
DDLTask
DDLTask implementation.
|
class |
DependencyCollectionTask
DependencyCollectionTask.
|
class |
ExplainSQRewriteTask |
class |
ExplainTask
ExplainTask implementation.
|
class |
ExportTask |
class |
FetchTask
FetchTask implementation.
|
class |
FunctionTask
FunctionTask.
|
class |
MaterializedViewTask
This task does some work related to materialized views.
|
class |
MoveTask
MoveTask implementation.
|
class |
ReplCopyTask |
class |
ReplTxnTask
ReplTxnTask.
|
class |
StatsTask
StatsTask implementation.
|
Modifier and Type | Field and Description |
---|---|
protected Task<? extends Serializable> |
Task.backupTask |
protected Task<? extends Serializable> |
TaskRunner.tsk |
Modifier and Type | Field and Description |
---|---|
protected List<Task<? extends Serializable>> |
Task.backupChildrenTasks |
protected List<Task<? extends Serializable>> |
Task.childTasks |
protected List<Task<? extends Serializable>> |
Task.feedSubscribers |
protected List<Task<? extends Serializable>> |
Task.parentTasks |
Class<? extends Task<T>> |
TaskFactory.TaskTuple.taskClass |
Modifier and Type | Method and Description |
---|---|
static <T extends Serializable> |
TaskFactory.get(T work) |
static <T extends Serializable> |
TaskFactory.get(T work,
HiveConf conf) |
Task<? extends Serializable> |
Task.getAndInitBackupTask() |
static <T extends Serializable> |
TaskFactory.getAndMakeChild(T work,
HiveConf conf,
Task<? extends Serializable>... tasklist) |
Task<? extends Serializable> |
Task.getBackupTask() |
static Task<?> |
ReplCopyTask.getLoadCopyTask(ReplicationSpec replicationSpec,
org.apache.hadoop.fs.Path srcPath,
org.apache.hadoop.fs.Path dstPath,
HiveConf conf) |
Task<? extends Serializable> |
DDLTask.getSubtask() |
Task<? extends Serializable> |
TaskRunner.getTask() |
Modifier and Type | Method and Description |
---|---|
static List<Task<? extends Serializable>> |
Task.findLeafs(List<Task<? extends Serializable>> rootTasks) |
List<Task<? extends Serializable>> |
Task.getBackupChildrenTasks() |
List<Task<? extends Serializable>> |
Task.getChildTasks() |
List<Task<? extends Serializable>> |
Task.getDependentTasks()
The default dependent tasks are just child tasks, but different types could implement their own
(e.g. ConditionalTask overrides this to return its list of tasks).
|
List<Task<? extends Serializable>> |
ConditionalTask.getDependentTasks() |
List<Task<? extends Serializable>> |
Task.getFeedSubscribers() |
List<Task<? extends Serializable>> |
ConditionalTask.getListTasks() |
List<Task<? extends Serializable>> |
Task.getParentTasks() |
Modifier and Type | Method and Description |
---|---|
boolean |
Task.addDependentTask(Task<? extends Serializable> dependent)
Add a dependent task on the current task.
|
boolean |
ConditionalTask.addDependentTask(Task<? extends Serializable> dependent)
Add a dependent task on the current conditional task.
|
static <T extends Serializable> |
TaskFactory.getAndMakeChild(T work,
HiveConf conf,
Task<? extends Serializable>... tasklist) |
org.json.JSONObject |
ExplainTask.getJSONPlan(PrintStream out,
List<Task<?>> tasks,
Task<?> fetchTask,
boolean jsonOutput,
boolean isExtended,
boolean appendTaskType) |
static void |
TaskFactory.makeChild(Task<?> ret,
Task<? extends Serializable>... tasklist) |
static void |
TaskFactory.makeChild(Task<?> ret,
Task<? extends Serializable>... tasklist) |
void |
Task.removeDependentTask(Task<? extends Serializable> dependent)
Remove the dependent task.
|
static void |
Utilities.reworkMapRedWork(Task<? extends Serializable> task,
boolean reworkMapredWork,
HiveConf conf)
The check here is kind of not clean.
|
void |
Task.setBackupTask(Task<? extends Serializable> backupTask) |
void |
Task.subscribeFeed(Task<? extends Serializable> publisher)
Subscribe the feed of publisher.
|
Modifier and Type | Method and Description |
---|---|
static List<Task<? extends Serializable>> |
Task.findLeafs(List<Task<? extends Serializable>> rootTasks) |
org.json.JSONObject |
ExplainTask.getJSONPlan(PrintStream out,
List<Task<?>> tasks,
Task<?> fetchTask,
boolean jsonOutput,
boolean isExtended,
boolean appendTaskType) |
static List<ExecDriver> |
Utilities.getMRTasks(List<Task<? extends Serializable>> tasks) |
static int |
Utilities.getNumClusterJobs(List<Task<? extends Serializable>> tasks) |
static List<SparkTask> |
Utilities.getSparkTasks(List<Task<? extends Serializable>> tasks) |
static List<TezTask> |
Utilities.getTezTasks(List<Task<? extends Serializable>> tasks) |
static <T> void |
NodeUtils.iterateTask(Collection<Task<? extends Serializable>> tasks,
Class<T> clazz,
NodeUtils.Function<T> function) |
org.json.JSONObject |
ExplainTask.outputDependencies(PrintStream out,
boolean jsonOutput,
boolean appendTaskType,
List<Task> tasks) |
org.json.JSONObject |
ExplainTask.outputStagePlans(PrintStream out,
List<Task> tasks,
boolean jsonOutput,
boolean isExtended) |
void |
Task.setBackupChildrenTasks(List<Task<? extends Serializable>> backupChildrenTasks) |
void |
Task.setChildTasks(List<Task<? extends Serializable>> childTasks) |
void |
Task.setFeedSubscribers(List<Task<? extends Serializable>> s) |
void |
ConditionalTask.setListTasks(List<Task<? extends Serializable>> listTasks) |
void |
Task.setParentTasks(List<Task<? extends Serializable>> parentTasks) |
Constructor and Description |
---|
TaskRunner(Task<? extends Serializable> tsk) |
Constructor and Description |
---|
TaskTuple(Class<T> workClass,
Class<? extends Task<T>> taskClass) |
Modifier and Type | Class and Description |
---|---|
class |
ExecDriver
ExecDriver is the central class in coordinating execution of any map-reduce task.
|
class |
MapredLocalTask
MapredLocalTask represents any local work (i.e., client-side work) that Hive needs to
execute.
|
class |
MapRedTask
Extension of ExecDriver:
- can optionally spawn a map-reduce task from a separate jvm
- will make last minute adjustments to map-reduce job parameters, viz:
* estimating number of reducers
* estimating whether job should run locally
|
Modifier and Type | Field and Description |
---|---|
protected Task<? extends Serializable> |
HadoopJobExecHelper.task |
Constructor and Description |
---|
HadoopJobExecHelper(org.apache.hadoop.mapred.JobConf job,
SessionState.LogHelper console,
Task<? extends Serializable> task,
HadoopJobExecHook hookCallBack) |
Modifier and Type | Class and Description |
---|---|
class |
ReplDumpTask |
class |
ReplStateLogTask
ReplStateLogTask.
|
Modifier and Type | Method and Description |
---|---|
static Task<?> |
ReplUtils.getTableCheckpointTask(ImportTableDesc tableDesc,
HashMap<String,String> partSpec,
String dumpRoot,
HiveConf conf) |
static Task<?> |
ReplUtils.getTableReplLogTask(ImportTableDesc tableDesc,
ReplLogger replLogger,
HiveConf conf) |
Modifier and Type | Class and Description |
---|---|
class |
ReplLoadTask |
Modifier and Type | Method and Description |
---|---|
void |
AddDependencyToLeaves.process(Task<? extends Serializable> task) |
boolean |
AddDependencyToLeaves.skipProcessing(Task<? extends Serializable> task) |
Constructor and Description |
---|
AddDependencyToLeaves(Task<? extends Serializable> postDependencyTask) |
Modifier and Type | Method and Description |
---|---|
List<Task<? extends Serializable>> |
TaskTracker.tasks() |
Modifier and Type | Method and Description |
---|---|
void |
TaskTracker.addDependentTask(Task<? extends Serializable> dependent) |
void |
TaskTracker.addTask(Task<? extends Serializable> task)
this method is used to identify all the tasks in a graph.
|
Modifier and Type | Class and Description |
---|---|
class |
SparkTask |
Modifier and Type | Class and Description |
---|---|
class |
TezTask
TezTask handles the execution of TezWork.
|
Modifier and Type | Method and Description |
---|---|
void |
DAGTraversal.Function.process(Task<? extends Serializable> task) |
boolean |
DAGTraversal.Function.skipProcessing(Task<? extends Serializable> task) |
Modifier and Type | Method and Description |
---|---|
static void |
DAGTraversal.traverse(List<Task<? extends Serializable>> tasks,
DAGTraversal.Function function) |
Modifier and Type | Method and Description |
---|---|
void |
HiveHistoryImpl.endTask(String queryId,
Task<? extends Serializable> task) |
void |
HiveHistory.endTask(String queryId,
Task<? extends Serializable> task)
Called at the end of a task.
|
void |
HiveHistoryImpl.progressTask(String queryId,
Task<? extends Serializable> task) |
void |
HiveHistory.progressTask(String queryId,
Task<? extends Serializable> task)
Logs the progress of a task if ConfVars.HIVE_LOG_INCREMENTAL_PLAN_PROGRESS is
set to true.
|
void |
HiveHistoryImpl.startTask(String queryId,
Task<? extends Serializable> task,
String taskName) |
void |
HiveHistory.startTask(String queryId,
Task<? extends Serializable> task,
String taskName)
Called at the start of a task.
|
Modifier and Type | Class and Description |
---|---|
class |
MergeFileTask
Task for fast merging of ORC and RC files.
|
Modifier and Type | Class and Description |
---|---|
class |
ColumnTruncateTask |
Modifier and Type | Method and Description |
---|---|
static Task<MoveWork> |
GenMapRedUtils.findMoveTaskForFsopOutput(List<Task<MoveWork>> mvTasks,
org.apache.hadoop.fs.Path fsopFinalDir,
boolean isMmFsop) |
Task<? extends Serializable> |
GenMRProcContext.getCurrTask() |
Task<? extends Serializable> |
GenMRProcContext.GenMapRedCtx.getCurrTask() |
Task<? extends Serializable> |
GenMRProcContext.GenMRUnionCtx.getUTask() |
Modifier and Type | Method and Description |
---|---|
Map<FileSinkDesc,Task<? extends Serializable>> |
GenMRProcContext.getLinkedFileDescTasks() |
List<Task<MoveWork>> |
GenMRProcContext.getMvTask() |
HashMap<Operator<? extends OperatorDesc>,Task<? extends Serializable>> |
GenMRProcContext.getOpTaskMap() |
List<Task<? extends Serializable>> |
GenMRProcContext.getRootTasks() |
Modifier and Type | Method and Description |
---|---|
static void |
GenMapRedUtils.addDependentMoveTasks(Task<MoveWork> mvTask,
HiveConf hconf,
Task<? extends Serializable> parentTask,
DependencyCollectionTask dependencyTask)
Adds the dependencyTaskForMultiInsert in ctx as a dependent of parentTask.
|
static void |
GenMapRedUtils.addDependentMoveTasks(Task<MoveWork> mvTask,
HiveConf hconf,
Task<? extends Serializable> parentTask,
DependencyCollectionTask dependencyTask)
Adds the dependencyTaskForMultiInsert in ctx as a dependent of parentTask.
|
boolean |
GenMRProcContext.addRootIfPossible(Task<? extends Serializable> task) |
void |
GenMRProcContext.addSeenOp(Task task,
Operator operator) |
static void |
GenMapRedUtils.addStatsTask(FileSinkOperator nd,
MoveTask mvTask,
Task<? extends Serializable> currTask,
HiveConf hconf)
Add the StatsTask as a dependent task of the MoveTask
because StatsTask will change the Table/Partition metadata.
|
static org.apache.hadoop.fs.Path |
GenMapRedUtils.createMoveTask(Task<? extends Serializable> currTask,
boolean chDir,
FileSinkOperator fsOp,
ParseContext parseCtx,
List<Task<MoveWork>> mvTasks,
HiveConf hconf,
DependencyCollectionTask dependencyTask)
Create and add any dependent move tasks
|
static void |
GenMapRedUtils.createMRWorkForMergingFiles(FileSinkOperator fsInput,
org.apache.hadoop.fs.Path finalName,
DependencyCollectionTask dependencyTask,
List<Task<MoveWork>> mvTasks,
HiveConf conf,
Task<? extends Serializable> currTask,
LineageState lineageState) |
static void |
GenMapRedUtils.deriveFinalExplainAttributes(Task<? extends Serializable> task,
org.apache.hadoop.conf.Configuration conf)
Called at the end of TaskCompiler::compile to derive final
explain attributes based on previous compilation.
|
static void |
GenMapRedUtils.initUnionPlan(GenMRProcContext opProcCtx,
UnionOperator currUnionOp,
Task<? extends Serializable> currTask,
boolean local) |
static void |
GenMapRedUtils.initUnionPlan(ReduceSinkOperator op,
UnionOperator currUnionOp,
GenMRProcContext opProcCtx,
Task<? extends Serializable> unionTask)
Initialize the current union plan.
|
static void |
GenMapRedUtils.internTableDesc(Task<?> task,
com.google.common.collect.Interner<TableDesc> interner) |
static boolean |
GenMapRedUtils.isMergeRequired(List<Task<MoveWork>> mvTasks,
HiveConf hconf,
FileSinkOperator fsOp,
Task<? extends Serializable> currTask,
boolean isInsertTable)
Returns true iff the fsOp requires a merge
|
boolean |
GenMRProcContext.isSeenOp(Task task,
Operator operator) |
static void |
GenMapRedUtils.joinPlan(Task<? extends Serializable> currTask,
Task<? extends Serializable> oldTask,
GenMRProcContext opProcCtx)
Merge the current task into the old task for the reducer
|
static void |
GenMapRedUtils.joinPlan(Task<? extends Serializable> currTask,
Task<? extends Serializable> oldTask,
GenMRProcContext opProcCtx)
Merge the current task into the old task for the reducer
|
static void |
GenMapRedUtils.joinUnionPlan(GenMRProcContext opProcCtx,
UnionOperator currUnionOp,
Task<? extends Serializable> currentUnionTask,
Task<? extends Serializable> existingTask,
boolean local) |
static void |
GenMapRedUtils.joinUnionPlan(GenMRProcContext opProcCtx,
UnionOperator currUnionOp,
Task<? extends Serializable> currentUnionTask,
Task<? extends Serializable> existingTask,
boolean local) |
void |
GenMRProcContext.setCurrTask(Task<? extends Serializable> currTask) |
static void |
GenMapRedUtils.setKeyAndValueDescForTaskTree(Task<? extends Serializable> task)
Set the key and value description for all the tasks rooted at the given
task.
|
static void |
GenMapRedUtils.setTaskPlan(String alias_id,
TableScanOperator topOp,
Task<?> task,
boolean local,
GenMRProcContext opProcCtx)
set the current task in the mapredWork.
|
static void |
GenMapRedUtils.setTaskPlan(String alias_id,
TableScanOperator topOp,
Task<?> task,
boolean local,
GenMRProcContext opProcCtx,
PrunedPartitionList pList)
set the current task in the mapredWork.
|
Modifier and Type | Method and Description |
---|---|
static org.apache.hadoop.fs.Path |
GenMapRedUtils.createMoveTask(Task<? extends Serializable> currTask,
boolean chDir,
FileSinkOperator fsOp,
ParseContext parseCtx,
List<Task<MoveWork>> mvTasks,
HiveConf hconf,
DependencyCollectionTask dependencyTask)
Create and add any dependent move tasks
|
static void |
GenMapRedUtils.createMRWorkForMergingFiles(FileSinkOperator fsInput,
org.apache.hadoop.fs.Path finalName,
DependencyCollectionTask dependencyTask,
List<Task<MoveWork>> mvTasks,
HiveConf conf,
Task<? extends Serializable> currTask,
LineageState lineageState) |
static Task<MoveWork> |
GenMapRedUtils.findMoveTaskForFsopOutput(List<Task<MoveWork>> mvTasks,
org.apache.hadoop.fs.Path fsopFinalDir,
boolean isMmFsop) |
static boolean |
GenMapRedUtils.isMergeRequired(List<Task<MoveWork>> mvTasks,
HiveConf hconf,
FileSinkOperator fsOp,
Task<? extends Serializable> currTask,
boolean isInsertTable)
Returns true iff the fsOp requires a merge
|
void |
GenMRProcContext.setLinkedFileDescTasks(Map<FileSinkDesc,Task<? extends Serializable>> linkedFileDescTasks) |
void |
GenMRProcContext.setMvTask(List<Task<MoveWork>> mvTask) |
void |
GenMRProcContext.setOpTaskMap(HashMap<Operator<? extends OperatorDesc>,Task<? extends Serializable>> opTaskMap) |
void |
GenMRProcContext.setRootTasks(List<Task<? extends Serializable>> rootTasks) |
Constructor and Description |
---|
GenMapRedCtx(Task<? extends Serializable> currTask,
String currAliasId) |
GenMRUnionCtx(Task<? extends Serializable> uTask) |
Modifier and Type | Field and Description |
---|---|
protected Task<? extends Serializable> |
PhysicalContext.fetchTask |
Modifier and Type | Field and Description |
---|---|
protected List<Task<? extends Serializable>> |
PhysicalContext.rootTasks |
protected Set<Task<?>> |
StageIDsRearranger.TaskTraverse.traversed |
Modifier and Type | Method and Description |
---|---|
Task<? extends Serializable> |
SkewJoinResolver.SkewJoinProcCtx.getCurrentTask() |
Task<? extends Serializable> |
MapJoinResolver.LocalMapJoinProcCtx.getCurrentTask() |
Task<? extends Serializable> |
PhysicalContext.getFetchTask() |
Task<? extends Serializable> |
CommonJoinTaskDispatcher.processCurrentTask(MapRedTask currTask,
ConditionalTask conditionalTask,
Context context) |
Task<? extends Serializable> |
SortMergeJoinTaskDispatcher.processCurrentTask(MapRedTask currTask,
ConditionalTask conditionalTask,
Context context) |
abstract Task<? extends Serializable> |
AbstractJoinTaskDispatcher.processCurrentTask(MapRedTask currTask,
ConditionalTask conditionalTask,
Context context) |
Modifier and Type | Method and Description |
---|---|
protected List<Task<?>> |
StageIDsRearranger.TaskTraverse.getChildTasks(Task<?> task) |
static List<Task> |
StageIDsRearranger.getExplainOrder(HiveConf conf,
List<Task<?>> tasks) |
static List<Task> |
StageIDsRearranger.getFetchSources(List<Task<?>> tasks) |
List<Task<? extends Serializable>> |
PhysicalContext.getRootTasks() |
protected List<Task<?>> |
StageIDsRearranger.TaskTraverse.next(Task<?> task) |
Modifier and Type | Method and Description |
---|---|
protected void |
StageIDsRearranger.TaskTraverse.accepted(Task<?> task) |
void |
PhysicalContext.addToRootTask(Task<? extends Serializable> tsk) |
protected List<Task<?>> |
StageIDsRearranger.TaskTraverse.getChildTasks(Task<?> task) |
protected boolean |
StageIDsRearranger.TaskTraverse.isReady(Task<?> task) |
protected List<Task<?>> |
StageIDsRearranger.TaskTraverse.next(Task<?> task) |
static void |
GenMRSkewJoinProcessor.processSkewJoin(JoinOperator joinOp,
Task<? extends Serializable> currTask,
ParseContext parseCtx)
Create tasks for processing skew joins.
|
static void |
GenSparkSkewJoinProcessor.processSkewJoin(JoinOperator joinOp,
Task<? extends Serializable> currTask,
ReduceWork reduceWork,
ParseContext parseCtx) |
protected void |
StageIDsRearranger.TaskTraverse.rejected(Task<?> child) |
void |
PhysicalContext.removeFromRootTask(Task<? extends Serializable> tsk) |
protected void |
AbstractJoinTaskDispatcher.replaceTask(Task<? extends Serializable> currTask,
Task<? extends Serializable> newTask) |
protected void |
AbstractJoinTaskDispatcher.replaceTask(Task<? extends Serializable> currTask,
Task<? extends Serializable> newTask) |
protected void |
AbstractJoinTaskDispatcher.replaceTaskWithConditionalTask(Task<? extends Serializable> currTask,
ConditionalTask cndTsk) |
void |
SkewJoinResolver.SkewJoinProcCtx.setCurrentTask(Task<? extends Serializable> currentTask) |
void |
MapJoinResolver.LocalMapJoinProcCtx.setCurrentTask(Task<? extends Serializable> currentTask) |
void |
PhysicalContext.setFetchTask(Task<? extends Serializable> fetchTask) |
void |
StageIDsRearranger.TaskTraverse.traverse(Task<?> task) |
Modifier and Type | Method and Description |
---|---|
static List<Task> |
StageIDsRearranger.getExplainOrder(HiveConf conf,
List<Task<?>> tasks) |
static List<Task> |
StageIDsRearranger.getFetchSources(List<Task<?>> tasks) |
void |
PhysicalContext.setRootTasks(List<Task<? extends Serializable>> rootTasks) |
Constructor and Description |
---|
LocalMapJoinProcCtx(Task<? extends Serializable> task,
ParseContext parseCtx) |
PhysicalContext(HiveConf conf,
ParseContext parseContext,
Context context,
List<Task<? extends Serializable>> rootTasks,
Task<? extends Serializable> fetchTask) |
SkewJoinProcCtx(Task<? extends Serializable> task,
ParseContext parseCtx) |
Constructor and Description |
---|
PhysicalContext(HiveConf conf,
ParseContext parseContext,
Context context,
List<Task<? extends Serializable>> rootTasks,
Task<? extends Serializable> fetchTask) |
Constructor and Description |
---|
SparkSkewJoinProcCtx(Task<? extends Serializable> task,
ParseContext parseCtx) |
Modifier and Type | Field and Description |
---|---|
List<Task<MoveWork>> |
GenTezProcContext.moveTask |
protected List<Task<?>> |
BaseSemanticAnalyzer.rootTasks |
List<Task<? extends Serializable>> |
GenTezProcContext.rootTasks |
Modifier and Type | Method and Description |
---|---|
List<Task<? extends Serializable>> |
BaseSemanticAnalyzer.getAllRootTasks() |
List<Task<? extends Serializable>> |
SemanticAnalyzer.getAllRootTasks() |
List<Task<? extends Serializable>> |
BaseSemanticAnalyzer.getRootTasks() |
List<Task<? extends Serializable>> |
EximUtil.SemanticAnalyzerWrapperContext.getTasks() |
Modifier and Type | Method and Description |
---|---|
void |
ParseContext.replaceRootTask(Task<? extends Serializable> rootTask,
List<? extends Task<? extends Serializable>> tasks) |
protected abstract void |
TaskCompiler.setInputFormat(Task<? extends Serializable> rootTask) |
protected void |
TezCompiler.setInputFormat(Task<? extends Serializable> task) |
protected void |
MapReduceCompiler.setInputFormat(Task<? extends Serializable> task) |
Modifier and Type | Method and Description |
---|---|
void |
TaskCompiler.compile(ParseContext pCtx,
List<Task<? extends Serializable>> rootTasks,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
protected abstract void |
TaskCompiler.decideExecMode(List<Task<? extends Serializable>> rootTasks,
Context ctx,
GlobalLimitCtx globalLimitCtx) |
protected void |
TezCompiler.decideExecMode(List<Task<? extends Serializable>> rootTasks,
Context ctx,
GlobalLimitCtx globalLimitCtx) |
protected void |
MapReduceCompiler.decideExecMode(List<Task<? extends Serializable>> rootTasks,
Context ctx,
GlobalLimitCtx globalLimitCtx) |
protected abstract void |
TaskCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
protected abstract void |
TaskCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
protected void |
TezCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
protected void |
TezCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
protected void |
MapReduceCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
protected void |
MapReduceCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs) |
ParseContext |
TaskCompiler.getParseContext(ParseContext pCtx,
List<Task<? extends Serializable>> rootTasks)
Create a clone of the parse context
|
protected abstract void |
TaskCompiler.optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
Context ctx) |
protected void |
TezCompiler.optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
Context ctx) |
protected void |
MapReduceCompiler.optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
Context ctx) |
void |
AbstractSemanticAnalyzerHook.postAnalyze(HiveSemanticAnalyzerHookContext context,
List<Task<? extends Serializable>> rootTasks) |
void |
HiveSemanticAnalyzerHook.postAnalyze(HiveSemanticAnalyzerHookContext context,
List<Task<? extends Serializable>> rootTasks)
Invoked after Hive performs its own semantic analysis on a
statement (including optimization).
|
void |
ParseContext.replaceRootTask(Task<? extends Serializable> rootTask,
List<? extends Task<? extends Serializable>> tasks) |
Modifier and Type | Method and Description |
---|---|
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createCreateRoleTask(ASTNode node,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createCreateRoleTask(ASTNode ast,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createDropRoleTask(ASTNode node,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createDropRoleTask(ASTNode ast,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createGrantRoleTask(ASTNode node,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createGrantRoleTask(ASTNode ast,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createGrantTask(ASTNode node,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createGrantTask(ASTNode ast,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createRevokeRoleTask(ASTNode node,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createRevokeRoleTask(ASTNode ast,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createRevokeTask(ASTNode node,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createRevokeTask(ASTNode ast,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createSetRoleTask(String roleName,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createSetRoleTask(String roleName,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createShowCurrentRoleTask(HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs,
org.apache.hadoop.fs.Path resFile) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createShowCurrentRoleTask(HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs,
org.apache.hadoop.fs.Path resFile) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createShowGrantTask(ASTNode node,
org.apache.hadoop.fs.Path resultFile,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createShowGrantTask(ASTNode ast,
org.apache.hadoop.fs.Path resultFile,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createShowRoleGrantTask(ASTNode node,
org.apache.hadoop.fs.Path resultFile,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createShowRoleGrantTask(ASTNode ast,
org.apache.hadoop.fs.Path resultFile,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createShowRolePrincipalsTask(ASTNode ast,
org.apache.hadoop.fs.Path resFile,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createShowRolePrincipalsTask(ASTNode ast,
org.apache.hadoop.fs.Path resFile,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactory.createShowRolesTask(ASTNode ast,
org.apache.hadoop.fs.Path resFile,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Task<? extends Serializable> |
HiveAuthorizationTaskFactoryImpl.createShowRolesTask(ASTNode ast,
org.apache.hadoop.fs.Path resFile,
HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) |
Modifier and Type | Field and Description |
---|---|
Task<? extends Serializable> |
MessageHandler.Context.precursor |
Modifier and Type | Method and Description |
---|---|
List<Task<? extends Serializable>> |
AddPrimaryKeyHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
AddUniqueConstraintHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
DropPartitionHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
AddNotNullConstraintHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
AllocWriteIdHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
DropTableHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
AddForeignKeyHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
RenamePartitionHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
DropDatabaseHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
CommitTxnHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
CreateFunctionHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
AlterDatabaseHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
TruncateTableHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
TableHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
DropFunctionHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
CreateDatabaseHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
OpenTxnHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
DropConstraintHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
TruncatePartitionHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
AbortTxnHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
InsertHandler.handle(MessageHandler.Context withinContext) |
List<Task<? extends Serializable>> |
RenameTableHandler.handle(MessageHandler.Context context) |
List<Task<? extends Serializable>> |
MessageHandler.handle(MessageHandler.Context withinContext) |
List<Task<? extends Serializable>> |
DefaultHandler.handle(MessageHandler.Context withinContext) |
Constructor and Description |
---|
Context(String dbName,
String tableName,
String location,
Task<? extends Serializable> precursor,
DumpMetaData dmd,
HiveConf hiveConf,
Hive db,
Context nestedContext,
org.slf4j.Logger log) |
Modifier and Type | Field and Description |
---|---|
List<Task<MoveWork>> |
GenSparkProcContext.moveTask |
List<Task<? extends Serializable>> |
GenSparkProcContext.rootTasks |
Modifier and Type | Method and Description |
---|---|
static org.apache.hadoop.fs.Path |
GenSparkUtils.createMoveTask(Task<? extends Serializable> currTask,
boolean chDir,
FileSinkOperator fsOp,
ParseContext parseCtx,
List<Task<MoveWork>> mvTasks,
HiveConf hconf,
DependencyCollectionTask dependencyTask)
Create and add any dependent move tasks.
|
protected void |
SparkCompiler.setInputFormat(Task<? extends Serializable> task) |
Modifier and Type | Method and Description |
---|---|
static org.apache.hadoop.fs.Path |
GenSparkUtils.createMoveTask(Task<? extends Serializable> currTask,
boolean chDir,
FileSinkOperator fsOp,
ParseContext parseCtx,
List<Task<MoveWork>> mvTasks,
HiveConf hconf,
DependencyCollectionTask dependencyTask)
Create and add any dependent move tasks.
|
protected void |
SparkCompiler.decideExecMode(List<Task<? extends Serializable>> rootTasks,
Context ctx,
GlobalLimitCtx globalLimitCtx) |
protected void |
SparkCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs)
TODO: need to turn on rules that are commented out and add more if necessary.
|
protected void |
SparkCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
List<Task<MoveWork>> mvTask,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs)
TODO: need to turn on rules that are commented out and add more if necessary.
|
protected void |
SparkCompiler.optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
Context ctx) |
Constructor and Description |
---|
GenSparkProcContext(HiveConf conf,
ParseContext parseContext,
List<Task<MoveWork>> moveTask,
List<Task<? extends Serializable>> rootTasks,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
Map<String,TableScanOperator> topOps) |
GenSparkProcContext(HiveConf conf,
ParseContext parseContext,
List<Task<MoveWork>> moveTask,
List<Task<? extends Serializable>> rootTasks,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
Map<String,TableScanOperator> topOps) |
Modifier and Type | Method and Description |
---|---|
Task<? extends Serializable> |
ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx.getCommonJoinTask() |
Task<? extends Serializable> |
ImportTableDesc.getCreateTableTask(HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs,
HiveConf conf) |
Task<? extends Serializable> |
ExplainWork.getFetchTask() |
Task |
StatsWork.getSourceTask() |
Task |
BasicStatsWork.getSourceTask() |
protected Task<? extends Serializable> |
ConditionalResolverCommonJoin.resolveMapJoinTask(ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx ctx,
HiveConf conf) |
Modifier and Type | Method and Description |
---|---|
HashMap<org.apache.hadoop.fs.Path,Task<? extends Serializable>> |
ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx.getDirToTaskMap() |
List<Task<? extends Serializable>> |
ConditionalResolverMergeFiles.ConditionalResolverMergeFilesCtx.getListTasks() |
List<Task<? extends Serializable>> |
ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx.getNoSkewTask() |
ArrayList<Task<? extends Serializable>> |
ExplainWork.getRootTasks() |
List<Task<? extends Serializable>> |
ConditionalResolverCommonJoin.getTasks(HiveConf conf,
Object objCtx) |
List<Task<? extends Serializable>> |
ConditionalResolverMergeFiles.getTasks(HiveConf conf,
Object objCtx) |
List<Task<? extends Serializable>> |
ConditionalResolver.getTasks(HiveConf conf,
Object ctx)
All conditional resolvers implement this interface.
|
List<Task<? extends Serializable>> |
ConditionalResolverSkewJoin.getTasks(HiveConf conf,
Object objCtx) |
HashMap<Task<? extends Serializable>,Set<String>> |
ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx.getTaskToAliases() |
Modifier and Type | Method and Description |
---|---|
void |
ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx.setCommonJoinTask(Task<? extends Serializable> commonJoinTask) |
void |
ExplainWork.setFetchTask(Task<? extends Serializable> fetchTask) |
void |
BasicStatsWork.setSourceTask(Task sourceTask) |
void |
StatsWork.setSourceTask(Task<?> sourceTask) |
Modifier and Type | Method and Description |
---|---|
void |
ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx.setDirToTaskMap(HashMap<org.apache.hadoop.fs.Path,Task<? extends Serializable>> dirToTaskMap) |
void |
ConditionalResolverMergeFiles.ConditionalResolverMergeFilesCtx.setListTasks(List<Task<? extends Serializable>> listTasks) |
void |
ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx.setNoSkewTask(List<Task<? extends Serializable>> noSkewTask) |
void |
ExplainWork.setRootTasks(ArrayList<Task<? extends Serializable>> rootTasks) |
void |
ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx.setTaskToAliases(HashMap<Task<? extends Serializable>,Set<String>> taskToAliases) |
Constructor and Description |
---|
ExplainWork(org.apache.hadoop.fs.Path resFile,
ParseContext pCtx,
List<Task<? extends Serializable>> rootTasks,
Task<? extends Serializable> fetchTask,
BaseSemanticAnalyzer analyzer,
ExplainConfiguration config,
String cboInfo) |
Constructor and Description |
---|
ConditionalResolverMergeFilesCtx(List<Task<? extends Serializable>> listTasks,
String dir) |
ConditionalResolverSkewJoinCtx(HashMap<org.apache.hadoop.fs.Path,Task<? extends Serializable>> dirToTaskMap,
List<Task<? extends Serializable>> noSkewTask) |
ConditionalResolverSkewJoinCtx(HashMap<org.apache.hadoop.fs.Path,Task<? extends Serializable>> dirToTaskMap,
List<Task<? extends Serializable>> noSkewTask) |
ExplainWork(org.apache.hadoop.fs.Path resFile,
ParseContext pCtx,
List<Task<? extends Serializable>> rootTasks,
Task<? extends Serializable> fetchTask,
BaseSemanticAnalyzer analyzer,
ExplainConfiguration config,
String cboInfo) |
Modifier and Type | Method and Description |
---|---|
Task |
StatsCollectionContext.getTask() |
Modifier and Type | Method and Description |
---|---|
void |
StatsCollectionContext.setTask(Task task) |
Modifier and Type | Method and Description |
---|---|
protected void |
HCatSemanticAnalyzerBase.authorizeDDL(HiveSemanticAnalyzerHookContext context,
List<Task<? extends Serializable>> rootTasks)
Checks for the given rootTasks, and calls authorizeDDLWork() for each DDLWork to
be authorized.
|
void |
HCatSemanticAnalyzerBase.postAnalyze(HiveSemanticAnalyzerHookContext context,
List<Task<? extends Serializable>> rootTasks) |
void |
HCatSemanticAnalyzer.postAnalyze(HiveSemanticAnalyzerHookContext context,
List<Task<? extends Serializable>> rootTasks) |
Copyright © 2022 The Apache Software Foundation. All rights reserved.