Modifier and Type | Method and Description |
---|---|
boolean | HBaseStatsAggregator.connect(org.apache.hadoop.conf.Configuration hiveconf, Task sourceTask) Does the necessary HBase initializations. |
Modifier and Type | Method and Description |
---|---|
Task<? extends Serializable> | DriverContext.getRunnable(int maxthreads) |
Modifier and Type | Method and Description |
---|---|
ArrayList<Task<? extends Serializable>> | QueryPlan.getRootTasks() |
Modifier and Type | Method and Description |
---|---|
boolean | DriverContext.addToRunnable(Task<? extends Serializable> tsk) |
static boolean | DriverContext.isLaunchable(Task<? extends Serializable> tsk) Checks if a task can be launched. |
void | DriverContext.remove(Task<? extends Serializable> task) |
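A minimal sketch, not taken from the Hive source, of the launch loop implied by the DriverContext methods above: queue tasks that pass the launchability check, then ask for one runnable task. The DriverContext instance, the list of candidate tasks, the thread cap of 8, and the helper class name are all assumptions supplied for illustration.

```java
import java.io.Serializable;
import java.util.List;

import org.apache.hadoop.hive.ql.DriverContext;
import org.apache.hadoop.hive.ql.exec.Task;

public class LaunchLoopSketch {
  public static Task<? extends Serializable> pickNext(
      DriverContext driverCxt,
      List<Task<? extends Serializable>> candidates) throws Exception {
    for (Task<? extends Serializable> tsk : candidates) {
      // isLaunchable() checks if a task can be launched (see the table above).
      if (DriverContext.isLaunchable(tsk)) {
        driverCxt.addToRunnable(tsk); // queue it for execution
      }
    }
    // Hand back one runnable task, respecting the caller-supplied thread limit.
    return driverCxt.getRunnable(8);
  }
}
```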
Modifier and Type | Method and Description |
---|---|
void | QueryPlan.setRootTasks(ArrayList<Task<? extends Serializable>> rootTasks) |
Modifier and Type | Class and Description |
---|---|
class | ColumnStatsTask ColumnStatsTask implementation. |
class | ColumnStatsUpdateTask ColumnStatsUpdateTask implementation. |
class | ConditionalTask Conditional Task implementation. |
class | CopyTask CopyTask implementation. |
class | DDLTask DDLTask implementation. |
class | DependencyCollectionTask DependencyCollectionTask. |
class | ExplainSQRewriteTask |
class | ExplainTask ExplainTask implementation. |
class | FetchTask FetchTask implementation. |
class | FunctionTask FunctionTask. |
class | MoveTask MoveTask implementation. |
class | StatsNoJobTask StatsNoJobTask is used in cases where stats collection is the only task for the given query (no parent MR or Tez job). |
class | StatsTask StatsTask implementation. |
Modifier and Type | Field and Description |
---|---|
protected Task<? extends Serializable> | Task.backupTask |
protected Task<? extends Serializable> | TaskRunner.tsk |
Modifier and Type | Field and Description |
---|---|
protected List<Task<? extends Serializable>> | Task.backupChildrenTasks |
protected List<Task<? extends Serializable>> | Task.childTasks |
protected List<Task<? extends Serializable>> | Task.feedSubscribers |
protected List<Task<? extends Serializable>> | Task.parentTasks |
Class<? extends Task<T>> | TaskFactory.TaskTuple.taskClass |
Modifier and Type | Method and Description |
---|---|
static <T extends Serializable> | TaskFactory.get(Class<T> workClass, HiveConf conf) |
static <T extends Serializable> | TaskFactory.get(T work, HiveConf conf, Task<? extends Serializable>... tasklist) |
Task<? extends Serializable> | Task.getAndInitBackupTask() |
static <T extends Serializable> | TaskFactory.getAndMakeChild(T work, HiveConf conf, Task<? extends Serializable>... tasklist) |
Task<? extends Serializable> | Task.getBackupTask() |
Task<? extends Serializable> | TaskRunner.getTask() |
Modifier and Type | Method and Description |
---|---|
List<Task<? extends Serializable>> | Task.getBackupChildrenTasks() |
List<Task<? extends Serializable>> | Task.getChildTasks() |
List<Task<? extends Serializable>> | Task.getDependentTasks() The default dependent tasks are just child tasks, but different types could implement their own (e.g. |
List<Task<? extends Serializable>> | ConditionalTask.getDependentTasks() |
List<Task<? extends Serializable>> | Task.getFeedSubscribers() |
List<Task<? extends Serializable>> | ConditionalTask.getListTasks() |
List<Task<? extends Serializable>> | Task.getParentTasks() |
Modifier and Type | Method and Description |
---|---|
boolean | Task.addDependentTask(Task<? extends Serializable> dependent) Add a dependent task on the current task. |
boolean | ConditionalTask.addDependentTask(Task<? extends Serializable> dependent) Add a dependent task on the current conditional task. |
static <T extends Serializable> | TaskFactory.get(T work, HiveConf conf, Task<? extends Serializable>... tasklist) |
static <T extends Serializable> | TaskFactory.getAndMakeChild(T work, HiveConf conf, Task<? extends Serializable>... tasklist) |
org.json.JSONObject | ExplainTask.getJSONPlan(PrintStream out, String ast, List<Task<?>> tasks, Task<?> fetchTask, boolean jsonOutput, boolean isExtended, boolean appendTaskType) |
static void | TaskFactory.makeChild(Task<?> ret, Task<? extends Serializable>... tasklist) |
static void | TaskFactory.makeChild(Task<?> ret, Task<? extends Serializable>... tasklist) |
void | Task.removeDependentTask(Task<? extends Serializable> dependent) Remove the dependent task. |
static void | Utilities.reworkMapRedWork(Task<? extends Serializable> task, boolean reworkMapredWork, HiveConf conf) The check here is kind of not clean. |
void | Task.setBackupTask(Task<? extends Serializable> backupTask) |
void | Task.subscribeFeed(Task<? extends Serializable> publisher) Subscribe the feed of publisher. |
Modifier and Type | Method and Description |
---|---|
org.json.JSONObject | ExplainTask.getJSONPlan(PrintStream out, String ast, List<Task<?>> tasks, Task<?> fetchTask, boolean jsonOutput, boolean isExtended, boolean appendTaskType) |
static List<ExecDriver> | Utilities.getMRTasks(List<Task<? extends Serializable>> tasks) |
static List<SparkTask> | Utilities.getSparkTasks(List<Task<? extends Serializable>> tasks) |
static List<TezTask> | Utilities.getTezTasks(List<Task<? extends Serializable>> tasks) |
static <T> void | NodeUtils.iterateTask(Collection<Task<?>> tasks, Class<T> clazz, NodeUtils.Function<T> function) |
org.json.JSONObject | ExplainTask.outputDependencies(PrintStream out, boolean jsonOutput, boolean appendTaskType, List<Task> tasks) |
org.json.JSONObject | ExplainTask.outputStagePlans(PrintStream out, List<Task> tasks, boolean jsonOutput, boolean isExtended) |
void | Task.setBackupChildrenTasks(List<Task<? extends Serializable>> backupChildrenTasks) |
void | Task.setChildTasks(List<Task<? extends Serializable>> childTasks) |
void | Task.setFeedSubscribers(List<Task<? extends Serializable>> s) |
void | ConditionalTask.setListTasks(List<Task<? extends Serializable>> listTasks) |
void | Task.setParentTasks(List<Task<? extends Serializable>> parentTasks) |
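A minimal sketch, assumed rather than taken from the Hive source, of how the factory and dependency methods listed above fit together. MoveWork is used only as a convenient Serializable work type; the two work objects, the HiveConf, and the helper class name are assumptions.

```java
import java.io.Serializable;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.plan.MoveWork;

public class TaskWiringSketch {
  public static Task<? extends Serializable> wire(HiveConf conf,
      MoveWork firstWork, MoveWork secondWork) {
    // Wrap each unit of work in a Task via the factory.
    Task<? extends Serializable> root = TaskFactory.get(firstWork, conf);
    Task<? extends Serializable> child = TaskFactory.get(secondWork, conf);
    // Make child depend on root; the child/parent lists exposed by
    // getChildTasks()/getParentTasks() reflect the new edge.
    root.addDependentTask(child);
    // TaskFactory.getAndMakeChild(secondWork, conf, root) collapses the last two steps.
    return root;
  }
}
```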
Constructor and Description |
---|
TaskRunner(Task<? extends Serializable> tsk, TaskResult result) |
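A hedged sketch of pairing a task with its result holder via the constructor above. Only the constructor and getTask() from these tables are relied on; the no-argument TaskResult constructor and the idea of handing the runner back to the Driver are assumptions.

```java
import java.io.Serializable;

import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskResult;
import org.apache.hadoop.hive.ql.exec.TaskRunner;

public class RunnerSketch {
  public static TaskRunner wrap(Task<? extends Serializable> tsk) {
    TaskResult result = new TaskResult();      // will hold the task's exit status (assumed no-arg ctor)
    TaskRunner runner = new TaskRunner(tsk, result);
    // The runner exposes the wrapped task again through getTask().
    return runner;
  }
}
```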
Constructor and Description |
---|
TaskFactory.TaskTuple(Class<T> workClass, Class<? extends Task<T>> taskClass) |
Modifier and Type | Class and Description |
---|---|
class | ExecDriver ExecDriver is the central class in co-ordinating execution of any map-reduce task. |
class | MapredLocalTask MapredLocalTask represents any local work (i.e., client-side work) that Hive needs to execute. |
class | MapRedTask Extension of ExecDriver: can optionally spawn a map-reduce task from a separate JVM, and will make last-minute adjustments to map-reduce job parameters, viz. estimating the number of reducers and estimating whether the job should run locally. |
Modifier and Type | Field and Description |
---|---|
protected Task<? extends Serializable> | HadoopJobExecHelper.task |
Constructor and Description |
---|
HadoopJobExecHelper(org.apache.hadoop.mapred.JobConf job, SessionState.LogHelper console, Task<? extends Serializable> task, HadoopJobExecHook hookCallBack) |
Modifier and Type | Class and Description |
---|---|
class | SparkTask |
Modifier and Type | Class and Description |
---|---|
class | TezTask TezTask handles the execution of TezWork. |
Modifier and Type | Method and Description |
---|---|
void | HiveHistoryImpl.endTask(String queryId, Task<? extends Serializable> task) |
void | HiveHistory.endTask(String queryId, Task<? extends Serializable> task) Called at the end of a task. |
void | HiveHistoryImpl.progressTask(String queryId, Task<? extends Serializable> task) |
void | HiveHistory.progressTask(String queryId, Task<? extends Serializable> task) Logs progress of a task if ConfVars.HIVE_LOG_INCREMENTAL_PLAN_PROGRESS is set to true. |
void | HiveHistoryImpl.startTask(String queryId, Task<? extends Serializable> task, String taskName) |
void | HiveHistory.startTask(String queryId, Task<? extends Serializable> task, String taskName) Called at the start of a task. |
Modifier and Type | Class and Description |
---|---|
class | IndexMetadataChangeTask |
Modifier and Type | Method and Description |
---|---|
protected Task<?> | TableBasedIndexHandler.getIndexBuilderMapRedTask(Set<ReadEntity> inputs, Set<WriteEntity> outputs, Index index, boolean partitioned, PartitionDesc indexTblPartDesc, String indexTableName, PartitionDesc baseTablePartDesc, String baseTableName, String dbName) |
protected Task<?> | AggregateIndexHandler.getIndexBuilderMapRedTask(Set<ReadEntity> inputs, Set<WriteEntity> outputs, Index index, boolean partitioned, PartitionDesc indexTblPartDesc, String indexTableName, PartitionDesc baseTablePartDesc, String baseTableName, String dbName) |
protected Task<?> | TableBasedIndexHandler.getIndexBuilderMapRedTask(Set<ReadEntity> inputs, Set<WriteEntity> outputs, List<FieldSchema> indexField, boolean partitioned, PartitionDesc indexTblPartDesc, String indexTableName, PartitionDesc baseTablePartDesc, String baseTableName, String dbName) |
Modifier and Type | Method and Description |
---|---|
List<Task<?>> | TableBasedIndexHandler.generateIndexBuildTaskList(Table baseTbl, Index index, List<Partition> indexTblPartitions, List<Partition> baseTblPartitions, Table indexTbl, Set<ReadEntity> inputs, Set<WriteEntity> outputs) |
List<Task<?>> | HiveIndexHandler.generateIndexBuildTaskList(Table baseTbl, Index index, List<Partition> indexTblPartitions, List<Partition> baseTblPartitions, Table indexTbl, Set<ReadEntity> inputs, Set<WriteEntity> outputs) Requests that the handler generate a plan for building the index; the plan should read the base table and write out the index representation. |
List<Task<? extends Serializable>> | HiveIndexQueryContext.getQueryTasks() |
Modifier and Type | Method and Description |
---|---|
void | HiveIndexQueryContext.setQueryTasks(List<Task<? extends Serializable>> indexQueryTasks) |
Modifier and Type | Method and Description |
---|---|
protected Task<?> | BitmapIndexHandler.getIndexBuilderMapRedTask(Set<ReadEntity> inputs, Set<WriteEntity> outputs, List<FieldSchema> indexField, boolean partitioned, PartitionDesc indexTblPartDesc, String indexTableName, PartitionDesc baseTablePartDesc, String baseTableName, String dbName) |
Modifier and Type | Method and Description |
---|---|
protected Task<?> | CompactIndexHandler.getIndexBuilderMapRedTask(Set<ReadEntity> inputs, Set<WriteEntity> outputs, List<FieldSchema> indexField, boolean partitioned, PartitionDesc indexTblPartDesc, String indexTableName, PartitionDesc baseTablePartDesc, String baseTableName, String dbName) |
Modifier and Type | Class and Description |
---|---|
class | MergeFileTask Task for fast merging of ORC and RC files. |
Modifier and Type | Class and Description |
---|---|
class | PartialScanTask PartialScanTask. |
Modifier and Type | Class and Description |
---|---|
class | ColumnTruncateTask |
Modifier and Type | Method and Description |
---|---|
static Task<?> | IndexUtils.createRootTask(HiveConf builderConf, Set<ReadEntity> inputs, Set<WriteEntity> outputs, StringBuilder command, LinkedHashMap<String,String> partSpec, String indexTableName, String dbName) |
static Task<MoveWork> | GenMapRedUtils.findMoveTask(List<Task<MoveWork>> mvTasks, FileSinkOperator fsOp) |
Task<? extends Serializable> | GenMRProcContext.getCurrTask() |
Task<? extends Serializable> | GenMRProcContext.GenMapRedCtx.getCurrTask() |
Task<? extends Serializable> | GenMRProcContext.GenMRUnionCtx.getUTask() |
Modifier and Type | Method and Description |
---|---|
Map<FileSinkDesc,Task<? extends Serializable>> | GenMRProcContext.getLinkedFileDescTasks() |
List<Task<MoveWork>> | GenMRProcContext.getMvTask() |
HashMap<Operator<? extends OperatorDesc>,Task<? extends Serializable>> | GenMRProcContext.getOpTaskMap() |
List<Task<? extends Serializable>> | GenMRProcContext.getRootTasks() |
Modifier and Type | Method and Description |
---|---|
static void | GenMapRedUtils.addDependentMoveTasks(Task<MoveWork> mvTask, HiveConf hconf, Task<? extends Serializable> parentTask, DependencyCollectionTask dependencyTask) Adds the dependencyTaskForMultiInsert in ctx as a dependent of parentTask. |
static void | GenMapRedUtils.addDependentMoveTasks(Task<MoveWork> mvTask, HiveConf hconf, Task<? extends Serializable> parentTask, DependencyCollectionTask dependencyTask) Adds the dependencyTaskForMultiInsert in ctx as a dependent of parentTask. |
boolean | GenMRProcContext.addRootIfPossible(Task<? extends Serializable> task) |
void | GenMRProcContext.addSeenOp(Task task, Operator operator) |
static void | GenMapRedUtils.addStatsTask(FileSinkOperator nd, MoveTask mvTask, Task<? extends Serializable> currTask, HiveConf hconf) Add the StatsTask as a dependent task of the MoveTask because StatsTask will change the Table/Partition metadata. |
static ConditionalTask | GenMapRedUtils.createCondTask(HiveConf conf, Task<? extends Serializable> currTask, MoveWork mvWork, Serializable mergeWork, String inputPath) Construct a conditional task given the current leaf task, the MoveWork and the MapredWork. |
static org.apache.hadoop.fs.Path | GenMapRedUtils.createMoveTask(Task<? extends Serializable> currTask, boolean chDir, FileSinkOperator fsOp, ParseContext parseCtx, List<Task<MoveWork>> mvTasks, HiveConf hconf, DependencyCollectionTask dependencyTask) Create and add any dependent move tasks. |
static void | GenMapRedUtils.createMRWorkForMergingFiles(FileSinkOperator fsInput, org.apache.hadoop.fs.Path finalName, DependencyCollectionTask dependencyTask, List<Task<MoveWork>> mvTasks, HiveConf conf, Task<? extends Serializable> currTask) |
static void | GenMapRedUtils.initUnionPlan(GenMRProcContext opProcCtx, UnionOperator currUnionOp, Task<? extends Serializable> currTask, boolean local) |
static void | GenMapRedUtils.initUnionPlan(ReduceSinkOperator op, UnionOperator currUnionOp, GenMRProcContext opProcCtx, Task<? extends Serializable> unionTask) Initialize the current union plan. |
static void | GenMapRedUtils.internTableDesc(Task<?> task, com.google.common.collect.Interner<TableDesc> interner) |
static boolean | GenMapRedUtils.isMergeRequired(List<Task<MoveWork>> mvTasks, HiveConf hconf, FileSinkOperator fsOp, Task<? extends Serializable> currTask, boolean isInsertTable) Returns true iff the fsOp requires a merge. |
boolean | GenMRProcContext.isSeenOp(Task task, Operator operator) |
static void | GenMapRedUtils.joinPlan(Task<? extends Serializable> currTask, Task<? extends Serializable> oldTask, GenMRProcContext opProcCtx) Merge the current task into the old task for the reducer. |
static void | GenMapRedUtils.joinPlan(Task<? extends Serializable> currTask, Task<? extends Serializable> oldTask, GenMRProcContext opProcCtx) Merge the current task into the old task for the reducer. |
static void | GenMapRedUtils.joinUnionPlan(GenMRProcContext opProcCtx, UnionOperator currUnionOp, Task<? extends Serializable> currentUnionTask, Task<? extends Serializable> existingTask, boolean local) |
static void | GenMapRedUtils.joinUnionPlan(GenMRProcContext opProcCtx, UnionOperator currUnionOp, Task<? extends Serializable> currentUnionTask, Task<? extends Serializable> existingTask, boolean local) |
static void | GenMapRedUtils.linkMoveTask(Task<MoveWork> mvTask, Task<? extends Serializable> task, HiveConf hconf, DependencyCollectionTask dependencyTask) Follows the task tree down from task and makes all leaves parents of mvTask. |
static void | GenMapRedUtils.linkMoveTask(Task<MoveWork> mvTask, Task<? extends Serializable> task, HiveConf hconf, DependencyCollectionTask dependencyTask) Follows the task tree down from task and makes all leaves parents of mvTask. |
void | GenMRProcContext.setCurrTask(Task<? extends Serializable> currTask) |
static void | GenMapRedUtils.setKeyAndValueDescForTaskTree(Task<? extends Serializable> task) Set the key and value description for all the tasks rooted at the given task. |
static void | GenMapRedUtils.setTaskPlan(String alias_id, Operator<? extends OperatorDesc> topOp, Task<?> task, boolean local, GenMRProcContext opProcCtx) Set the current task in the mapredWork. |
static void | GenMapRedUtils.setTaskPlan(String alias_id, Operator<? extends OperatorDesc> topOp, Task<?> task, boolean local, GenMRProcContext opProcCtx, PrunedPartitionList pList) Set the current task in the mapredWork. |
Modifier and Type | Method and Description |
---|---|
static org.apache.hadoop.fs.Path | GenMapRedUtils.createMoveTask(Task<? extends Serializable> currTask, boolean chDir, FileSinkOperator fsOp, ParseContext parseCtx, List<Task<MoveWork>> mvTasks, HiveConf hconf, DependencyCollectionTask dependencyTask) Create and add any dependent move tasks. |
static void | GenMapRedUtils.createMRWorkForMergingFiles(FileSinkOperator fsInput, org.apache.hadoop.fs.Path finalName, DependencyCollectionTask dependencyTask, List<Task<MoveWork>> mvTasks, HiveConf conf, Task<? extends Serializable> currTask) |
static Task<MoveWork> | GenMapRedUtils.findMoveTask(List<Task<MoveWork>> mvTasks, FileSinkOperator fsOp) |
static boolean | GenMapRedUtils.isMergeRequired(List<Task<MoveWork>> mvTasks, HiveConf hconf, FileSinkOperator fsOp, Task<? extends Serializable> currTask, boolean isInsertTable) Returns true iff the fsOp requires a merge. |
static void | GenMapRedUtils.linkMoveTask(FileSinkOperator newOutput, ConditionalTask cndTsk, List<Task<MoveWork>> mvTasks, HiveConf hconf, DependencyCollectionTask dependencyTask) Make the move task in the GenMRProcContext following the FileSinkOperator a dependent of all possible subtrees branching from the ConditionalTask. |
void | GenMRProcContext.setLinkedFileDescTasks(Map<FileSinkDesc,Task<? extends Serializable>> linkedFileDescTasks) |
void | GenMRProcContext.setMvTask(List<Task<MoveWork>> mvTask) |
void | GenMRProcContext.setOpTaskMap(HashMap<Operator<? extends OperatorDesc>,Task<? extends Serializable>> opTaskMap) |
void | GenMRProcContext.setRootTasks(List<Task<? extends Serializable>> rootTasks) |
Constructor and Description |
---|
GenMRProcContext.GenMapRedCtx(Task<? extends Serializable> currTask, String currAliasId) |
GenMRProcContext.GenMRUnionCtx(Task<? extends Serializable> uTask) |
Modifier and Type | Field and Description |
---|---|
protected Task<? extends Serializable> | PhysicalContext.fetchTask |
Modifier and Type | Field and Description |
---|---|
protected List<Task<? extends Serializable>> | PhysicalContext.rootTasks |
protected Set<Task<?>> | StageIDsRearranger.TaskTraverse.traversed |
Modifier and Type | Method and Description |
---|---|
Task<? extends Serializable> | SkewJoinResolver.SkewJoinProcCtx.getCurrentTask() |
Task<? extends Serializable> | MapJoinResolver.LocalMapJoinProcCtx.getCurrentTask() |
Task<? extends Serializable> | PhysicalContext.getFetchTask() |
Task<? extends Serializable> | SortMergeJoinTaskDispatcher.processCurrentTask(MapRedTask currTask, ConditionalTask conditionalTask, Context context) |
Task<? extends Serializable> | CommonJoinTaskDispatcher.processCurrentTask(MapRedTask currTask, ConditionalTask conditionalTask, Context context) |
abstract Task<? extends Serializable> | AbstractJoinTaskDispatcher.processCurrentTask(MapRedTask currTask, ConditionalTask conditionalTask, Context context) |
Modifier and Type | Method and Description |
---|---|
protected List<Task<?>> | StageIDsRearranger.TaskTraverse.getChildTasks(Task<?> task) |
static List<Task> | StageIDsRearranger.getExplainOrder(HiveConf conf, List<Task<?>> tasks) |
static List<Task> | StageIDsRearranger.getFetchSources(List<Task<?>> tasks) |
List<Task<? extends Serializable>> | PhysicalContext.getRootTasks() |
protected List<Task<?>> | StageIDsRearranger.TaskTraverse.next(Task<?> task) |
Modifier and Type | Method and Description |
---|---|
protected void | StageIDsRearranger.TaskTraverse.accepted(Task<?> task) |
void | PhysicalContext.addToRootTask(Task<? extends Serializable> tsk) |
protected List<Task<?>> | StageIDsRearranger.TaskTraverse.getChildTasks(Task<?> task) |
protected boolean | StageIDsRearranger.TaskTraverse.isReady(Task<?> task) |
protected List<Task<?>> | StageIDsRearranger.TaskTraverse.next(Task<?> task) |
static void | GenMRSkewJoinProcessor.processSkewJoin(JoinOperator joinOp, Task<? extends Serializable> currTask, ParseContext parseCtx) Create tasks for processing skew joins. |
static void | GenSparkSkewJoinProcessor.processSkewJoin(JoinOperator joinOp, Task<? extends Serializable> currTask, ReduceWork reduceWork, ParseContext parseCtx) |
protected void | StageIDsRearranger.TaskTraverse.rejected(Task<?> child) |
void | PhysicalContext.removeFromRootTask(Task<? extends Serializable> tsk) |
protected void | AbstractJoinTaskDispatcher.replaceTask(Task<? extends Serializable> currTask, Task<? extends Serializable> newTask) |
protected void | AbstractJoinTaskDispatcher.replaceTask(Task<? extends Serializable> currTask, Task<? extends Serializable> newTask) |
protected void | AbstractJoinTaskDispatcher.replaceTaskWithConditionalTask(Task<? extends Serializable> currTask, ConditionalTask cndTsk) |
void | SkewJoinResolver.SkewJoinProcCtx.setCurrentTask(Task<? extends Serializable> currentTask) |
void | MapJoinResolver.LocalMapJoinProcCtx.setCurrentTask(Task<? extends Serializable> currentTask) |
void | PhysicalContext.setFetchTask(Task<? extends Serializable> fetchTask) |
void | StageIDsRearranger.TaskTraverse.traverse(Task<?> task) |
Modifier and Type | Method and Description |
---|---|
static List<Task> | StageIDsRearranger.getExplainOrder(HiveConf conf, List<Task<?>> tasks) |
static List<Task> | StageIDsRearranger.getFetchSources(List<Task<?>> tasks) |
void | PhysicalContext.setRootTasks(List<Task<? extends Serializable>> rootTasks) |
Constructor and Description |
---|
MapJoinResolver.LocalMapJoinProcCtx(Task<? extends Serializable> task, ParseContext parseCtx) |
PhysicalContext(HiveConf conf, ParseContext parseContext, Context context, List<Task<? extends Serializable>> rootTasks, Task<? extends Serializable> fetchTask) |
SkewJoinResolver.SkewJoinProcCtx(Task<? extends Serializable> task, ParseContext parseCtx) |
Constructor and Description |
---|
PhysicalContext(HiveConf conf, ParseContext parseContext, Context context, List<Task<? extends Serializable>> rootTasks, Task<? extends Serializable> fetchTask) |
Modifier and Type | Method and Description |
---|---|
Task<? extends Serializable> | IndexWhereProcCtx.getCurrentTask() |
Constructor and Description |
---|
IndexWhereProcCtx(Task<? extends Serializable> task, ParseContext parseCtx) |
Constructor and Description |
---|
SparkSkewJoinResolver.SparkSkewJoinProcCtx(Task<? extends Serializable> task, ParseContext parseCtx) |
Modifier and Type | Field and Description |
---|---|
List<Task<MoveWork>> | GenTezProcContext.moveTask |
List<Task<? extends Serializable>> | GenTezProcContext.rootTasks |
protected List<Task<? extends Serializable>> | BaseSemanticAnalyzer.rootTasks |
Modifier and Type | Method and Description |
---|---|
List<Task<? extends Serializable>> | IndexUpdater.generateUpdateTasks() |
List<Task<? extends Serializable>> | BaseSemanticAnalyzer.getRootTasks() |
Modifier and Type | Method and Description |
---|---|
void | ParseContext.replaceRootTask(Task<? extends Serializable> rootTask, List<? extends Task<? extends Serializable>> tasks) |
protected void | TezCompiler.setInputFormat(Task<? extends Serializable> task) |
protected abstract void | TaskCompiler.setInputFormat(Task<? extends Serializable> rootTask) |
protected void | MapReduceCompiler.setInputFormat(Task<? extends Serializable> task) |
Modifier and Type | Method and Description |
---|---|
void | TaskCompiler.compile(ParseContext pCtx, List<Task<? extends Serializable>> rootTasks, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
protected void | TezCompiler.decideExecMode(List<Task<? extends Serializable>> rootTasks, Context ctx, GlobalLimitCtx globalLimitCtx) |
protected abstract void | TaskCompiler.decideExecMode(List<Task<? extends Serializable>> rootTasks, Context ctx, GlobalLimitCtx globalLimitCtx) |
protected void | MapReduceCompiler.decideExecMode(List<Task<? extends Serializable>> rootTasks, Context ctx, GlobalLimitCtx globalLimitCtx) |
protected void | TaskCompiler.genColumnStatsTask(BaseSemanticAnalyzer.AnalyzeRewriteContext analyzeRewrite, List<LoadTableDesc> loadTableWork, List<LoadFileDesc> loadFileWork, List<Task<? extends Serializable>> rootTasks, int outerQueryLimit) A helper function to generate a column stats task on top of map-red task. |
protected void | TezCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, List<Task<MoveWork>> mvTask, Set<ReadEntity> inputs, Set<WriteEntity> outputs) |
protected void | TezCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, List<Task<MoveWork>> mvTask, Set<ReadEntity> inputs, Set<WriteEntity> outputs) |
protected abstract void | TaskCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, List<Task<MoveWork>> mvTask, Set<ReadEntity> inputs, Set<WriteEntity> outputs) |
protected abstract void | TaskCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, List<Task<MoveWork>> mvTask, Set<ReadEntity> inputs, Set<WriteEntity> outputs) |
protected void | MapReduceCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, List<Task<MoveWork>> mvTask, Set<ReadEntity> inputs, Set<WriteEntity> outputs) |
protected void | MapReduceCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, List<Task<MoveWork>> mvTask, Set<ReadEntity> inputs, Set<WriteEntity> outputs) |
protected void | TaskCompiler.getLeafTasks(List<Task<? extends Serializable>> rootTasks, HashSet<Task<? extends Serializable>> leaves) Find all leaf tasks of the list of root tasks. |
protected void | TaskCompiler.getLeafTasks(List<Task<? extends Serializable>> rootTasks, HashSet<Task<? extends Serializable>> leaves) Find all leaf tasks of the list of root tasks. |
ParseContext | TaskCompiler.getParseContext(ParseContext pCtx, List<Task<? extends Serializable>> rootTasks) Create a clone of the parse context. |
protected void | TezCompiler.optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, Context ctx) |
protected abstract void | TaskCompiler.optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, Context ctx) |
protected void | MapReduceCompiler.optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, Context ctx) |
void | HiveSemanticAnalyzerHook.postAnalyze(HiveSemanticAnalyzerHookContext context, List<Task<? extends Serializable>> rootTasks) Invoked after Hive performs its own semantic analysis on a statement (including optimization). |
void | AbstractSemanticAnalyzerHook.postAnalyze(HiveSemanticAnalyzerHookContext context, List<Task<? extends Serializable>> rootTasks) |
void | ParseContext.replaceRootTask(Task<? extends Serializable> rootTask, List<? extends Task<? extends Serializable>> tasks) |
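A hedged sketch of a post-analysis hook built on the postAnalyze signature above. The class name is illustrative, and the SemanticException in the throws clause is an assumption about the hook interface; the body only walks the root tasks produced by the compiler.

```java
import java.io.Serializable;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class TaskLoggingHook extends AbstractSemanticAnalyzerHook {
  @Override
  public void postAnalyze(HiveSemanticAnalyzerHookContext context,
                          List<Task<? extends Serializable>> rootTasks) throws SemanticException {
    for (Task<? extends Serializable> root : rootTasks) {
      // Each root task heads a tree; children are reachable via getChildTasks().
      System.out.println("root task: " + root.getClass().getSimpleName());
    }
  }
}
```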
Modifier and Type | Method and Description |
---|---|
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createCreateRoleTask(ASTNode ast, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createCreateRoleTask(ASTNode node, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createDropRoleTask(ASTNode ast, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createDropRoleTask(ASTNode node, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createGrantRoleTask(ASTNode ast, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createGrantRoleTask(ASTNode node, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createGrantTask(ASTNode ast, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createGrantTask(ASTNode node, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createRevokeRoleTask(ASTNode ast, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createRevokeRoleTask(ASTNode node, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createRevokeTask(ASTNode ast, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createRevokeTask(ASTNode node, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createSetRoleTask(String roleName, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createSetRoleTask(String roleName, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createShowCurrentRoleTask(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs, org.apache.hadoop.fs.Path resFile) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createShowCurrentRoleTask(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs, org.apache.hadoop.fs.Path resFile) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createShowGrantTask(ASTNode ast, org.apache.hadoop.fs.Path resultFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createShowGrantTask(ASTNode node, org.apache.hadoop.fs.Path resultFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createShowRoleGrantTask(ASTNode ast, org.apache.hadoop.fs.Path resultFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createShowRoleGrantTask(ASTNode node, org.apache.hadoop.fs.Path resultFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createShowRolePrincipalsTask(ASTNode ast, org.apache.hadoop.fs.Path resFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createShowRolePrincipalsTask(ASTNode ast, org.apache.hadoop.fs.Path resFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createShowRolesTask(ASTNode ast, org.apache.hadoop.fs.Path resFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createShowRolesTask(ASTNode ast, org.apache.hadoop.fs.Path resFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Modifier and Type | Field and Description |
---|---|
List<Task<MoveWork>> | GenSparkProcContext.moveTask |
List<Task<? extends Serializable>> | GenSparkProcContext.rootTasks |
Modifier and Type | Method and Description |
---|---|
protected void | SparkCompiler.setInputFormat(Task<? extends Serializable> task) |
Modifier and Type | Method and Description |
---|---|
protected void | SparkCompiler.decideExecMode(List<Task<? extends Serializable>> rootTasks, Context ctx, GlobalLimitCtx globalLimitCtx) |
protected void | SparkCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, List<Task<MoveWork>> mvTask, Set<ReadEntity> inputs, Set<WriteEntity> outputs) TODO: need to turn on rules that are commented out and add more if necessary. |
protected void | SparkCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, List<Task<MoveWork>> mvTask, Set<ReadEntity> inputs, Set<WriteEntity> outputs) TODO: need to turn on rules that are commented out and add more if necessary. |
protected void | SparkCompiler.optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, Context ctx) |
Constructor and Description |
---|
GenSparkProcContext(HiveConf conf, ParseContext parseContext, List<Task<MoveWork>> moveTask, List<Task<? extends Serializable>> rootTasks, Set<ReadEntity> inputs, Set<WriteEntity> outputs, Map<String,Operator<? extends OperatorDesc>> topOps) |
GenSparkProcContext(HiveConf conf, ParseContext parseContext, List<Task<MoveWork>> moveTask, List<Task<? extends Serializable>> rootTasks, Set<ReadEntity> inputs, Set<WriteEntity> outputs, Map<String,Operator<? extends OperatorDesc>> topOps) |
Modifier and Type | Method and Description |
---|---|
Task<? extends Serializable> | ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx.getCommonJoinTask() |
Task<? extends Serializable> | ExplainWork.getFetchTask() |
Task<? extends Serializable> | ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx.getNoSkewTask() |
Task | StatsWork.getSourceTask() |
protected Task<? extends Serializable> | ConditionalResolverCommonJoin.resolveMapJoinTask(ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx ctx, HiveConf conf) |
Modifier and Type | Method and Description |
---|---|
HashMap<org.apache.hadoop.fs.Path,Task<? extends Serializable>> | ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx.getDirToTaskMap() |
List<Task<? extends Serializable>> | ConditionalResolverMergeFiles.ConditionalResolverMergeFilesCtx.getListTasks() |
ArrayList<Task<? extends Serializable>> | ExplainWork.getRootTasks() |
List<Task<? extends Serializable>> | ConditionalResolverSkewJoin.getTasks(HiveConf conf, Object objCtx) |
List<Task<? extends Serializable>> | ConditionalResolverMergeFiles.getTasks(HiveConf conf, Object objCtx) |
List<Task<? extends Serializable>> | ConditionalResolverCommonJoin.getTasks(HiveConf conf, Object objCtx) |
List<Task<? extends Serializable>> | ConditionalResolver.getTasks(HiveConf conf, Object ctx) All conditional resolvers implement this interface. |
HashMap<Task<? extends Serializable>,Set<String>> | ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx.getTaskToAliases() |
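A hedged sketch of a trivial ConditionalResolver built on the getTasks(HiveConf, Object) signature above, assuming getTasks is the interface's only abstract method. It ignores the context object and always selects the first candidate; the real resolvers listed here (skew join, merge files, common join) inspect their context objects to decide which of the candidate tasks should run.

```java
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.plan.ConditionalResolver;

public class FirstTaskResolver implements ConditionalResolver {
  private List<Task<? extends Serializable>> candidates; // set when the plan is built (illustrative field)

  public void setCandidates(List<Task<? extends Serializable>> candidates) {
    this.candidates = candidates;
  }

  @Override
  public List<Task<? extends Serializable>> getTasks(HiveConf conf, Object objCtx) {
    // A ConditionalTask asks its resolver which of its candidate tasks should actually run.
    List<Task<? extends Serializable>> selected = new ArrayList<Task<? extends Serializable>>();
    if (candidates != null && !candidates.isEmpty()) {
      selected.add(candidates.get(0));
    }
    return selected;
  }
}
```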
Modifier and Type | Method and Description |
---|---|
void | ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx.setCommonJoinTask(Task<? extends Serializable> commonJoinTask) |
void | ExplainWork.setFetchTask(Task<? extends Serializable> fetchTask) |
void | ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx.setNoSkewTask(Task<? extends Serializable> noSkewTask) |
void | StatsWork.setSourceTask(Task sourceTask) |
Modifier and Type | Method and Description |
---|---|
void | ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx.setDirToTaskMap(HashMap<org.apache.hadoop.fs.Path,Task<? extends Serializable>> dirToTaskMap) |
void | ConditionalResolverMergeFiles.ConditionalResolverMergeFilesCtx.setListTasks(List<Task<? extends Serializable>> listTasks) |
void | ExplainWork.setRootTasks(ArrayList<Task<? extends Serializable>> rootTasks) |
void | ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx.setTaskToAliases(HashMap<Task<? extends Serializable>,Set<String>> taskToAliases) |
Constructor and Description |
---|
ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx(HashMap<org.apache.hadoop.fs.Path,Task<? extends Serializable>> dirToTaskMap, Task<? extends Serializable> noSkewTask) |
ExplainWork(org.apache.hadoop.fs.Path resFile, ParseContext pCtx, List<Task<? extends Serializable>> rootTasks, Task<? extends Serializable> fetchTask, String astStringTree, BaseSemanticAnalyzer analyzer, boolean extended, boolean formatted, boolean dependency, boolean logical, boolean authorize, boolean userLevelExplain, String cboInfo) |
Constructor and Description |
---|
ConditionalResolverMergeFiles.ConditionalResolverMergeFilesCtx(List<Task<? extends Serializable>> listTasks, String dir) |
ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx(HashMap<org.apache.hadoop.fs.Path,Task<? extends Serializable>> dirToTaskMap, Task<? extends Serializable> noSkewTask) |
ExplainWork(org.apache.hadoop.fs.Path resFile, ParseContext pCtx, List<Task<? extends Serializable>> rootTasks, Task<? extends Serializable> fetchTask, String astStringTree, BaseSemanticAnalyzer analyzer, boolean extended, boolean formatted, boolean dependency, boolean logical, boolean authorize, boolean userLevelExplain, String cboInfo) |
Modifier and Type | Method and Description |
---|---|
boolean | StatsAggregator.connect(org.apache.hadoop.conf.Configuration hconf, Task sourceTask) This method connects to the temporary storage. |
boolean | CounterStatsAggregatorTez.connect(org.apache.hadoop.conf.Configuration hconf, Task sourceTask) |
boolean | CounterStatsAggregatorSpark.connect(org.apache.hadoop.conf.Configuration hconf, Task sourceTask) |
boolean | CounterStatsAggregator.connect(org.apache.hadoop.conf.Configuration hconf, Task sourceTask) |
Modifier and Type | Method and Description |
---|---|
boolean | FSStatsAggregator.connect(org.apache.hadoop.conf.Configuration hconf, Task sourceTask) |
Modifier and Type | Method and Description |
---|---|
boolean | JDBCStatsAggregator.connect(org.apache.hadoop.conf.Configuration hiveconf, Task sourceTask) |
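A hedged sketch of the connect step shared by the aggregators above. The choice of FSStatsAggregator, its package, and its no-argument constructor are assumptions (Hive normally instantiates the aggregator reflectively from configuration); only the connect(Configuration, Task) signature from these tables is relied on.

```java
import org.apache.hadoop.conf.Configuration;

import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.stats.StatsAggregator;
import org.apache.hadoop.hive.ql.stats.fs.FSStatsAggregator;

public class StatsConnectSketch {
  public static boolean connectToStats(Configuration conf, Task sourceTask) {
    StatsAggregator aggregator = new FSStatsAggregator(); // assumed concrete aggregator
    // The boolean result reports whether the connection to the temporary
    // statistics storage succeeded.
    return aggregator.connect(conf, sourceTask);
  }
}
```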
Modifier and Type | Method and Description |
---|---|
protected void | HCatSemanticAnalyzerBase.authorizeDDL(HiveSemanticAnalyzerHookContext context, List<Task<? extends Serializable>> rootTasks) Checks for the given rootTasks, and calls authorizeDDLWork() for each DDLWork to be authorized. |
void | HCatSemanticAnalyzerBase.postAnalyze(HiveSemanticAnalyzerHookContext context, List<Task<? extends Serializable>> rootTasks) |
void | HCatSemanticAnalyzer.postAnalyze(HiveSemanticAnalyzerHookContext context, List<Task<? extends Serializable>> rootTasks) |