**Packages that use Context**

Package | Description |
---|---|
org.apache.hadoop.hive.ql | |
org.apache.hadoop.hive.ql.exec | Hive QL execution tasks, operators, functions and other handlers. |
org.apache.hadoop.hive.ql.exec.mr | |
org.apache.hadoop.hive.ql.exec.spark | |
org.apache.hadoop.hive.ql.exec.tez | |
org.apache.hadoop.hive.ql.exec.tez.monitoring | |
org.apache.hadoop.hive.ql.lockmgr | Hive Lock Manager interfaces and some custom implementations. |
org.apache.hadoop.hive.ql.optimizer.physical | |
org.apache.hadoop.hive.ql.parse | |
org.apache.hadoop.hive.ql.parse.spark | |
org.apache.hadoop.hive.ql.plan | |
**Uses of Context in org.apache.hadoop.hive.ql**

Modifier and Type | Method and Description |
---|---|
Context | DriverContext.getCtx() |

Constructor and Description |
---|
Driver(HiveConf conf, Context ctx) |
DriverContext(Context ctx) |
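The sketch below wires these pieces together: a Driver handed an existing Context, and a DriverContext wrapping the same object. It is a minimal sketch, assuming Context exposes a Context(Configuration) constructor and that a bare new HiveConf() is an acceptable configuration; real callers would go through a configured session.

```java
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.DriverContext;

public class ContextWiringSketch {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf();
    Context ctx = new Context(conf);        // assumed Context(Configuration) constructor

    // Driver can be handed a pre-built Context instead of creating its own.
    Driver driver = new Driver(conf, ctx);

    // DriverContext wraps the same Context for use by execution-time code.
    DriverContext driverContext = new DriverContext(ctx);
    System.out.println(driverContext.getCtx() == ctx);  // true: getCtx() returns the wrapped Context
  }
}
```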
**Uses of Context in org.apache.hadoop.hive.ql.exec**

Modifier and Type | Method and Description |
---|---|
static List<org.apache.hadoop.fs.Path> | Utilities.getInputPaths(org.apache.hadoop.mapred.JobConf job, MapWork work, org.apache.hadoop.fs.Path hiveScratchDir, Context ctx, boolean skipDummy) Computes a list of all input paths needed to compute the given MapWork. |
static org.apache.hadoop.fs.ContentSummary | Utilities.getInputSummary(Context ctx, MapWork work, org.apache.hadoop.fs.PathFilter filter) Calculate the total size of input files. |
static boolean | Utilities.isEmptyPath(org.apache.hadoop.mapred.JobConf job, org.apache.hadoop.fs.Path dirPath, Context ctx) |
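As a usage sketch, the input-sizing helper above can be used to measure a MapWork's inputs before choosing an execution strategy. This assumes the Context(Configuration) constructor exists and that a null PathFilter means "no filtering"; the MapWork would come from a compiled plan and is left abstract here.

```java
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.plan.MapWork;

public class InputSummarySketch {
  // Returns the total byte size of the inputs feeding `work`.
  static long totalInputSize(HiveConf conf, MapWork work) throws Exception {
    Context ctx = new Context(conf);  // assumed Context(Configuration) constructor
    ContentSummary summary = Utilities.getInputSummary(ctx, work, null /* no filter, assumed OK */);
    return summary.getLength();       // aggregate length in bytes
  }
}
```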
**Uses of Context in org.apache.hadoop.hive.ql.exec.mr**

Modifier and Type | Method and Description |
---|---|
static String | ExecDriver.generateCmdLine(HiveConf hconf, Context ctx) Given a Hive Configuration object, generate a command line fragment for passing such configuration information to ExecDriver. |
int | HadoopJobExecHelper.progress(org.apache.hadoop.mapred.RunningJob rj, org.apache.hadoop.mapred.JobClient jc, Context ctx) |
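A short sketch of the generateCmdLine call above, which turns a HiveConf into a command-line fragment for launching ExecDriver in a child JVM. It assumes the Context(Configuration) constructor; the exact shape of the returned fragment is an implementation detail of ExecDriver.

```java
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;

public class CmdLineSketch {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf();
    Context ctx = new Context(conf);  // assumed Context(Configuration) constructor
    // Serialize the configuration into a fragment a child ExecDriver can consume.
    String fragment = ExecDriver.generateCmdLine(conf, ctx);
    System.out.println(fragment);
  }
}
```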
**Uses of Context in org.apache.hadoop.hive.ql.exec.spark**

Constructor and Description |
---|
SparkPlanGenerator(org.apache.spark.api.java.JavaSparkContext sc, Context context, org.apache.hadoop.mapred.JobConf jobConf, org.apache.hadoop.fs.Path scratchDir, SparkReporter sparkReporter) |
**Uses of Context in org.apache.hadoop.hive.ql.exec.tez**

Modifier and Type | Method and Description |
---|---|
org.apache.tez.dag.api.Vertex | DagUtils.createVertex(org.apache.hadoop.mapred.JobConf conf, BaseWork work, org.apache.hadoop.fs.Path scratchDir, org.apache.hadoop.yarn.api.records.LocalResource appJarLr, List<org.apache.hadoop.yarn.api.records.LocalResource> additionalLr, org.apache.hadoop.fs.FileSystem fileSystem, Context ctx, boolean hasChildren, TezWork tezWork, TezWork.VertexType vertexType) Create a vertex from a given work object. |
org.apache.hadoop.mapred.JobConf | DagUtils.initializeVertexConf(org.apache.hadoop.mapred.JobConf conf, Context context, BaseWork work) Creates and initializes the JobConf object for a given BaseWork object. |
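A hedged sketch of initializeVertexConf: deriving the vertex-level JobConf for one BaseWork node of a Tez DAG before handing it to createVertex. Obtaining the DagUtils instance via a static getInstance() accessor is an assumption here.

```java
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.exec.tez.DagUtils;
import org.apache.hadoop.hive.ql.plan.BaseWork;

public class VertexConfSketch {
  // Derive the per-vertex JobConf that createVertex(...) later consumes.
  static JobConf vertexConf(JobConf baseConf, Context ctx, BaseWork work) {
    DagUtils dagUtils = DagUtils.getInstance();  // assumed accessor
    return dagUtils.initializeVertexConf(baseConf, ctx, work);
  }
}
```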
**Uses of Context in org.apache.hadoop.hive.ql.exec.tez.monitoring**

Constructor and Description |
---|
TezJobMonitor(Map<String,BaseWork> workMap, org.apache.tez.dag.api.client.DAGClient dagClient, HiveConf conf, org.apache.tez.dag.api.DAG dag, Context ctx) |
**Uses of Context in org.apache.hadoop.hive.ql.lockmgr**

Modifier and Type | Method and Description |
---|---|
void | DbTxnManager.acquireLocks(QueryPlan plan, Context ctx, String username) |
void | HiveTxnManager.acquireLocks(QueryPlan plan, Context ctx, String username) Acquire all of the locks needed by a query. |
void | HiveTxnManager.acquireLocks(QueryPlan plan, Context ctx, String username, Driver.LockedDriverState lDrvState) Acquire all of the locks needed by a query. |
long | DbTxnManager.openTxn(Context ctx, String user) |
long | HiveTxnManager.openTxn(Context ctx, String user) Open a new transaction. |
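The transaction-manager methods above follow an open/lock/commit lifecycle. A minimal sketch, assuming the manager is reachable through SessionState.get().getTxnMgr() and that commitTxn()/rollbackTxn() complete the lifecycle; the QueryPlan comes from a compiled query and is elided.

```java
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
import org.apache.hadoop.hive.ql.session.SessionState;

public class TxnSketch {
  static void runInTxn(QueryPlan plan, Context ctx, String user) throws Exception {
    HiveTxnManager txnMgr = SessionState.get().getTxnMgr();  // assumed accessor
    long txnId = txnMgr.openTxn(ctx, user);  // open a new transaction
    try {
      txnMgr.acquireLocks(plan, ctx, user);  // acquire all locks the query needs
      // ... execute the plan under txnId ...
      txnMgr.commitTxn();
    } catch (Exception e) {
      txnMgr.rollbackTxn();
      throw e;
    }
  }
}
```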
**Uses of Context in org.apache.hadoop.hive.ql.optimizer.physical**

Modifier and Type | Method and Description |
---|---|
Context | PhysicalContext.getContext() |

Modifier and Type | Method and Description |
---|---|
long | AbstractJoinTaskDispatcher.getTotalKnownInputSize(Context context, MapWork currWork, Map<org.apache.hadoop.fs.Path,ArrayList<String>> pathToAliases, HashMap<String,Long> aliasToSize) |
Task<? extends Serializable> | CommonJoinTaskDispatcher.processCurrentTask(MapRedTask currTask, ConditionalTask conditionalTask, Context context) |
Task<? extends Serializable> | SortMergeJoinTaskDispatcher.processCurrentTask(MapRedTask currTask, ConditionalTask conditionalTask, Context context) |
abstract Task<? extends Serializable> | AbstractJoinTaskDispatcher.processCurrentTask(MapRedTask currTask, ConditionalTask conditionalTask, Context context) |
void | PhysicalContext.setContext(Context context) |

Constructor and Description |
---|
PhysicalContext(HiveConf conf, ParseContext parseContext, Context context, List<Task<? extends Serializable>> rootTasks, Task<? extends Serializable> fetchTask) |
**Uses of Context in org.apache.hadoop.hive.ql.parse**

Modifier and Type | Field and Description |
---|---|
protected Context | BaseSemanticAnalyzer.ctx |

Modifier and Type | Method and Description |
---|---|
Context | ParseContext.getContext() |
Context | EximUtil.SemanticAnalyzerWrapperContext.getCtx() |
Modifier and Type | Method and Description |
---|---|
void | BaseSemanticAnalyzer.analyze(ASTNode ast, Context ctx) |
void | ColumnStatsSemanticAnalyzer.analyze(ASTNode ast, Context origCtx) |
protected abstract void | TaskCompiler.decideExecMode(List<Task<? extends Serializable>> rootTasks, Context ctx, GlobalLimitCtx globalLimitCtx) |
protected void | TezCompiler.decideExecMode(List<Task<? extends Serializable>> rootTasks, Context ctx, GlobalLimitCtx globalLimitCtx) |
protected void | MapReduceCompiler.decideExecMode(List<Task<? extends Serializable>> rootTasks, Context ctx, GlobalLimitCtx globalLimitCtx) |
void | BaseSemanticAnalyzer.initCtx(Context ctx) |
protected abstract void | TaskCompiler.optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, Context ctx) |
protected void | TezCompiler.optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, Context ctx) |
protected void | MapReduceCompiler.optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, Context ctx) |
static ASTNode | ParseUtils.parse(String command, Context ctx) Parses the Hive query. |
ASTNode | ParseDriver.parse(String command, Context ctx) |
static ASTNode | ParseUtils.parse(String command, Context ctx, String viewFullyQualifiedName) Parses the Hive query. |
ASTNode | ParseDriver.parse(String command, Context ctx, String viewFullyQualifiedName) Parses a command, optionally assigning the parser's token stream to the given context. |
ASTNode | ParseDriver.parseSelect(String command, Context ctx) |
static void | ExportSemanticAnalyzer.prepareExport(ASTNode ast, URI toURI, BaseSemanticAnalyzer.TableSpec ts, ReplicationSpec replicationSpec, Hive db, HiveConf conf, Context ctx, List<Task<? extends Serializable>> rootTasks, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs, org.slf4j.Logger LOG) |
protected static ASTNode | SemanticAnalyzer.rewriteASTWithMaskAndFilter(TableMask tableMask, ASTNode ast, org.antlr.runtime.TokenRewriteStream tokenRewriteStream, Context ctx, Hive db, Map<String,Table> tabNameToTabObject, Set<Integer> ignoredTokens) |
void | ParseContext.setContext(Context ctx) |
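As a usage sketch, the static ParseUtils.parse entry point above turns a HiveQL string into an ASTNode, with the Context receiving the parser's token rewrite stream as described. It assumes the Context(Configuration) constructor and that ASTNode.dump() pretty-prints the tree.

```java
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseUtils;

public class ParseSketch {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf();
    Context ctx = new Context(conf);  // assumed Context(Configuration) constructor
    ASTNode ast = ParseUtils.parse("SELECT key, count(1) FROM src GROUP BY key", ctx);
    System.out.println(ast.dump());   // print the parsed AST
  }
}
```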
**Uses of Context in org.apache.hadoop.hive.ql.parse.spark**

Modifier and Type | Method and Description |
---|---|
protected void | SparkCompiler.decideExecMode(List<Task<? extends Serializable>> rootTasks, Context ctx, GlobalLimitCtx globalLimitCtx) |
protected void | SparkCompiler.optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, Context ctx) |
**Uses of Context in org.apache.hadoop.hive.ql.plan**

Modifier and Type | Method and Description |
---|---|
Context | ExplainSQRewriteWork.getCtx() |

Constructor and Description |
---|
ExplainSQRewriteWork(String resFile, QB qb, ASTNode ast, Context ctx) |
Copyright © 2021 The Apache Software Foundation. All rights reserved.