Package | Description |
---|---|
org.apache.hadoop.hive.ql | |
org.apache.hadoop.hive.ql.exec |
Hive QL execution tasks, operators, functions and other handlers.
|
org.apache.hadoop.hive.ql.exec.mr | |
org.apache.hadoop.hive.ql.exec.repl.bootstrap.load.util | |
org.apache.hadoop.hive.ql.exec.spark | |
org.apache.hadoop.hive.ql.exec.tez | |
org.apache.hadoop.hive.ql.exec.tez.monitoring | |
org.apache.hadoop.hive.ql.hooks | |
org.apache.hadoop.hive.ql.lockmgr |
Hive Lock Manager interfaces and some custom implementations
|
org.apache.hadoop.hive.ql.optimizer.physical | |
org.apache.hadoop.hive.ql.parse | |
org.apache.hadoop.hive.ql.parse.repl.load.message | |
org.apache.hadoop.hive.ql.parse.spark | |
org.apache.hadoop.hive.ql.plan | |
org.apache.hadoop.hive.ql.reexec |
Modifier and Type | Method and Description |
---|---|
Context |
Driver.getContext() |
Context |
IDriver.getContext() |
Context |
DriverContext.getCtx() |
Modifier and Type | Method and Description |
---|---|
void |
Context.addRewrittenStatementContext(Context context) |
Constructor and Description |
---|
Context(Context ctx) |
Driver(HiveConf conf,
Context ctx,
LineageState lineageState) |
DriverContext(Context ctx) |
Modifier and Type | Method and Description |
---|---|
static List<org.apache.hadoop.fs.Path> |
Utilities.getInputPaths(org.apache.hadoop.mapred.JobConf job,
MapWork work,
org.apache.hadoop.fs.Path hiveScratchDir,
Context ctx,
boolean skipDummy)
Computes a list of all input paths needed to compute the given MapWork.
|
static org.apache.hadoop.fs.ContentSummary |
Utilities.getInputSummary(Context ctx,
MapWork work,
org.apache.hadoop.fs.PathFilter filter)
Calculate the total size of input files.
|
static boolean |
Utilities.isEmptyPath(org.apache.hadoop.mapred.JobConf job,
org.apache.hadoop.fs.Path dirPath,
Context ctx) |
Modifier and Type | Method and Description |
---|---|
static String |
ExecDriver.generateCmdLine(HiveConf hconf,
Context ctx)
Given a Hive Configuration object - generate a command line fragment for passing such
configuration information to ExecDriver.
|
int |
HadoopJobExecHelper.progress(org.apache.hadoop.mapred.RunningJob rj,
org.apache.hadoop.mapred.JobClient jc,
Context ctx) |
Modifier and Type | Field and Description |
---|---|
Context |
Context.nestedContext |
Constructor and Description |
---|
Context(String dumpDirectory,
HiveConf hiveConf,
Hive hiveDb,
LineageState lineageState,
Context nestedContext) |
Constructor and Description |
---|
SparkPlanGenerator(org.apache.spark.api.java.JavaSparkContext sc,
Context context,
org.apache.hadoop.mapred.JobConf jobConf,
org.apache.hadoop.fs.Path scratchDir,
SparkReporter sparkReporter) |
Modifier and Type | Method and Description |
---|---|
org.apache.tez.dag.api.Vertex |
DagUtils.createVertex(org.apache.hadoop.mapred.JobConf conf,
BaseWork work,
org.apache.hadoop.fs.Path scratchDir,
org.apache.hadoop.fs.FileSystem fileSystem,
Context ctx,
boolean hasChildren,
TezWork tezWork,
TezWork.VertexType vertexType,
Map<String,org.apache.hadoop.yarn.api.records.LocalResource> localResources)
Create a vertex from a given work object.
|
org.apache.hadoop.mapred.JobConf |
DagUtils.initializeVertexConf(org.apache.hadoop.mapred.JobConf conf,
Context context,
BaseWork work)
Creates and initializes the JobConf object for a given BaseWork object.
|
Constructor and Description |
---|
TezJobMonitor(List<BaseWork> topSortedWorks,
org.apache.tez.dag.api.client.DAGClient dagClient,
HiveConf conf,
org.apache.tez.dag.api.DAG dag,
Context ctx) |
Modifier and Type | Method and Description |
---|---|
Context |
PrivateHookContext.getContext() |
Constructor and Description |
---|
PrivateHookContext(QueryPlan queryPlan,
QueryState queryState,
Map<String,org.apache.hadoop.fs.ContentSummary> inputPathToContentSummary,
String userName,
String ipAddress,
String hiveInstanceAddress,
String operationId,
String sessionId,
String threadId,
boolean isHiveServerQuery,
PerfLogger perfLogger,
QueryInfo queryInfo,
Context ctx) |
Modifier and Type | Method and Description |
---|---|
void |
DbTxnManager.acquireLocks(QueryPlan plan,
Context ctx,
String username) |
void |
HiveTxnManager.acquireLocks(QueryPlan plan,
Context ctx,
String username)
Acquire all of the locks needed by a query.
|
void |
HiveTxnManager.acquireLocks(QueryPlan plan,
Context ctx,
String username,
Driver.LockedDriverState lDrvState)
Acquire all of the locks needed by a query.
|
long |
DbTxnManager.openTxn(Context ctx,
String user) |
long |
HiveTxnManager.openTxn(Context ctx,
String user)
Open a new transaction.
|
Modifier and Type | Method and Description |
---|---|
Context |
PhysicalContext.getContext() |
Modifier and Type | Method and Description |
---|---|
long |
AbstractJoinTaskDispatcher.getTotalKnownInputSize(Context context,
MapWork currWork,
Map<org.apache.hadoop.fs.Path,ArrayList<String>> pathToAliases,
HashMap<String,Long> aliasToSize) |
Task<? extends Serializable> |
CommonJoinTaskDispatcher.processCurrentTask(MapRedTask currTask,
ConditionalTask conditionalTask,
Context context) |
Task<? extends Serializable> |
SortMergeJoinTaskDispatcher.processCurrentTask(MapRedTask currTask,
ConditionalTask conditionalTask,
Context context) |
abstract Task<? extends Serializable> |
AbstractJoinTaskDispatcher.processCurrentTask(MapRedTask currTask,
ConditionalTask conditionalTask,
Context context) |
void |
PhysicalContext.setContext(Context context) |
Constructor and Description |
---|
PhysicalContext(HiveConf conf,
ParseContext parseContext,
Context context,
List<Task<? extends Serializable>> rootTasks,
Task<? extends Serializable> fetchTask) |
Modifier and Type | Field and Description |
---|---|
protected Context |
BaseSemanticAnalyzer.ctx |
Modifier and Type | Method and Description |
---|---|
Context |
ParseContext.getContext() |
Context |
EximUtil.SemanticAnalyzerWrapperContext.getCtx() |
Modifier and Type | Method and Description |
---|---|
void |
BaseSemanticAnalyzer.analyze(ASTNode ast,
Context ctx) |
void |
ColumnStatsSemanticAnalyzer.analyze(ASTNode ast,
Context origCtx) |
protected abstract void |
TaskCompiler.decideExecMode(List<Task<? extends Serializable>> rootTasks,
Context ctx,
GlobalLimitCtx globalLimitCtx) |
protected void |
TezCompiler.decideExecMode(List<Task<? extends Serializable>> rootTasks,
Context ctx,
GlobalLimitCtx globalLimitCtx) |
protected void |
MapReduceCompiler.decideExecMode(List<Task<? extends Serializable>> rootTasks,
Context ctx,
GlobalLimitCtx globalLimitCtx) |
void |
BaseSemanticAnalyzer.initCtx(Context ctx) |
protected abstract void |
TaskCompiler.optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
Context ctx) |
protected void |
TezCompiler.optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
Context ctx) |
protected void |
MapReduceCompiler.optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
Context ctx) |
static ASTNode |
ParseUtils.parse(String command,
Context ctx)
Parses the Hive query.
|
ASTNode |
ParseDriver.parse(String command,
Context ctx) |
static ASTNode |
ParseUtils.parse(String command,
Context ctx,
String viewFullyQualifiedName)
Parses the Hive query.
|
ASTNode |
ParseDriver.parse(String command,
Context ctx,
String viewFullyQualifiedName)
Parses a command, optionally assigning the parser's token stream to the
given context.
|
ASTNode |
ParseDriver.parseSelect(String command,
Context ctx) |
protected static ASTNode |
SemanticAnalyzer.rewriteASTWithMaskAndFilter(TableMask tableMask,
ASTNode ast,
org.antlr.runtime.TokenRewriteStream tokenRewriteStream,
Context ctx,
Hive db,
Map<String,Table> tabNameToTabObject,
Set<Integer> ignoredTokens) |
void |
ParseContext.setContext(Context ctx) |
Modifier and Type | Method and Description |
---|---|
Context |
MessageHandler.Context.getNestedContext() |
Constructor and Description |
---|
Context(String dbName,
String tableName,
String location,
Task<? extends Serializable> precursor,
DumpMetaData dmd,
HiveConf hiveConf,
Hive db,
Context nestedContext,
org.slf4j.Logger log) |
Modifier and Type | Method and Description |
---|---|
protected void |
SparkCompiler.decideExecMode(List<Task<? extends Serializable>> rootTasks,
Context ctx,
GlobalLimitCtx globalLimitCtx) |
protected void |
SparkCompiler.optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks,
ParseContext pCtx,
Context ctx) |
Modifier and Type | Method and Description |
---|---|
Context |
ExplainSQRewriteWork.getCtx() |
Constructor and Description |
---|
ExplainSQRewriteWork(String resFile,
QB qb,
ASTNode ast,
Context ctx) |
Modifier and Type | Method and Description |
---|---|
Context |
ReExecDriver.getContext() |
Copyright © 2022 The Apache Software Foundation. All rights reserved.