public class Context extends Object
Modifier and Type | Class and Description |
---|---|
static class |
Context.DestClausePrefix |
static class |
Context.Operation
These ops require special handling in various places
(note that Insert into Acid table is in OTHER category)
|
Modifier and Type | Field and Description |
---|---|
protected String |
cboInfo |
protected boolean |
cboSucceeded |
protected String |
cmd |
protected ExplainConfiguration |
explainConfig |
static String |
EXT_PREFIX |
protected List<HiveLock> |
hiveLocks |
protected HiveTxnManager |
hiveTxnManager |
protected int |
pathid |
Modifier | Constructor and Description |
---|---|
|
Context(org.apache.hadoop.conf.Configuration conf) |
protected |
Context(Context ctx) |
Modifier and Type | Method and Description |
---|---|
void |
addCS(String path,
org.apache.hadoop.fs.ContentSummary cs) |
Context.DestClausePrefix |
addDestNamePrefix(int pos,
Context.DestClausePrefix prefix)
Will make SemanticAnalyzer.Phase1Ctx#dest in subtree rooted at 'tree' use 'prefix'.
|
void |
addMaterializedTable(String cteName,
Table table) |
void |
addRewrittenStatementContext(Context context) |
void |
addViewTokenRewriteStream(String viewFullyQualifiedName,
org.antlr.runtime.TokenRewriteStream tokenRewriteStream) |
void |
checkHeartbeaterLockException() |
void |
clear() |
static String |
generateExecutionId()
Generate a unique executionId.
|
String |
getCboInfo() |
String |
getCmd()
Find the original query command.
|
org.apache.hadoop.conf.Configuration |
getConf() |
org.apache.hadoop.fs.ContentSummary |
getCS(org.apache.hadoop.fs.Path path) |
org.apache.hadoop.fs.ContentSummary |
getCS(String path) |
Context.DestClausePrefix |
getDestNamePrefix(ASTNode curNode,
QB queryBlock)
The suffix is always relative to a given ASTNode
|
String |
getExecutionId() |
int |
getExecutionIndex() |
ExplainConfiguration.AnalyzeState |
getExplainAnalyze() |
ExplainConfiguration |
getExplainConfig() |
boolean |
getExplainLogical()
Find whether the current query is a logical explain query
|
org.apache.hadoop.fs.Path |
getExternalTmpPath(org.apache.hadoop.fs.Path path)
Get a path to store tmp data destined for external Path.
|
org.apache.hadoop.fs.Path |
getExtTmpPathRelTo(org.apache.hadoop.fs.Path path)
This is similar to getExternalTmpPath() with the difference being that this method returns a temp path
within the passed-in URI, whereas getExternalTmpPath() ignores the passed-in path and returns a temp
path within /tmp
|
Map<String,org.apache.hadoop.fs.Path> |
getFsScratchDirs() |
DbTxnManager.Heartbeater |
getHeartbeater() |
List<HiveLock> |
getHiveLocks() |
HiveTxnManager |
getHiveTxnManager() |
boolean |
getIsUpdateDeleteMerge() |
Map<LoadTableDesc,WriteEntity> |
getLoadTableOutputMap() |
org.apache.hadoop.fs.Path |
getLocalScratchDir(boolean mkdir)
Create a local scratch directory on demand and return it.
|
org.apache.hadoop.fs.Path |
getLocalTmpPath()
Get a tmp path on local host to store intermediate data.
|
Table |
getMaterializedTable(String cteName) |
org.apache.hadoop.fs.Path |
getMRScratchDir()
Create a map-reduce scratch directory on demand and return it.
|
org.apache.hadoop.fs.Path |
getMRTmpPath()
Get a path to store map-reduce intermediate data in.
|
org.apache.hadoop.fs.Path |
getMRTmpPath(URI uri) |
CompilationOpContext |
getOpContext() |
Map<WriteEntity,List<HiveLockObj>> |
getOutputLockObjects() |
Map<String,org.apache.hadoop.fs.ContentSummary> |
getPathToCS() |
PlanMapper |
getPlanMapper() |
org.apache.hadoop.fs.Path |
getResDir() |
org.apache.hadoop.fs.Path |
getResFile() |
AtomicInteger |
getSequencer() |
StatsSource |
getStatsSource() |
DataInput |
getStream() |
org.apache.hadoop.fs.Path |
getTempDirForFinalJobPath(org.apache.hadoop.fs.Path path)
Create a temporary directory depending on the path specified.
|
org.apache.hadoop.fs.Path |
getTempDirForInterimJobPath(org.apache.hadoop.fs.Path path)
Create a temporary directory depending on the path specified.
|
Table |
getTempTableForLoad() |
org.antlr.runtime.TokenRewriteStream |
getTokenRewriteStream() |
org.antlr.runtime.TokenRewriteStream |
getViewTokenRewriteStream(String viewFullyQualifiedName) |
WmContext |
getWmContext() |
boolean |
isCboSucceeded() |
boolean |
isExplainPlan() |
boolean |
isExplainSkipExecution()
Find whether we should execute the current query due to explain
|
boolean |
isHDFSCleanup() |
boolean |
isLoadingMaterializedView() |
boolean |
isLocalOnlyExecutionMode()
Does Hive want to run tasks entirely on the local machine
(where the query is being compiled)?
Today this translates into running hadoop jobs locally
|
boolean |
isMRTmpFileURI(String uriStr)
Check if path is for intermediate data
|
boolean |
isNeedLockMgr() |
boolean |
isSkipTableMasking() |
void |
removeMaterializedCTEs()
Remove any created directories for CTEs.
|
void |
removeScratchDir()
Remove any created scratch directories.
|
void |
resetOpContext() |
void |
resetStream() |
void |
restoreOriginalTracker() |
void |
setCboInfo(String cboInfo) |
void |
setCboSucceeded(boolean cboSucceeded) |
void |
setCmd(String cmd)
Set the original query command.
|
void |
setConf(HiveConf conf) |
void |
setExecutionIndex(int executionIndex) |
void |
setExplainConfig(ExplainConfiguration explainConfig) |
void |
setExplainPlan(boolean t) |
void |
setHDFSCleanup(boolean isHDFSCleanup) |
void |
setHeartbeater(DbTxnManager.Heartbeater heartbeater) |
void |
setHiveLocks(List<HiveLock> hiveLocks) |
void |
setHiveTxnManager(HiveTxnManager txnMgr) |
void |
setIsLoadingMaterializedView(boolean isLoadingMaterializedView) |
void |
setIsUpdateDeleteMerge(boolean isUpdate) |
void |
setNeedLockMgr(boolean needLockMgr) |
void |
setOpContext(CompilationOpContext opContext) |
void |
setOperation(Context.Operation operation) |
void |
setOriginalTracker(String originalTracker) |
void |
setResDir(org.apache.hadoop.fs.Path resDir) |
void |
setResFile(org.apache.hadoop.fs.Path resFile) |
void |
setSkipTableMasking(boolean skipTableMasking) |
void |
setStatsSource(StatsSource statsSource) |
void |
setTempTableForLoad(Table tempTableForLoad) |
void |
setTokenRewriteStream(org.antlr.runtime.TokenRewriteStream tokenRewriteStream)
Set the token rewrite stream being used to parse the current top-level SQL
statement.
|
void |
setWmContext(WmContext wmContext) |
protected int pathid
protected ExplainConfiguration explainConfig
protected String cboInfo
protected boolean cboSucceeded
protected String cmd
protected HiveTxnManager hiveTxnManager
public static final String EXT_PREFIX
public Context(org.apache.hadoop.conf.Configuration conf) throws IOException
IOException
protected Context(Context ctx)
public void setOperation(Context.Operation operation)
public WmContext getWmContext()
public void setWmContext(WmContext wmContext)
public Context.DestClausePrefix getDestNamePrefix(ASTNode curNode, QB queryBlock)
public Context.DestClausePrefix addDestNamePrefix(int pos, Context.DestClausePrefix prefix)
pos
- ordinal index of specific TOK_INSERT as child of TOK_QUERY
public Map<LoadTableDesc,WriteEntity> getLoadTableOutputMap()
public Map<WriteEntity,List<HiveLockObj>> getOutputLockObjects()
public boolean isExplainSkipExecution()
public boolean getExplainLogical()
public ExplainConfiguration.AnalyzeState getExplainAnalyze()
public void setCmd(String cmd)
cmd
- the original query command string
public String getCmd()
public org.apache.hadoop.fs.Path getLocalScratchDir(boolean mkdir)
public org.apache.hadoop.fs.Path getMRScratchDir()
public org.apache.hadoop.fs.Path getTempDirForInterimJobPath(org.apache.hadoop.fs.Path path)
- If the path is on a blobstore and BlobStorageUtils.areOptimizationsEnabled(Configuration)
are both true, then return a path on
the blobstore.
- If path is on HDFS, then create a staging directory inside the path
path
- Path used to verify the Filesystem to use for temporary directory
public org.apache.hadoop.fs.Path getTempDirForFinalJobPath(org.apache.hadoop.fs.Path path)
path
- Path used to verify the Filesystem to use for temporary directory
public void removeScratchDir()
public void removeMaterializedCTEs()
public boolean isMRTmpFileURI(String uriStr)
public org.apache.hadoop.fs.Path getMRTmpPath(URI uri)
public org.apache.hadoop.fs.Path getMRTmpPath()
public org.apache.hadoop.fs.Path getLocalTmpPath()
public org.apache.hadoop.fs.Path getExternalTmpPath(org.apache.hadoop.fs.Path path)
path
- external Path to which the tmp data has to be eventually moved
public org.apache.hadoop.fs.Path getExtTmpPathRelTo(org.apache.hadoop.fs.Path path)
public org.apache.hadoop.fs.Path getResFile()
public void setResFile(org.apache.hadoop.fs.Path resFile)
resFile
- the resFile to set
public org.apache.hadoop.fs.Path getResDir()
public void setResDir(org.apache.hadoop.fs.Path resDir)
resDir
- the resDir to set
public void clear() throws IOException
IOException
public DataInput getStream()
public void resetStream()
public void setTokenRewriteStream(org.antlr.runtime.TokenRewriteStream tokenRewriteStream)
tokenRewriteStream
- the stream being used
public org.antlr.runtime.TokenRewriteStream getTokenRewriteStream()
public void addViewTokenRewriteStream(String viewFullyQualifiedName, org.antlr.runtime.TokenRewriteStream tokenRewriteStream)
public org.antlr.runtime.TokenRewriteStream getViewTokenRewriteStream(String viewFullyQualifiedName)
public static String generateExecutionId()
public boolean isLocalOnlyExecutionMode()
public HiveTxnManager getHiveTxnManager()
public void setHiveTxnManager(HiveTxnManager txnMgr)
public void setOriginalTracker(String originalTracker)
public void restoreOriginalTracker()
public void addRewrittenStatementContext(Context context)
public void addCS(String path, org.apache.hadoop.fs.ContentSummary cs)
public org.apache.hadoop.fs.ContentSummary getCS(org.apache.hadoop.fs.Path path)
public org.apache.hadoop.fs.ContentSummary getCS(String path)
public org.apache.hadoop.conf.Configuration getConf()
public boolean isHDFSCleanup()
public void setHDFSCleanup(boolean isHDFSCleanup)
isHDFSCleanup
- the isHDFSCleanup to set
public boolean isNeedLockMgr()
public void setNeedLockMgr(boolean needLockMgr)
public String getCboInfo()
public void setCboInfo(String cboInfo)
public boolean isCboSucceeded()
public void setCboSucceeded(boolean cboSucceeded)
public AtomicInteger getSequencer()
public CompilationOpContext getOpContext()
public void setOpContext(CompilationOpContext opContext)
public DbTxnManager.Heartbeater getHeartbeater()
public void setHeartbeater(DbTxnManager.Heartbeater heartbeater)
public void checkHeartbeaterLockException() throws LockException
LockException
public boolean isSkipTableMasking()
public void setSkipTableMasking(boolean skipTableMasking)
public ExplainConfiguration getExplainConfig()
public boolean isExplainPlan()
public void setExplainPlan(boolean t)
public void setExplainConfig(ExplainConfiguration explainConfig)
public void resetOpContext()
public boolean getIsUpdateDeleteMerge()
public void setIsUpdateDeleteMerge(boolean isUpdate)
public boolean isLoadingMaterializedView()
public void setIsLoadingMaterializedView(boolean isLoadingMaterializedView)
public String getExecutionId()
public PlanMapper getPlanMapper()
public void setStatsSource(StatsSource statsSource)
public StatsSource getStatsSource()
public int getExecutionIndex()
public void setExecutionIndex(int executionIndex)
public void setConf(HiveConf conf)
public Table getTempTableForLoad()
public void setTempTableForLoad(Table tempTableForLoad)
Copyright © 2022 The Apache Software Foundation. All rights reserved.