public class DruidStorageHandler extends DefaultHiveMetaHook implements HiveStorageHandler
Modifier and Type | Field and Description |
---|---|
protected static SessionState.LogHelper |
console |
static String |
INTERMEDIATE_SEGMENT_DIR_NAME |
protected static org.slf4j.Logger |
LOG |
static String |
SEGMENTS_DESCRIPTOR_DIR_NAME |
ALTER_TABLE_OPERATION_TYPE |
Constructor and Description |
---|
DruidStorageHandler() |
DruidStorageHandler(io.druid.metadata.SQLMetadataConnector connector,
io.druid.metadata.MetadataStorageTablesConfig druidMetadataStorageTablesConfig) |
Modifier and Type | Method and Description |
---|---|
void |
commitCreateTable(Table table)
Called after successfully adding a new table definition to the metastore
during CREATE TABLE.
|
void |
commitDropTable(Table table,
boolean deleteData)
Called after successfully removing a table definition from the metastore
during DROP TABLE.
|
void |
commitInsertTable(Table table,
boolean overwrite)
Called after an INSERT [OVERWRITE] statement is successfully executed.
|
void |
configureInputJobCredentials(TableDesc tableDesc,
Map<String,String> jobSecrets)
This method is called to allow the StorageHandlers the chance to
populate secret keys into the job's credentials.
|
void |
configureInputJobProperties(TableDesc tableDesc,
Map<String,String> jobProperties)
This method is called to allow the StorageHandlers the chance
to populate the JobContext.getConfiguration() with properties that
may be needed by the handler's bundled artifacts (i.e. InputFormat, SerDe, etc.).
|
void |
configureJobConf(TableDesc tableDesc,
org.apache.hadoop.mapred.JobConf jobConf)
Called just before submitting MapReduce job.
|
void |
configureOutputJobProperties(TableDesc tableDesc,
Map<String,String> jobProperties)
This method is called to allow the StorageHandlers the chance
to populate the JobContext.getConfiguration() with properties that
may be needed by the handler's bundled artifacts (i.e. InputFormat, SerDe, etc.).
|
void |
configureTableJobProperties(TableDesc tableDesc,
Map<String,String> jobProperties)
Deprecated. Use configureInputJobProperties/configureOutputJobProperties
methods instead.
|
protected void |
deleteSegment(io.druid.timeline.DataSegment segment) |
HiveAuthorizationProvider |
getAuthorizationProvider()
Returns the implementation specific authorization provider
|
org.apache.hadoop.conf.Configuration |
getConf() |
static com.metamx.http.client.HttpClient |
getHttpClient() |
Class<? extends org.apache.hadoop.mapred.InputFormat> |
getInputFormatClass() |
LockType |
getLockType(WriteEntity writeEntity) |
HiveMetaHook |
getMetaHook() |
Class<? extends org.apache.hadoop.mapred.OutputFormat> |
getOutputFormatClass() |
Class<? extends AbstractSerDe> |
getSerDeClass() |
StorageHandlerInfo |
getStorageHandlerInfo(Table table)
Used to fetch runtime information about storage handler during DESCRIBE EXTENDED statement
|
String |
getUniqueId() |
protected List<io.druid.timeline.DataSegment> |
loadAndCommitDruidSegments(Table table,
boolean overwrite,
List<io.druid.timeline.DataSegment> segmentsToLoad)
Creates metadata, moves, then commits the Segment's metadata to the Druid metadata store in one TxN.
|
protected String |
makeStagingName() |
void |
preAlterTable(Table table,
EnvironmentContext context)
Called before a table is altered in the metastore
during ALTER TABLE.
|
void |
preCreateTable(Table table)
Called before a new table definition is added to the metastore
during CREATE TABLE.
|
void |
preDropTable(Table table)
Called before a table definition is removed from the metastore
during DROP TABLE.
|
void |
preInsertTable(Table table,
boolean overwrite)
Called before the commit insert method is called.
|
void |
rollbackCreateTable(Table table)
Called after failure adding a new table definition to the metastore
during CREATE TABLE.
|
void |
rollbackDropTable(Table table)
Called after failure removing a table definition from the metastore
during DROP TABLE.
|
void |
rollbackInsertTable(Table table,
boolean overwrite)
Called in case pre-commit or commit insert fails.
|
void |
setConf(org.apache.hadoop.conf.Configuration conf) |
String |
toString() |
protected static final org.slf4j.Logger LOG
protected static final SessionState.LogHelper console
public static final String SEGMENTS_DESCRIPTOR_DIR_NAME
public static final String INTERMEDIATE_SEGMENT_DIR_NAME
public DruidStorageHandler()
public DruidStorageHandler(io.druid.metadata.SQLMetadataConnector connector, io.druid.metadata.MetadataStorageTablesConfig druidMetadataStorageTablesConfig)
public Class<? extends org.apache.hadoop.mapred.InputFormat> getInputFormatClass()
getInputFormatClass
in interface HiveStorageHandler
InputFormat
public Class<? extends org.apache.hadoop.mapred.OutputFormat> getOutputFormatClass()
getOutputFormatClass
in interface HiveStorageHandler
OutputFormat
public Class<? extends AbstractSerDe> getSerDeClass()
getSerDeClass
in interface HiveStorageHandler
AbstractSerDe
public HiveMetaHook getMetaHook()
getMetaHook
in interface HiveStorageHandler
public HiveAuthorizationProvider getAuthorizationProvider()
HiveStorageHandler
getAuthorizationProvider
in interface HiveStorageHandler
public void configureInputJobProperties(TableDesc tableDesc, Map<String,String> jobProperties)
HiveStorageHandler
configureInputJobProperties
in interface HiveStorageHandler
tableDesc
- descriptor for the table being accessed
jobProperties
- receives properties copied or transformed
from the table properties
public void configureInputJobCredentials(TableDesc tableDesc, Map<String,String> jobSecrets)
HiveStorageHandler
configureInputJobCredentials
in interface HiveStorageHandler
public void preCreateTable(Table table) throws MetaException
HiveMetaHook
preCreateTable
in interface HiveMetaHook
table
- new table definition
MetaException
public void rollbackCreateTable(Table table)
HiveMetaHook
rollbackCreateTable
in interface HiveMetaHook
table
- new table definition
public void commitCreateTable(Table table) throws MetaException
HiveMetaHook
commitCreateTable
in interface HiveMetaHook
table
- new table definition
MetaException
protected List<io.druid.timeline.DataSegment> loadAndCommitDruidSegments(Table table, boolean overwrite, List<io.druid.timeline.DataSegment> segmentsToLoad) throws IOException, org.skife.jdbi.v2.exceptions.CallbackFailedException
table
- Hive table
overwrite
- true if it is an insert overwrite table
MetaException
- if errors occur.
IOException
org.skife.jdbi.v2.exceptions.CallbackFailedException
protected void deleteSegment(io.druid.timeline.DataSegment segment) throws io.druid.segment.loading.SegmentLoadingException
io.druid.segment.loading.SegmentLoadingException
public void preDropTable(Table table)
HiveMetaHook
preDropTable
in interface HiveMetaHook
table
- table definition
public void rollbackDropTable(Table table)
HiveMetaHook
rollbackDropTable
in interface HiveMetaHook
table
- table definition
public void commitDropTable(Table table, boolean deleteData)
HiveMetaHook
commitDropTable
in interface HiveMetaHook
table
- table definition
deleteData
- whether to delete data as well; this should typically
be ignored in the case of an external table
public void commitInsertTable(Table table, boolean overwrite) throws MetaException
DefaultHiveMetaHook
commitInsertTable
in class DefaultHiveMetaHook
table
- table definition
overwrite
- true if it is INSERT OVERWRITE
MetaException
public void preInsertTable(Table table, boolean overwrite)
DefaultHiveMetaHook
preInsertTable
in class DefaultHiveMetaHook
table
- table definition
overwrite
- true if it is INSERT OVERWRITE
public void rollbackInsertTable(Table table, boolean overwrite)
DefaultHiveMetaHook
rollbackInsertTable
in class DefaultHiveMetaHook
table
- table definition
overwrite
- true if it is INSERT OVERWRITE
public void configureOutputJobProperties(TableDesc tableDesc, Map<String,String> jobProperties)
HiveStorageHandler
configureOutputJobProperties
in interface HiveStorageHandler
tableDesc
- descriptor for the table being accessed
jobProperties
- receives properties copied or transformed
from the table properties
public void configureTableJobProperties(TableDesc tableDesc, Map<String,String> jobProperties)
HiveStorageHandler
configureTableJobProperties
in interface HiveStorageHandler
tableDesc
- descriptor for the table being accessed
jobProperties
- receives properties copied or transformed
from the table properties
public void configureJobConf(TableDesc tableDesc, org.apache.hadoop.mapred.JobConf jobConf)
HiveStorageHandler
configureJobConf
in interface HiveStorageHandler
tableDesc
- descriptor for the table being accessed
jobConf
- jobConf for MapReduce job
public void setConf(org.apache.hadoop.conf.Configuration conf)
setConf
in interface org.apache.hadoop.conf.Configurable
public org.apache.hadoop.conf.Configuration getConf()
getConf
in interface org.apache.hadoop.conf.Configurable
public LockType getLockType(WriteEntity writeEntity)
getLockType
in interface HiveStorageHandler
public String getUniqueId()
protected String makeStagingName()
public static com.metamx.http.client.HttpClient getHttpClient()
public void preAlterTable(Table table, EnvironmentContext context) throws MetaException
HiveMetaHook
preAlterTable
in interface HiveMetaHook
table
- new table definition
MetaException
public StorageHandlerInfo getStorageHandlerInfo(Table table) throws MetaException
HiveStorageHandler
getStorageHandlerInfo
in interface HiveStorageHandler
table
- table definition
MetaException
Copyright © 2022 The Apache Software Foundation. All rights reserved.