org.apache.hadoop.hive.serde2.ColumnProjectionUtils.appendReadColumnIDs(Configuration, List)
for backwards compatibility with <= 0.12, use appendReadColumns
|
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.bucketCols(List, int) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.collectionItemsTerminatedBy(char) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.comments(String) |
org.apache.hadoop.hive.ql.metadata.HiveStorageHandler.configureTableJobProperties(TableDesc, Map) |
org.apache.hadoop.hive.ql.io.orc.InStream.create(String, ByteBuffer[], long[], long, CompressionCodec, int) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.create(String, String, List) |
org.apache.hive.hcatalog.api.HCatAddPartitionDesc.create(String, String, String, Map) |
org.apache.hadoop.hive.serde2.lazy.LazyFactory.createColumnarStructInspector(List, List, byte[], Text, boolean, byte) |
org.apache.hadoop.hive.serde2.lazy.LazyFactory.createLazyObjectInspector(TypeInfo, byte[], int, Text, boolean, byte) |
org.apache.hadoop.hive.serde2.lazy.LazyFactory.createLazyObjectInspector(TypeInfo, byte[], int, Text, boolean, byte, boolean) |
org.apache.hadoop.hive.serde2.lazy.LazyFactory.createLazyObjectInspector(TypeInfo, byte[], int, Text, boolean, byte, boolean, ObjectInspectorFactory.ObjectInspectorOptions) |
org.apache.hadoop.hive.serde2.lazy.LazyFactory.createLazyObjectInspector(TypeInfo, byte[], int, Text, boolean, byte, ObjectInspectorFactory.ObjectInspectorOptions) |
org.apache.hadoop.hive.serde2.lazy.LazyFactory.createLazyStructInspector(List, List, byte[], Text, boolean, boolean, byte) |
org.apache.hadoop.hive.serde2.lazy.LazyFactory.createLazyStructInspector(List, List, byte[], Text, boolean, boolean, byte, boolean) |
org.apache.hadoop.hive.metastore.IMetaStoreClient.dropTable(String, boolean)
|
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(String, boolean) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.escapeChar(char) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.fieldsTerminatedBy(char) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.fileFormat(String) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getBucketCols() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getCols() |
org.apache.hadoop.hive.serde2.dynamic_type.SimpleCharStream.getColumn() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getComments() |
org.apache.hadoop.hive.ql.io.RCFile.Writer.getCompressionCodec() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getDatabaseName() |
org.apache.hive.hcatalog.api.HCatAddPartitionDesc.getDatabaseName() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getExternal() |
org.apache.hadoop.hive.ql.exec.Utilities.getFileExtension(JobConf, boolean)
|
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getFileFormat() |
org.apache.hive.hcatalog.common.HCatUtil.getHiveClient(HiveConf) |
org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleListObjectInspector(ObjectInspector, byte, Text, boolean, byte) |
org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleMapObjectInspector(ObjectInspector, ObjectInspector, byte, byte, Text, boolean, byte) |
org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(List, List, byte, Text, boolean, boolean, byte) |
org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(List, List, byte, Text, boolean, boolean, byte, ObjectInspectorFactory.ObjectInspectorOptions) |
org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(List, List, List, byte, Text, boolean, boolean, byte) |
org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(List, List, List, byte, Text, boolean, boolean, byte, ObjectInspectorFactory.ObjectInspectorOptions) |
org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazyUnionObjectInspector(List, byte, Text, boolean, byte) |
org.apache.hadoop.hive.serde2.dynamic_type.SimpleCharStream.getLine() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getLocation() |
org.apache.hive.hcatalog.api.HCatAddPartitionDesc.getLocation() |
org.apache.hive.hcatalog.data.schema.HCatFieldSchema.getMapKeyType()
|
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getNumBuckets() |
org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getOutputFormatFinalPath(Path, String, JobConf, HiveOutputFormat&lt;?, ?&gt;, boolean, Path) |
org.apache.hadoop.hive.ql.udf.generic.SimpleGenericUDAFParameterInfo.getParameters() |
org.apache.hadoop.hive.ql.udf.generic.GenericUDAFParameterInfo.getParameters() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getPartitionCols() |
org.apache.hive.hcatalog.api.HCatAddPartitionDesc.getPartitionSpec() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getSerdeParams() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getSortCols() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getStorageHandler() |
org.apache.hadoop.hive.metastore.IMetaStoreClient.getTable(String)
|
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getTable(String) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getTableName() |
org.apache.hive.hcatalog.api.HCatAddPartitionDesc.getTableName() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getTblProps() |
org.apache.hive.hcatalog.data.schema.HCatFieldSchema.getType()
|
org.apache.hadoop.hive.serde2.AbstractSerDe.initialize(Configuration, Properties) |
org.apache.hadoop.hive.serde2.AbstractEncodingAwareSerDe.initialize(Configuration, Properties) |
org.apache.hadoop.hive.ql.udf.generic.GenericUDTF.initialize(ObjectInspector[]) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.isTableExternal(boolean) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.linesTerminatedBy(char) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.location(String) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.mapKeysTerminatedBy(char) |
org.apache.hadoop.hive.ql.io.RCFile.Reader.nextColumnsBatch() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.nullDefinedAs(char) |
org.apache.hive.service.cli.CLIService.openSession(TProtocolVersion, String, String, Map)
|
org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(TProtocolVersion, String, String, Map, String)
|
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.partCols(List) |
org.apache.hadoop.hive.ql.io.NonSyncDataInputBuffer.readLine()
Use BufferedReader
|
org.apache.hadoop.hive.ql.hooks.PostExecute.run(SessionState, Set, Set, LineageInfo, UserGroupInformation) |
org.apache.hadoop.hive.ql.hooks.PreExecute.run(SessionState, Set, Set, UserGroupInformation) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.serdeParam(String, String) |
org.apache.hadoop.hive.ql.io.RCFile.ValueBuffer.setColumnValueBuffer(NonSyncDataOutputBuffer, int) |
org.apache.hive.hcatalog.mapreduce.HCatInputFormat.setFilter(String)
|
org.apache.hadoop.hive.serde2.ColumnProjectionUtils.setFullyReadColumns(Configuration)
for backwards compatibility with <= 0.12, use setReadAllColumns
|
org.apache.hadoop.hive.serde2.ColumnProjectionUtils.setReadColumnIDs(Configuration, List)
for backwards compatibility with <= 0.12, use setReadAllColumns
and appendReadColumns
|
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.sortCols(ArrayList) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.storageHandler(String) |
org.apache.hadoop.hive.metastore.IMetaStoreClient.tableExists(String)
|
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.tableExists(String) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.tblProps(Map) |
org.apache.hadoop.hive.metastore.ObjectStore.updateMStorageDescriptorTblPropURI(URI, URI, String, boolean) |