Package | Description |
---|---|
org.apache.hive.hcatalog.common | |
org.apache.hive.hcatalog.data | |
org.apache.hive.hcatalog.data.schema | |
org.apache.hive.hcatalog.mapreduce | |
Modifier and Type | Method and Description |
---|---|
static HCatSchema |
HCatUtil.extractSchema(Partition partition) |
static HCatSchema |
HCatUtil.extractSchema(Table table) |
static HCatSchema |
HCatUtil.getPartitionColumns(Table table)
Return the partition columns from a table instance.
|
static HCatSchema |
HCatUtil.getTableSchemaWithPtnCols(Table table) |
Modifier and Type | Method and Description |
---|---|
static List<FieldSchema> |
HCatUtil.validatePartitionSchema(Table table,
HCatSchema partitionSchema)
Validates the partition schema, checking whether the column types match between the
partition and the existing table schema.
|
Modifier and Type | Method and Description |
---|---|
abstract Object |
HCatRecord.get(String fieldName,
HCatSchema recordSchema) |
Object |
DefaultHCatRecord.get(String fieldName,
HCatSchema recordSchema) |
Object |
LazyHCatRecord.get(String fieldName,
HCatSchema recordSchema) |
protected Object |
HCatRecord.get(String fieldName,
HCatSchema recordSchema,
Class clazz) |
Boolean |
HCatRecord.getBoolean(String fieldName,
HCatSchema recordSchema) |
Byte |
HCatRecord.getByte(String fieldName,
HCatSchema recordSchema) |
byte[] |
HCatRecord.getByteArray(String fieldName,
HCatSchema recordSchema) |
HiveChar |
HCatRecord.getChar(String fieldName,
HCatSchema recordSchema) |
Date |
HCatRecord.getDate(String fieldName,
HCatSchema recordSchema) |
org.apache.hadoop.hive.common.type.HiveDecimal |
HCatRecord.getDecimal(String fieldName,
HCatSchema recordSchema) |
Double |
HCatRecord.getDouble(String fieldName,
HCatSchema recordSchema) |
Float |
HCatRecord.getFloat(String fieldName,
HCatSchema recordSchema) |
Integer |
HCatRecord.getInteger(String fieldName,
HCatSchema recordSchema) |
List<?> |
HCatRecord.getList(String fieldName,
HCatSchema recordSchema) |
Long |
HCatRecord.getLong(String fieldName,
HCatSchema recordSchema) |
Map<?,?> |
HCatRecord.getMap(String fieldName,
HCatSchema recordSchema) |
Short |
HCatRecord.getShort(String fieldName,
HCatSchema recordSchema) |
String |
HCatRecord.getString(String fieldName,
HCatSchema recordSchema) |
List<? extends Object> |
HCatRecord.getStruct(String fieldName,
HCatSchema recordSchema) |
Timestamp |
HCatRecord.getTimestamp(String fieldName,
HCatSchema recordSchema) |
HiveVarchar |
HCatRecord.getVarchar(String fieldName,
HCatSchema recordSchema) |
void |
HCatRecordSerDe.initialize(HCatSchema hsch) |
abstract void |
HCatRecord.set(String fieldName,
HCatSchema recordSchema,
Object value) |
void |
DefaultHCatRecord.set(String fieldName,
HCatSchema recordSchema,
Object value) |
void |
LazyHCatRecord.set(String fieldName,
HCatSchema recordSchema,
Object value) |
void |
HCatRecord.setBoolean(String fieldName,
HCatSchema recordSchema,
Boolean value) |
void |
HCatRecord.setByte(String fieldName,
HCatSchema recordSchema,
Byte value) |
void |
HCatRecord.setByteArray(String fieldName,
HCatSchema recordSchema,
byte[] value) |
void |
HCatRecord.setChar(String fieldName,
HCatSchema recordSchema,
HiveChar value) |
void |
HCatRecord.setDate(String fieldName,
HCatSchema recordSchema,
Date value)
Note that the proper way to construct a java.sql.Date for use with this object is
Date.valueOf("1999-12-31").
|
void |
HCatRecord.setDecimal(String fieldName,
HCatSchema recordSchema,
org.apache.hadoop.hive.common.type.HiveDecimal value) |
void |
HCatRecord.setDouble(String fieldName,
HCatSchema recordSchema,
Double value) |
void |
HCatRecord.setFloat(String fieldName,
HCatSchema recordSchema,
Float value) |
void |
HCatRecord.setInteger(String fieldName,
HCatSchema recordSchema,
Integer value) |
void |
HCatRecord.setList(String fieldName,
HCatSchema recordSchema,
List<?> value) |
void |
HCatRecord.setLong(String fieldName,
HCatSchema recordSchema,
Long value) |
void |
HCatRecord.setMap(String fieldName,
HCatSchema recordSchema,
Map<?,?> value) |
void |
HCatRecord.setShort(String fieldName,
HCatSchema recordSchema,
Short value) |
void |
HCatRecord.setString(String fieldName,
HCatSchema recordSchema,
String value) |
void |
HCatRecord.setStruct(String fieldName,
HCatSchema recordSchema,
List<? extends Object> value) |
void |
HCatRecord.setTimestamp(String fieldName,
HCatSchema recordSchema,
Timestamp value) |
void |
HCatRecord.setVarchar(String fieldName,
HCatSchema recordSchema,
HiveVarchar value) |
Modifier and Type | Method and Description |
---|---|
abstract HCatSchema |
HCatSchemaUtils.HCatSchemaBuilder.build() |
HCatSchema |
HCatSchemaUtils.CollectionBuilder.build() |
HCatSchema |
HCatSchemaUtils.MapBuilder.build() |
HCatSchema |
HCatFieldSchema.getArrayElementSchema() |
static HCatSchema |
HCatSchemaUtils.getHCatSchema(List<? extends FieldSchema> fslist) |
static HCatSchema |
HCatSchemaUtils.getHCatSchema(Schema schema) |
static HCatSchema |
HCatSchemaUtils.getHCatSchema(String schemaString) |
static HCatSchema |
HCatSchemaUtils.getHCatSchema(TypeInfo typeInfo) |
static HCatSchema |
HCatSchemaUtils.getHCatSchemaFromTypeString(String typeString) |
HCatSchema |
HCatFieldSchema.getMapValueSchema() |
HCatSchema |
HCatFieldSchema.getStructSubSchema() |
Modifier and Type | Method and Description |
---|---|
static HCatFieldSchema |
HCatFieldSchema.createMapTypeFieldSchema(String fieldName,
PrimitiveTypeInfo mapKeyType,
HCatSchema mapValueSchema,
String comment) |
HCatSchemaUtils.MapBuilder |
HCatSchemaUtils.MapBuilder.withValueSchema(HCatSchema valueSchema) |
Constructor and Description |
---|
HCatFieldSchema(String fieldName,
HCatFieldSchema.Type type,
HCatFieldSchema.Type mapKeyType,
HCatSchema mapValueSchema,
String comment)
Deprecated.
as of 0.13, slated for removal with 0.15
use
HCatFieldSchema.createMapTypeFieldSchema(String, org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo, HCatSchema, String) |
HCatFieldSchema(String fieldName,
HCatFieldSchema.Type type,
HCatSchema subSchema,
String comment)
Constructor for constructing a ARRAY type or STRUCT type HCatFieldSchema, passing type and subschema
|
Modifier and Type | Method and Description |
---|---|
HCatSchema |
HCatTableInfo.getAllColumns() |
HCatSchema |
HCatTableInfo.getDataColumns() |
static HCatSchema |
HCatInputFormat.getDataColumns(org.apache.hadoop.conf.Configuration conf)
Return data columns for this input, can only be called after setInput is called.
|
HCatSchema |
HCatSplit.getDataSchema()
Gets the data schema.
|
HCatSchema |
OutputJobInfo.getOutputSchema() |
HCatSchema |
HCatTableInfo.getPartitionColumns() |
static HCatSchema |
HCatInputFormat.getPartitionColumns(org.apache.hadoop.conf.Configuration conf)
Return partitioning columns for this input, can only be called after setInput is called.
|
HCatSchema |
PartInfo.getPartitionSchema()
Gets the value of partitionSchema.
|
HCatSchema |
HCatSplit.getTableSchema()
Gets the table schema.
|
static HCatSchema |
HCatBaseInputFormat.getTableSchema(org.apache.hadoop.conf.Configuration conf)
Gets the HCatTable schema for the table specified in the HCatInputFormat.setInput call
on the specified job context.
|
static HCatSchema |
HCatBaseOutputFormat.getTableSchema(org.apache.hadoop.conf.Configuration conf)
Gets the table schema for the table specified in the HCatOutputFormat.setOutput call
on the specified job context.
|
static HCatSchema |
HCatBaseOutputFormat.getTableSchemaWithPartitionColumns(org.apache.hadoop.conf.Configuration conf)
Gets the table schema for the table specified in the HCatOutputFormat.setOutput call
on the specified job context.
|
Modifier and Type | Method and Description |
---|---|
void |
OutputJobInfo.setOutputSchema(HCatSchema schema) |
static void |
HCatBaseInputFormat.setOutputSchema(org.apache.hadoop.mapreduce.Job job,
HCatSchema hcatSchema)
Set the schema for the HCatRecord data returned by HCatInputFormat.
|
protected static void |
HCatBaseOutputFormat.setPartDetails(OutputJobInfo jobInfo,
HCatSchema schema,
Map<String,String> partMap) |
static void |
HCatOutputFormat.setSchema(org.apache.hadoop.conf.Configuration conf,
HCatSchema schema)
Set the schema for the data being written out to the partition.
|
static void |
HCatOutputFormat.setSchema(org.apache.hadoop.mapreduce.Job job,
HCatSchema schema) |
Constructor and Description |
---|
PartInfo(HCatSchema partitionSchema,
HiveStorageHandler storageHandler,
String location,
Properties hcatProperties,
Map<String,String> jobProperties,
HCatTableInfo tableInfo)
Instantiates a new hcat partition info.
|
Copyright © 2022 The Apache Software Foundation. All rights reserved.