public class Partition extends Object implements Serializable
Constructor and Description |
---|
Partition() - Used only for serialization. |
Partition(Table tbl) - Create an empty partition. |
Partition(Table tbl, Map<String,String> partSpec, org.apache.hadoop.fs.Path location) - Create a partition object with the given info. |
Partition(Table tbl, Partition tp) |
Modifier and Type | Method and Description |
---|---|
boolean | canDrop() |
boolean | canWrite() |
void | checkValidity() |
static StorageDescriptor | cloneSd(Table tbl) - Clones the table's StorageDescriptor (we already have methods that clone objects using XML or Kryo). |
static Partition | createMetaPartitionObject(Table tbl, Map<String,String> partSpec, org.apache.hadoop.fs.Path location) |
List<String> | getBucketCols() |
int | getBucketCount() |
org.apache.hadoop.fs.Path | getBucketPath(int bucketNum) - Mapping from bucket number to bucket path. |
List<FieldSchema> | getCols() |
String | getCompleteName() |
org.apache.hadoop.fs.Path | getDataLocation() |
Deserializer | getDeserializer() |
Class<? extends org.apache.hadoop.mapred.InputFormat> | getInputFormatClass() |
int | getLastAccessTime() |
String | getLocation() |
Properties | getMetadataFromPartitionSchema() |
String | getName() |
Class<? extends org.apache.hadoop.mapred.OutputFormat> | getOutputFormatClass() |
Map<String,String> | getParameters() |
org.apache.hadoop.fs.Path | getPartitionPath() |
org.apache.hadoop.fs.Path[] | getPath() |
org.apache.hadoop.fs.Path[] | getPath(Sample s) |
ProtectMode | getProtectMode() |
Properties | getSchema() |
Properties | getSchemaFromTableSchema(Properties tblSchema) |
List<String> | getSkewedColNames() |
Map<List<String>,String> | getSkewedColValueLocationMaps() |
List<List<String>> | getSkewedColValues() |
List<String> | getSortColNames() |
List<Order> | getSortCols() |
org.apache.hadoop.fs.FileStatus[] | getSortedPaths() - Get all paths for this partition in sorted order. |
LinkedHashMap<String,String> | getSpec() |
Table | getTable() |
Partition | getTPartition() - Should only be used by serialization. |
List<String> | getValues() |
protected void | initialize(Table table, Partition tPartition) - Initializes this object with the given variables. |
boolean | isOffline() |
boolean | isStoredAsSubDirectories() |
void | setBucketCount(int newBucketNum) |
void | setInputFormatClass(Class<? extends org.apache.hadoop.mapred.InputFormat> inputFormatClass) |
void | setLastAccessTime(int lastAccessTime) |
void | setLocation(String location) |
void | setOutputFormatClass(Class<? extends HiveOutputFormat> outputFormatClass) |
void | setProtectMode(ProtectMode protectMode) |
void | setSkewedValueLocationMap(List<String> valList, String dirName) |
void | setTable(Table table) - Should only be used by serialization. |
void | setTPartition(Partition partition) - Should only be used by serialization. |
void | setValues(Map<String,String> partSpec) - Set the partition's values. |
String | toString() |
public Partition()
public Partition(Table tbl) throws HiveException
Throws:
HiveException

public Partition(Table tbl, Partition tp) throws HiveException
Throws:
HiveException
public Partition(Table tbl, Map<String,String> partSpec, org.apache.hadoop.fs.Path location) throws HiveException
Parameters:
tbl - Table the partition will be in.
partSpec - Partition specifications.
location - Location of the partition, relative to the table.
Throws:
HiveException - Thrown if we could not create the partition.
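A minimal usage sketch of this constructor, assuming a configured Hive session and a default-database table named web_logs partitioned by a ds column; the database, table, column, and path names are illustrative, not part of the API.

```java
import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;

public class PartitionConstructionSketch {
  public static Partition buildExamplePartition() throws HiveException {
    // Hypothetical database and table names used purely for illustration.
    Table tbl = Hive.get().getTable("default", "web_logs");

    // Partition spec: one entry per partition column, keyed by column name.
    Map<String, String> partSpec = new LinkedHashMap<String, String>();
    partSpec.put("ds", "2017-01-01");

    // Directory that holds this partition's data files (illustrative path).
    Path location = new Path("/user/hive/warehouse/web_logs/ds=2017-01-01");

    return new Partition(tbl, partSpec, location);
  }
}
```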
public List<String> getValues()
See Also: Partition.getValues()
public static Partition createMetaPartitionObject(Table tbl, Map<String,String> partSpec, org.apache.hadoop.fs.Path location) throws HiveException
Throws:
HiveException
public static StorageDescriptor cloneSd(Table tbl) throws HiveException
Throws:
HiveException
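A short sketch of one way cloneSd might be used when assembling metadata for a new partition: copy the table's StorageDescriptor and point the copy at the partition directory. The helper method and the location override below are assumptions for illustration, not part of this class.

```java
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;

public class CloneSdSketch {
  // Copies the table's storage descriptor so it can be customized for a new
  // partition without mutating the table's own metadata.
  public static StorageDescriptor sdForNewPartition(Table tbl, String partLocation)
      throws HiveException {
    StorageDescriptor sd = Partition.cloneSd(tbl);
    sd.setLocation(partLocation);  // point the copy at the partition directory
    return sd;
  }
}
```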
protected void initialize(Table table, Partition tPartition) throws HiveException
Parameters:
table - Table the partition belongs to
tPartition - Thrift Partition object
Throws:
HiveException - Thrown if we cannot initialize the partition

public String getName()
public org.apache.hadoop.fs.Path[] getPath()
public org.apache.hadoop.fs.Path getPartitionPath()
public org.apache.hadoop.fs.Path getDataLocation()
public final Deserializer getDeserializer()
public Properties getSchema()
public Properties getMetadataFromPartitionSchema()
public Properties getSchemaFromTableSchema(Properties tblSchema)
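The getters above expose a partition's name, data location, spec, and schema. A read-only sketch combining them; the sample values in the comments are illustrative.

```java
import java.util.Map;
import java.util.Properties;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.metadata.Partition;

public class PartitionMetadataSketch {
  // Prints a few commonly inspected pieces of partition metadata.
  public static void describe(Partition p) {
    String name = p.getName();                 // e.g. "ds=2017-01-01" (illustrative)
    Path dataLocation = p.getDataLocation();   // directory holding the data files
    Map<String, String> spec = p.getSpec();    // partition column -> value
    Properties schema = p.getSchema();         // schema/SerDe properties for this partition

    System.out.println(name + " -> " + dataLocation);
    for (Map.Entry<String, String> e : spec.entrySet()) {
      System.out.println("  " + e.getKey() + " = " + e.getValue());
    }
    System.out.println("  schema keys: " + schema.stringPropertyNames());
  }
}
```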
public void setInputFormatClass(Class<? extends org.apache.hadoop.mapred.InputFormat> inputFormatClass)
Parameters:
inputFormatClass -

public void setOutputFormatClass(Class<? extends HiveOutputFormat> outputFormatClass)
Parameters:
outputFormatClass -

public final Class<? extends org.apache.hadoop.mapred.InputFormat> getInputFormatClass() throws HiveException
Throws:
HiveException
public final Class<? extends org.apache.hadoop.mapred.OutputFormat> getOutputFormatClass() throws HiveException
Throws:
HiveException
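A sketch of resolving the Hadoop input and output format classes configured for a partition; per the signatures above, both calls can throw HiveException.

```java
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;

public class FormatClassSketch {
  // Resolves and prints the Hadoop input/output format classes for a partition.
  public static void printFormats(Partition p) throws HiveException {
    Class<? extends org.apache.hadoop.mapred.InputFormat> in = p.getInputFormatClass();
    Class<? extends org.apache.hadoop.mapred.OutputFormat> out = p.getOutputFormatClass();
    System.out.println("input format:  " + in.getName());
    System.out.println("output format: " + out.getName());
  }
}
```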
public int getBucketCount()
public void setBucketCount(int newBucketNum)
public org.apache.hadoop.fs.FileStatus[] getSortedPaths()
public org.apache.hadoop.fs.Path getBucketPath(int bucketNum)
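For a bucketed partition, getBucketCount() and getBucketPath(int) map bucket numbers to bucket file paths, and getSortedPaths() returns the partition's files in sorted order. A sketch, assuming the underlying table is bucketed; the loop bound is illustrative.

```java
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.metadata.Partition;

public class BucketPathSketch {
  // Lists the path of every bucket file in a bucketed partition.
  public static void printBucketPaths(Partition p) {
    int buckets = p.getBucketCount();
    for (int i = 0; i < buckets; i++) {
      Path bucketPath = p.getBucketPath(i);   // bucket number -> bucket file path
      System.out.println("bucket " + i + ": " + bucketPath);
    }

    // All paths for this partition, in sorted order.
    FileStatus[] sorted = p.getSortedPaths();
    if (sorted != null) {
      System.out.println(sorted.length + " file(s) under " + p.getDataLocation());
    }
  }
}
```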
public org.apache.hadoop.fs.Path[] getPath(Sample s) throws HiveException
Throws:
HiveException
public LinkedHashMap<String,String> getSpec()
public Table getTable()
public void setTable(Table table)
public Partition getTPartition()
public void setTPartition(Partition partition)
public List<FieldSchema> getCols()
public String getLocation()
public void setLocation(String location)
public void setValues(Map<String,String> partSpec) throws HiveException
Parameters:
partSpec - Partition specifications.
Throws:
HiveException - Thrown if we could not create the partition.
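A minimal sketch of setValues, which replaces the partition's value list from a partition spec map; the column name and value below are illustrative.

```java
import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;

public class SetValuesSketch {
  // Re-points an in-memory Partition object at a different partition spec.
  public static void retarget(Partition p) throws HiveException {
    Map<String, String> newSpec = new LinkedHashMap<String, String>();
    newSpec.put("ds", "2017-01-02");   // illustrative partition column and value
    p.setValues(newSpec);              // per the docs, throws HiveException if the partition could not be created
  }
}
```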
public void setProtectMode(ProtectMode protectMode)
Parameters:
protectMode -

public ProtectMode getProtectMode()
public boolean isOffline()
public boolean canDrop()
public boolean canWrite()
public String getCompleteName()
public int getLastAccessTime()
public void setLastAccessTime(int lastAccessTime)
public boolean isStoredAsSubDirectories()
public void setSkewedValueLocationMap(List<String> valList, String dirName) throws HiveException
Throws:
HiveException
public void checkValidity() throws HiveException
Throws:
HiveException