Saving all output to "!!{outputDirectory}!!/alter_numbuckets_partitioned_table.q.raw". Enter "record" with no arguments to stop it.
>>> !run !!{qFileDirectory}!!/alter_numbuckets_partitioned_table.q
>>> 
>>> create table tst1(key string, value string) partitioned by (ds string) clustered by (key) into 10 buckets;
No rows affected
>>> 
>>> alter table tst1 clustered by (key) into 8 buckets;
No rows affected
>>> 
>>> describe formatted tst1;
'col_name','data_type','comment'
'# col_name ','data_type ','comment '
'','',''
'key ','string ','None '
'value ','string ','None '
'','',''
'# Partition Information','',''
'# col_name ','data_type ','comment '
'','',''
'ds ','string ','None '
'','',''
'# Detailed Table Information','',''
'Database: ','alter_numbuckets_partitioned_table',''
'Owner: ','!!{user.name}!! ',''
'CreateTime: ','!!TIMESTAMP!!',''
'LastAccessTime: ','UNKNOWN ',''
'Protect Mode: ','None ',''
'Retention: ','0 ',''
'Location: ','!!{hive.metastore.warehouse.dir}!!/alter_numbuckets_partitioned_table.db/tst1',''
'Table Type: ','MANAGED_TABLE ',''
'Table Parameters:','',''
'','last_modified_by ','!!{user.name}!! '
'','last_modified_time ','!!UNIXTIME!! '
'','transient_lastDdlTime','!!UNIXTIME!! '
'','',''
'# Storage Information','',''
'SerDe Library: ','org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe',''
'InputFormat: ','org.apache.hadoop.mapred.TextInputFormat',''
'OutputFormat: ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',''
'Compressed: ','No ',''
'Num Buckets: ','8 ',''
'Bucket Columns: ','[key] ',''
'Sort Columns: ','[] ',''
'Storage Desc Params:','',''
'','serialization.format','1 '
34 rows selected
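The alter above changes only the table's definition in the metastore (hence 'Num Buckets: 8' with no data movement); writes lay data out into that many buckets only while bucketing is enforced, which the next statement switches on. Once the physical layout matches the metadata, the bucket spec can be exploited for sampling. A minimal illustrative sketch, not part of the test, assuming the partition loaded below:

  -- reads only bucket 1 of 8, valid once files are actually bucketed on key
  select count(*) from tst1 tablesample (bucket 1 out of 8 on key) where ds = '1';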
>>> 
>>> set hive.enforce.bucketing=true;
No rows affected
>>> insert overwrite table tst1 partition (ds='1') select key, value from src;
'key','value'
No rows selected
>>> 
>>> describe formatted tst1 partition (ds = '1');
'col_name','data_type','comment'
'# col_name ','data_type ','comment '
'','',''
'key ','string ','None '
'value ','string ','None '
'','',''
'# Partition Information','',''
'# col_name ','data_type ','comment '
'','',''
'ds ','string ','None '
'','',''
'# Detailed Partition Information','',''
'Partition Value: ','[1] ',''
'Database: ','alter_numbuckets_partitioned_table',''
'Table: ','tst1 ',''
'CreateTime: ','!!TIMESTAMP!!',''
'LastAccessTime: ','UNKNOWN ',''
'Protect Mode: ','None ',''
'Location: ','!!{hive.metastore.warehouse.dir}!!/alter_numbuckets_partitioned_table.db/tst1/ds=1',''
'Partition Parameters:','',''
'','numFiles ','1 '
'','numRows ','500 '
'','rawDataSize ','5312 '
'','totalSize ','5812 '
'','transient_lastDdlTime','!!UNIXTIME!! '
'','',''
'# Storage Information','',''
'SerDe Library: ','org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe',''
'InputFormat: ','org.apache.hadoop.mapred.TextInputFormat',''
'OutputFormat: ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',''
'Compressed: ','No ',''
'Num Buckets: ','8 ',''
'Bucket Columns: ','[key] ',''
'Sort Columns: ','[] ',''
'Storage Desc Params:','',''
'','serialization.format','1 '
35 rows selected
>>> 
>>> -- Test changing bucket number
>>> 
>>> alter table tst1 clustered by (key) into 12 buckets;
No rows affected
>>> 
>>> insert overwrite table tst1 partition (ds='1') select key, value from src;
'key','value'
No rows selected
>>> 
>>> describe formatted tst1 partition (ds = '1');
'col_name','data_type','comment'
'# col_name ','data_type ','comment '
'','',''
'key ','string ','None '
'value ','string ','None '
'','',''
'# Partition Information','',''
'# col_name ','data_type ','comment '
'','',''
'ds ','string ','None '
'','',''
'# Detailed Partition Information','',''
'Partition Value: ','[1] ',''
'Database: ','alter_numbuckets_partitioned_table',''
'Table: ','tst1 ',''
'CreateTime: ','!!TIMESTAMP!!',''
'LastAccessTime: ','UNKNOWN ',''
'Protect Mode: ','None ',''
'Location: ','!!{hive.metastore.warehouse.dir}!!/alter_numbuckets_partitioned_table.db/tst1/ds=1',''
'Partition Parameters:','',''
'','numFiles ','1 '
'','numRows ','500 '
'','rawDataSize ','5312 '
'','totalSize ','5812 '
'','transient_lastDdlTime','!!UNIXTIME!! '
'','',''
'# Storage Information','',''
'SerDe Library: ','org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe',''
'InputFormat: ','org.apache.hadoop.mapred.TextInputFormat',''
'OutputFormat: ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',''
'Compressed: ','No ',''
'Num Buckets: ','8 ',''
'Bucket Columns: ','[key] ',''
'Sort Columns: ','[] ',''
'Storage Desc Params:','',''
'','serialization.format','1 '
35 rows selected
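Although the table was just altered to 12 buckets and the partition's data rewritten, the partition above still reports 'Num Buckets: 8': in this version of Hive an existing partition keeps the storage descriptor it was created with, and insert overwrite does not refresh it. One way to move the partition onto the new bucket count, sketched under the assumption that it can be rebuilt from src:

  -- recreate the partition so it picks up the table's current bucket spec
  alter table tst1 drop partition (ds = '1');
  insert overwrite table tst1 partition (ds = '1') select key, value from src;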
>>> 
>>> describe formatted tst1;
'col_name','data_type','comment'
'# col_name ','data_type ','comment '
'','',''
'key ','string ','None '
'value ','string ','None '
'','',''
'# Partition Information','',''
'# col_name ','data_type ','comment '
'','',''
'ds ','string ','None '
'','',''
'# Detailed Table Information','',''
'Database: ','alter_numbuckets_partitioned_table',''
'Owner: ','!!{user.name}!! ',''
'CreateTime: ','!!TIMESTAMP!!',''
'LastAccessTime: ','UNKNOWN ',''
'Protect Mode: ','None ',''
'Retention: ','0 ',''
'Location: ','!!{hive.metastore.warehouse.dir}!!/alter_numbuckets_partitioned_table.db/tst1',''
'Table Type: ','MANAGED_TABLE ',''
'Table Parameters:','',''
'','last_modified_by ','!!{user.name}!! '
'','last_modified_time ','!!UNIXTIME!! '
'','numFiles ','1 '
'','numPartitions ','1 '
'','numRows ','500 '
'','rawDataSize ','5312 '
'','totalSize ','5812 '
'','transient_lastDdlTime','!!UNIXTIME!! '
'','',''
'# Storage Information','',''
'SerDe Library: ','org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe',''
'InputFormat: ','org.apache.hadoop.mapred.TextInputFormat',''
'OutputFormat: ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',''
'Compressed: ','No ',''
'Num Buckets: ','12 ',''
'Bucket Columns: ','[key] ',''
'Sort Columns: ','[] ',''
'Storage Desc Params:','',''
'','serialization.format','1 '
39 rows selected
>>> 
>>> -- Test adding sort order
>>> 
>>> alter table tst1 clustered by (key) sorted by (key asc) into 12 buckets;
No rows affected
>>> 
>>> describe formatted tst1;
'col_name','data_type','comment'
'# col_name ','data_type ','comment '
'','',''
'key ','string ','None '
'value ','string ','None '
'','',''
'# Partition Information','',''
'# col_name ','data_type ','comment '
'','',''
'ds ','string ','None '
'','',''
'# Detailed Table Information','',''
'Database: ','alter_numbuckets_partitioned_table',''
'Owner: ','!!{user.name}!! ',''
'CreateTime: ','!!TIMESTAMP!!',''
'LastAccessTime: ','UNKNOWN ',''
'Protect Mode: ','None ',''
'Retention: ','0 ',''
'Location: ','!!{hive.metastore.warehouse.dir}!!/alter_numbuckets_partitioned_table.db/tst1',''
'Table Type: ','MANAGED_TABLE ',''
'Table Parameters:','',''
'','last_modified_by ','!!{user.name}!! '
'','last_modified_time ','!!UNIXTIME!! '
'','numFiles ','1 '
'','numPartitions ','1 '
'','numRows ','500 '
'','rawDataSize ','5312 '
'','totalSize ','5812 '
'','transient_lastDdlTime','!!UNIXTIME!! '
'','',''
'# Storage Information','',''
'SerDe Library: ','org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe',''
'InputFormat: ','org.apache.hadoop.mapred.TextInputFormat',''
'OutputFormat: ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',''
'Compressed: ','No ',''
'Num Buckets: ','12 ',''
'Bucket Columns: ','[key] ',''
'Sort Columns: ','[Order(col:key, order:1)]',''
'Storage Desc Params:','',''
'','serialization.format','1 '
39 rows selected
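'[Order(col:key, order:1)]' is the metastore's Order struct for the sort spec: order 1 encodes ascending, and order 0 (seen in the next section) encodes descending. The same layout could also be declared at create time; a sketch with a hypothetical table name:

  -- declares the bucketed, sorted layout up front instead of via alter
  create table tst1_sorted(key string, value string) partitioned by (ds string) clustered by (key) sorted by (key asc) into 12 buckets;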
>>> 
>>> -- Test changing sort order
>>> 
>>> alter table tst1 clustered by (key) sorted by (value desc) into 12 buckets;
No rows affected
>>> 
>>> describe formatted tst1;
'col_name','data_type','comment'
'# col_name ','data_type ','comment '
'','',''
'key ','string ','None '
'value ','string ','None '
'','',''
'# Partition Information','',''
'# col_name ','data_type ','comment '
'','',''
'ds ','string ','None '
'','',''
'# Detailed Table Information','',''
'Database: ','alter_numbuckets_partitioned_table',''
'Owner: ','!!{user.name}!! ',''
'CreateTime: ','!!TIMESTAMP!!',''
'LastAccessTime: ','UNKNOWN ',''
'Protect Mode: ','None ',''
'Retention: ','0 ',''
'Location: ','!!{hive.metastore.warehouse.dir}!!/alter_numbuckets_partitioned_table.db/tst1',''
'Table Type: ','MANAGED_TABLE ',''
'Table Parameters:','',''
'','last_modified_by ','!!{user.name}!! '
'','last_modified_time ','!!UNIXTIME!! '
'','numFiles ','1 '
'','numPartitions ','1 '
'','numRows ','500 '
'','rawDataSize ','5312 '
'','totalSize ','5812 '
'','transient_lastDdlTime','!!UNIXTIME!! '
'','',''
'# Storage Information','',''
'SerDe Library: ','org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe',''
'InputFormat: ','org.apache.hadoop.mapred.TextInputFormat',''
'OutputFormat: ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',''
'Compressed: ','No ',''
'Num Buckets: ','12 ',''
'Bucket Columns: ','[key] ',''
'Sort Columns: ','[Order(col:value, order:0)]',''
'Storage Desc Params:','',''
'','serialization.format','1 '
39 rows selected
>>> 
>>> -- Test removing sort order
>>> 
>>> alter table tst1 clustered by (value) into 12 buckets;
No rows affected
>>> 
>>> describe formatted tst1;
'col_name','data_type','comment'
'# col_name ','data_type ','comment '
'','',''
'key ','string ','None '
'value ','string ','None '
'','',''
'# Partition Information','',''
'# col_name ','data_type ','comment '
'','',''
'ds ','string ','None '
'','',''
'# Detailed Table Information','',''
'Database: ','alter_numbuckets_partitioned_table',''
'Owner: ','!!{user.name}!! ',''
'CreateTime: ','!!TIMESTAMP!!',''
'LastAccessTime: ','UNKNOWN ',''
'Protect Mode: ','None ',''
'Retention: ','0 ',''
'Location: ','!!{hive.metastore.warehouse.dir}!!/alter_numbuckets_partitioned_table.db/tst1',''
'Table Type: ','MANAGED_TABLE ',''
'Table Parameters:','',''
'','last_modified_by ','!!{user.name}!! '
'','last_modified_time ','!!UNIXTIME!! '
'','numFiles ','1 '
'','numPartitions ','1 '
'','numRows ','500 '
'','rawDataSize ','5312 '
'','totalSize ','5812 '
'','transient_lastDdlTime','!!UNIXTIME!! '
'','',''
'# Storage Information','',''
'SerDe Library: ','org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe',''
'InputFormat: ','org.apache.hadoop.mapred.TextInputFormat',''
'OutputFormat: ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',''
'Compressed: ','No ',''
'Num Buckets: ','12 ',''
'Bucket Columns: ','[value] ',''
'Sort Columns: ','[] ',''
'Storage Desc Params:','',''
'','serialization.format','1 '
39 rows selected
>>> 
>>> -- Test removing buckets
>>> 
>>> alter table tst1 not clustered;
No rows affected
>>> 
>>> describe formatted tst1;
'col_name','data_type','comment'
'# col_name ','data_type ','comment '
'','',''
'key ','string ','None '
'value ','string ','None '
'','',''
'# Partition Information','',''
'# col_name ','data_type ','comment '
'','',''
'ds ','string ','None '
'','',''
'# Detailed Table Information','',''
'Database: ','alter_numbuckets_partitioned_table',''
'Owner: ','!!{user.name}!! ',''
'CreateTime: ','!!TIMESTAMP!!',''
'LastAccessTime: ','UNKNOWN ',''
'Protect Mode: ','None ',''
'Retention: ','0 ',''
'Location: ','!!{hive.metastore.warehouse.dir}!!/alter_numbuckets_partitioned_table.db/tst1',''
'Table Type: ','MANAGED_TABLE ',''
'Table Parameters:','',''
'','last_modified_by ','!!{user.name}!! '
'','last_modified_time ','!!UNIXTIME!! '
'','numFiles ','1 '
'','numPartitions ','1 '
'','numRows ','500 '
'','rawDataSize ','5312 '
'','totalSize ','5812 '
'','transient_lastDdlTime','!!UNIXTIME!! '
'','',''
'# Storage Information','',''
'SerDe Library: ','org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe',''
'InputFormat: ','org.apache.hadoop.mapred.TextInputFormat',''
'OutputFormat: ','org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',''
'Compressed: ','No ',''
'Num Buckets: ','-1 ',''
'Bucket Columns: ','[] ',''
'Sort Columns: ','[] ',''
'Storage Desc Params:','',''
'','serialization.format','1 '
39 rows selected
>>> !record
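'Num Buckets: -1' is how the metastore reports a table with no bucketing after 'alter table tst1 not clustered'. Because every alter in this test is metadata-only, comparing the reported bucket count against the files actually on disk can be instructive; a sketch using this test's paths (the dfs command runs from the Hive CLI):

  -- metadata view, then the physical files backing the partition
  describe formatted tst1 partition (ds = '1');
  dfs -ls !!{hive.metastore.warehouse.dir}!!/alter_numbuckets_partitioned_table.db/tst1/ds=1;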