Saving all output to "!!{outputDirectory}!!/alter_concatenate_indexed_table.q.raw". Enter "record" with no arguments to stop it. >>> !run !!{qFileDirectory}!!/alter_concatenate_indexed_table.q >>> set hive.exec.concatenate.check.index =false; No rows affected >>> create table src_rc_concatenate_test(key int, value string) stored as rcfile; No rows affected >>> >>> load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_concatenate_test; No rows affected >>> load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_concatenate_test; No rows affected >>> load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_concatenate_test; No rows affected >>> >>> show table extended like `src_rc_concatenate_test`; 'tab_name' 'tableName:src_rc_concatenate_test' 'owner:!!{user.name}!!' 'location:!!{hive.metastore.warehouse.dir}!!/alter_concatenate_indexed_table.db/src_rc_concatenate_test' 'inputformat:org.apache.hadoop.hive.ql.io.RCFileInputFormat' 'outputformat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat' 'columns:struct columns { i32 key, string value}' 'partitioned:false' 'partitionColumns:' 'totalNumberFiles:3' 'totalFileSize:636' 'maxFileSize:222' 'minFileSize:206' 'lastAccessTime:0' 'lastUpdateTime:!!UNIXTIMEMILLIS!!' '' 15 rows selected >>> >>> select count(1) from src_rc_concatenate_test; '_c0' '15' 1 row selected >>> select sum(hash(key)), sum(hash(value)) from src_rc_concatenate_test; '_c0','_c1' '214','-7678496319' 1 row selected >>> >>> create index src_rc_concatenate_test_index on table src_rc_concatenate_test(key) as 'compact' WITH DEFERRED REBUILD IDXPROPERTIES ("prop1"="val1", "prop2"="val2"); No rows affected >>> show indexes on src_rc_concatenate_test; 'idx_name','tab_name','col_names','idx_tab_name','idx_type','comment' 'src_rc_concatenate_test_index','src_rc_concatenate_test','key ','alter_concatenate_indexed_table__src_rc_concatenate_test_src_rc_concatenate_test_index__','compact ','' 1 row selected >>> >>> alter table src_rc_concatenate_test concatenate; No rows affected >>> >>> show table extended like `src_rc_concatenate_test`; 'tab_name' 'tableName:src_rc_concatenate_test' 'owner:!!{user.name}!!' 'location:!!{hive.metastore.warehouse.dir}!!/alter_concatenate_indexed_table.db/src_rc_concatenate_test' 'inputformat:org.apache.hadoop.hive.ql.io.RCFileInputFormat' 'outputformat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat' 'columns:struct columns { i32 key, string value}' 'partitioned:false' 'partitionColumns:' 'totalNumberFiles:1' 'totalFileSize:239' 'maxFileSize:239' 'minFileSize:239' 'lastAccessTime:0' 'lastUpdateTime:!!UNIXTIMEMILLIS!!' 
'' 15 rows selected >>> >>> select count(1) from src_rc_concatenate_test; '_c0' '15' 1 row selected >>> select sum(hash(key)), sum(hash(value)) from src_rc_concatenate_test; '_c0','_c1' '214','-7678496319' 1 row selected >>> >>> drop index src_rc_concatenate_test_index on src_rc_concatenate_test; No rows affected >>> >>> create table src_rc_concatenate_test_part(key int, value string) partitioned by (ds string) stored as rcfile; No rows affected >>> >>> alter table src_rc_concatenate_test_part add partition (ds='2011'); No rows affected >>> >>> load data local inpath '../data/files/smbbucket_1.rc' into table src_rc_concatenate_test_part partition (ds='2011'); No rows affected >>> load data local inpath '../data/files/smbbucket_2.rc' into table src_rc_concatenate_test_part partition (ds='2011'); No rows affected >>> load data local inpath '../data/files/smbbucket_3.rc' into table src_rc_concatenate_test_part partition (ds='2011'); No rows affected >>> >>> show table extended like `src_rc_concatenate_test_part` partition (ds='2011'); 'tab_name' 'tableName:src_rc_concatenate_test_part' 'owner:!!{user.name}!!' 'location:!!{hive.metastore.warehouse.dir}!!/alter_concatenate_indexed_table.db/src_rc_concatenate_test_part/ds=2011' 'inputformat:org.apache.hadoop.hive.ql.io.RCFileInputFormat' 'outputformat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat' 'columns:struct columns { i32 key, string value}' 'partitioned:true' 'partitionColumns:struct partition_columns { string ds}' 'totalNumberFiles:3' 'totalFileSize:636' 'maxFileSize:222' 'minFileSize:206' 'lastAccessTime:0' 'lastUpdateTime:!!UNIXTIMEMILLIS!!' '' 15 rows selected >>> >>> select count(1) from src_rc_concatenate_test_part; '_c0' '15' 1 row selected >>> select sum(hash(key)), sum(hash(value)) from src_rc_concatenate_test_part; '_c0','_c1' '214','-7678496319' 1 row selected >>> >>> create index src_rc_concatenate_test_part_index on table src_rc_concatenate_test_part(key) as 'compact' WITH DEFERRED REBUILD IDXPROPERTIES ("prop1"="val1", "prop2"="val2"); No rows affected >>> show indexes on src_rc_concatenate_test_part; 'idx_name','tab_name','col_names','idx_tab_name','idx_type','comment' 'src_rc_concatenate_test_part_index','src_rc_concatenate_test_part','key ','alter_concatenate_indexed_table__src_rc_concatenate_test_part_src_rc_concatenate_test_part_index__','compact ','' 1 row selected >>> >>> alter table src_rc_concatenate_test_part partition (ds='2011') concatenate; No rows affected >>> >>> show table extended like `src_rc_concatenate_test_part` partition (ds='2011'); 'tab_name' 'tableName:src_rc_concatenate_test_part' 'owner:!!{user.name}!!' 'location:!!{hive.metastore.warehouse.dir}!!/alter_concatenate_indexed_table.db/src_rc_concatenate_test_part/ds=2011' 'inputformat:org.apache.hadoop.hive.ql.io.RCFileInputFormat' 'outputformat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat' 'columns:struct columns { i32 key, string value}' 'partitioned:true' 'partitionColumns:struct partition_columns { string ds}' 'totalNumberFiles:1' 'totalFileSize:239' 'maxFileSize:239' 'minFileSize:239' 'lastAccessTime:0' 'lastUpdateTime:!!UNIXTIMEMILLIS!!' '' 15 rows selected >>> >>> select count(1) from src_rc_concatenate_test_part; '_c0' '15' 1 row selected >>> select sum(hash(key)), sum(hash(value)) from src_rc_concatenate_test_part; '_c0','_c1' '214','-7678496319' 1 row selected >>> >>> drop index src_rc_concatenate_test_part_index on src_rc_concatenate_test_part; No rows affected >>> !record