Saving all output to "!!{outputDirectory}!!/sample5.q.raw". Enter "record" with no arguments to stop it.
>>>  !run !!{qFileDirectory}!!/sample5.q
>>>  CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
No rows affected
>>>  
>>>  -- no input pruning, sample filter
EXPLAIN EXTENDED INSERT OVERWRITE TABLE dest1 SELECT s.* -- here's another test
FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 on key) s;
'Explain'
'ABSTRACT SYNTAX TREE:'
' (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME srcbucket) (TOK_TABLEBUCKETSAMPLE 1 5 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME s))))))'
''
'STAGE DEPENDENCIES:'
' Stage-1 is a root stage'
' Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5'
' Stage-4'
' Stage-0 depends on stages: Stage-4, Stage-3, Stage-6'
' Stage-2 depends on stages: Stage-0'
' Stage-3'
' Stage-5'
' Stage-6 depends on stages: Stage-5'
''
'STAGE PLANS:'
' Stage: Stage-1'
' Map Reduce'
' Alias -> Map Operator Tree:'
' s '
' TableScan'
' alias: s'
' GatherStats: false'
' Filter Operator'
' isSamplingPred: true'
' predicate:'
' expr: (((hash(key) & 2147483647) % 5) = 0)'
' type: boolean'
' Select Operator'
' expressions:'
' expr: key'
' type: int'
' expr: value'
' type: string'
' outputColumnNames: _col0, _col1'
' File Output Operator'
' compressed: false'
' GlobalTableId: 1'
' directory: pfile:!!{hive.exec.scratchdir}!!'
' NumFilesPerFileSink: 1'
' Stats Publishing Key Prefix: pfile:!!{hive.exec.scratchdir}!!'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' properties:'
' bucket_count -1'
' columns key,value'
' columns.types int:string'
' file.inputformat org.apache.hadoop.mapred.TextInputFormat'
' file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' location !!{hive.metastore.warehouse.dir}!!/sample5.db/dest1'
' name sample5.dest1'
' serialization.ddl struct dest1 { i32 key, string value}'
' serialization.format 1'
' serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' transient_lastDdlTime !!UNIXTIME!!'
' serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' name: sample5.dest1'
' TotalFiles: 1'
' GatherStats: true'
' MultiFileSpray: false'
' Needs Tagging: false'
' Path -> Alias:'
' !!{hive.metastore.warehouse.dir}!!/sample5.db/srcbucket [s]'
' Path -> Partition:'
' !!{hive.metastore.warehouse.dir}!!/sample5.db/srcbucket '
' Partition'
' base file name: srcbucket'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' properties:'
' bucket_count 2'
' bucket_field_name key'
' columns key,value'
' columns.types int:string'
' file.inputformat org.apache.hadoop.mapred.TextInputFormat'
' file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' location !!{hive.metastore.warehouse.dir}!!/sample5.db/srcbucket'
' name sample5.srcbucket'
' numFiles 2'
' numPartitions 0'
' numRows 0'
' rawDataSize 0'
' serialization.ddl struct srcbucket { i32 key, string value}'
' serialization.format 1'
' serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' totalSize 11603'
' transient_lastDdlTime !!UNIXTIME!!'
' serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' '
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' properties:'
' bucket_count 2'
' bucket_field_name key'
' columns key,value'
' columns.types int:string'
' file.inputformat org.apache.hadoop.mapred.TextInputFormat'
' file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' location !!{hive.metastore.warehouse.dir}!!/sample5.db/srcbucket'
' name sample5.srcbucket'
' numFiles 2'
' numPartitions 0'
' numRows 0'
' rawDataSize 0'
' serialization.ddl struct srcbucket { i32 key, string value}'
' serialization.format 1'
' serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' totalSize 11603'
' transient_lastDdlTime !!UNIXTIME!!'
' serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' name: sample5.srcbucket'
' name: sample5.srcbucket'
''
' Stage: Stage-7'
' Conditional Operator'
''
' Stage: Stage-4'
' Move Operator'
' files:'
' hdfs directory: true'
' source: pfile:!!{hive.exec.scratchdir}!!'
' destination: pfile:!!{hive.exec.scratchdir}!!'
''
' Stage: Stage-0'
' Move Operator'
' tables:'
' replace: true'
' source: pfile:!!{hive.exec.scratchdir}!!'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' properties:'
' bucket_count -1'
' columns key,value'
' columns.types int:string'
' file.inputformat org.apache.hadoop.mapred.TextInputFormat'
' file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' location !!{hive.metastore.warehouse.dir}!!/sample5.db/dest1'
' name sample5.dest1'
' serialization.ddl struct dest1 { i32 key, string value}'
' serialization.format 1'
' serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' transient_lastDdlTime !!UNIXTIME!!'
' serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' name: sample5.dest1'
' tmp directory: pfile:!!{hive.exec.scratchdir}!!'
''
' Stage: Stage-2'
' Stats-Aggr Operator'
' Stats Aggregation Key Prefix: pfile:!!{hive.exec.scratchdir}!!'
''
' Stage: Stage-3'
' Map Reduce'
' Alias -> Map Operator Tree:'
' pfile:!!{hive.exec.scratchdir}!! '
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' directory: pfile:!!{hive.exec.scratchdir}!!'
' NumFilesPerFileSink: 1'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' properties:'
' bucket_count -1'
' columns key,value'
' columns.types int:string'
' file.inputformat org.apache.hadoop.mapred.TextInputFormat'
' file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' location !!{hive.metastore.warehouse.dir}!!/sample5.db/dest1'
' name sample5.dest1'
' serialization.ddl struct dest1 { i32 key, string value}'
' serialization.format 1'
' serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' transient_lastDdlTime !!UNIXTIME!!'
' serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' name: sample5.dest1'
' TotalFiles: 1'
' GatherStats: false'
' MultiFileSpray: false'
' Needs Tagging: false'
' Path -> Alias:'
' pfile:!!{hive.exec.scratchdir}!! [pfile:!!{hive.exec.scratchdir}!!]'
' Path -> Partition:'
' pfile:!!{hive.exec.scratchdir}!! '
' Partition'
' base file name: -ext-10002'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' properties:'
' bucket_count -1'
' columns key,value'
' columns.types int:string'
' file.inputformat org.apache.hadoop.mapred.TextInputFormat'
' file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' location !!{hive.metastore.warehouse.dir}!!/sample5.db/dest1'
' name sample5.dest1'
' serialization.ddl struct dest1 { i32 key, string value}'
' serialization.format 1'
' serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' transient_lastDdlTime !!UNIXTIME!!'
' serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' '
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' properties:'
' bucket_count -1'
' columns key,value'
' columns.types int:string'
' file.inputformat org.apache.hadoop.mapred.TextInputFormat'
' file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' location !!{hive.metastore.warehouse.dir}!!/sample5.db/dest1'
' name sample5.dest1'
' serialization.ddl struct dest1 { i32 key, string value}'
' serialization.format 1'
' serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' transient_lastDdlTime !!UNIXTIME!!'
' serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' name: sample5.dest1'
' name: sample5.dest1'
''
' Stage: Stage-5'
' Map Reduce'
' Alias -> Map Operator Tree:'
' pfile:!!{hive.exec.scratchdir}!! '
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' directory: pfile:!!{hive.exec.scratchdir}!!'
' NumFilesPerFileSink: 1'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' properties:'
' bucket_count -1'
' columns key,value'
' columns.types int:string'
' file.inputformat org.apache.hadoop.mapred.TextInputFormat'
' file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' location !!{hive.metastore.warehouse.dir}!!/sample5.db/dest1'
' name sample5.dest1'
' serialization.ddl struct dest1 { i32 key, string value}'
' serialization.format 1'
' serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' transient_lastDdlTime !!UNIXTIME!!'
' serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' name: sample5.dest1'
' TotalFiles: 1'
' GatherStats: false'
' MultiFileSpray: false'
' Needs Tagging: false'
' Path -> Alias:'
' pfile:!!{hive.exec.scratchdir}!! [pfile:!!{hive.exec.scratchdir}!!]'
' Path -> Partition:'
' pfile:!!{hive.exec.scratchdir}!! '
' Partition'
' base file name: -ext-10002'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' properties:'
' bucket_count -1'
' columns key,value'
' columns.types int:string'
' file.inputformat org.apache.hadoop.mapred.TextInputFormat'
' file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' location !!{hive.metastore.warehouse.dir}!!/sample5.db/dest1'
' name sample5.dest1'
' serialization.ddl struct dest1 { i32 key, string value}'
' serialization.format 1'
' serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' transient_lastDdlTime !!UNIXTIME!!'
' serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' '
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' properties:'
' bucket_count -1'
' columns key,value'
' columns.types int:string'
' file.inputformat org.apache.hadoop.mapred.TextInputFormat'
' file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
' location !!{hive.metastore.warehouse.dir}!!/sample5.db/dest1'
' name sample5.dest1'
' serialization.ddl struct dest1 { i32 key, string value}'
' serialization.format 1'
' serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' transient_lastDdlTime !!UNIXTIME!!'
' serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
' name: sample5.dest1'
' name: sample5.dest1'
''
' Stage: Stage-6'
' Move Operator'
' files:'
' hdfs directory: true'
' source: pfile:!!{hive.exec.scratchdir}!!'
' destination: pfile:!!{hive.exec.scratchdir}!!'
''
''
298 rows selected
>>>  
>>>  INSERT OVERWRITE TABLE dest1 SELECT s.* -- here's another test
FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 on key) s;
'key','value'
No rows selected
>>>  
>>>  SELECT dest1.* FROM dest1 SORT BY key, value;
'key','value'
'0','val_0'
'0','val_0'
'0','val_0'
'0','val_1'
'0','val_1'
'5','val_5'
'5','val_5'
'5','val_5'
'5','val_6'
'10','val_10'
'10','val_11'
'15','val_15'
'15','val_15'
'15','val_16'
'15','val_16'
'20','val_20'
'20','val_21'
'20','val_21'
'30','val_30'
'30','val_31'
'35','val_35'
'35','val_35'
'35','val_35'
'35','val_36'
'35','val_36'
'35','val_36'
'40','val_41'
'40','val_41'
'50','val_51'
'60','val_61'
'65','val_65'
'65','val_66'
'65','val_66'
'70','val_70'
'70','val_70'
'70','val_70'
'70','val_71'
'75','val_76'
'80','val_80'
'80','val_81'
'85','val_85'
'85','val_86'
'90','val_90'
'90','val_90'
'90','val_90'
'95','val_95'
'95','val_95'
'100','val_100'
'100','val_100'
'100','val_101'
'100','val_101'
'105','val_105'
'105','val_106'
'105','val_106'
'110','val_111'
'120','val_120'
'120','val_120'
'120','val_121'
'125','val_125'
'125','val_125'
'125','val_126'
'135','val_136'
'135','val_136'
'135','val_136'
'140','val_141'
'145','val_145'
'150','val_150'
'155','val_155'
'160','val_160'
'160','val_161'
'165','val_165'
'165','val_165'
'165','val_166'
'170','val_170'
'170','val_171'
'175','val_175'
'175','val_175'
'175','val_176'
'175','val_176'
'180','val_180'
'185','val_186'
'190','val_190'
'195','val_195'
'195','val_195'
'200','val_200'
'200','val_200'
'205','val_205'
'205','val_205'
'205','val_206'
'215','val_216'
'230','val_230'
'230','val_230'
'230','val_230'
'230','val_230'
'230','val_230'
'235','val_235'
'235','val_236'
'240','val_241'
'245','val_246'
'245','val_246'
'255','val_255'
'255','val_255'
'260','val_260'
'260','val_261'
'260','val_261'
'265','val_265'
'265','val_265'
'265','val_266'
'275','val_275'
'275','val_276'
'275','val_276'
'280','val_280'
'280','val_280'
'285','val_285'
'285','val_286'
'295','val_296'
'295','val_296'
'300','val_301'
'300','val_301'
'305','val_305'
'305','val_306'
'310','val_310'
'310','val_311'
'310','val_311'
'310','val_311'
'315','val_315'
'325','val_325'
'325','val_325'
'330','val_331'
'335','val_335'
'335','val_336'
'335','val_336'
'340','val_341'
'345','val_345'
'355','val_356'
'355','val_356'
'360','val_360'
'360','val_361'
'365','val_365'
'375','val_375'
'375','val_376'
'375','val_376'
'375','val_376'
'375','val_376'
'375','val_376'
'385','val_386'
'385','val_386'
'390','val_391'
'390','val_391'
'390','val_391'
'395','val_395'
'395','val_395'
'395','val_396'
'395','val_396'
'400','val_400'
'405','val_406'
'410','val_411'
'415','val_416'
'425','val_426'
'430','val_430'
'430','val_430'
'430','val_430'
'430','val_431'
'435','val_435'
'435','val_436'
'440','val_441'
'440','val_441'
'450','val_451'
'450','val_451'
'455','val_455'
'455','val_456'
'455','val_456'
'460','val_460'
'470','val_470'
'470','val_471'
'475','val_475'
'475','val_476'
'480','val_480'
'480','val_480'
'480','val_480'
'480','val_481'
'480','val_481'
'485','val_485'
'485','val_486'
'485','val_486'
'490','val_490'
'490','val_491'
'495','val_495'
'495','val_496'
189 rows selected
>>>  !record
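
-- The isSamplingPred filter in the plan above, (((hash(key) & 2147483647) % 5) = 0),
-- is the row-level form that BUCKET 1 OUT OF 5 ON key takes here: srcbucket has only
-- 2 buckets, which does not line up with the sample denominator 5, so no input files
-- can be pruned and every row is hashed, sign-masked, and kept only if it falls in
-- hash group 0 of 5. As a minimal sketch, assuming Hive's built-in hash() UDF, the
-- sampled scan should be equivalent to:
SELECT s.*
FROM srcbucket s
WHERE ((hash(s.key) & 2147483647) % 5) = 0;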