Saving all output to "!!{outputDirectory}!!/sample7.q.raw". Enter "record" with no arguments to stop it.
>>> !run !!{qFileDirectory}!!/sample7.q
>>> CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
No rows affected
>>> 
>>> -- both input pruning and sample filter
>>> EXPLAIN EXTENDED INSERT OVERWRITE TABLE dest1 SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 4 on key) s WHERE s.key > 100;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME srcbucket) (TOK_TABLEBUCKETSAMPLE 1 4 (TOK_TABLE_OR_COL key)) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME s)))) (TOK_WHERE (> (. (TOK_TABLE_OR_COL s) key) 100))))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5'
'  Stage-4'
'  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6'
'  Stage-2 depends on stages: Stage-0'
'  Stage-3'
'  Stage-5'
'  Stage-6 depends on stages: Stage-5'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        s '
'          TableScan'
'            alias: s'
'            GatherStats: false'
'            Filter Operator'
'              isSamplingPred: false'
'              predicate:'
'                  expr: (key > 100)'
'                  type: boolean'
'              Filter Operator'
'                isSamplingPred: true'
'                predicate:'
'                    expr: (((hash(key) & 2147483647) % 4) = 0)'
'                    type: boolean'
'                Select Operator'
'                  expressions:'
'                        expr: key'
'                        type: int'
'                        expr: value'
'                        type: string'
'                  outputColumnNames: _col0, _col1'
'                  File Output Operator'
'                    compressed: false'
'                    GlobalTableId: 1'
'                    directory: pfile:!!{hive.exec.scratchdir}!!'
'                    NumFilesPerFileSink: 1'
'                    Stats Publishing Key Prefix: pfile:!!{hive.exec.scratchdir}!!'
'                    table:'
'                        input format: org.apache.hadoop.mapred.TextInputFormat'
'                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                        properties:'
'                          bucket_count -1'
'                          columns key,value'
'                          columns.types int:string'
'                          file.inputformat org.apache.hadoop.mapred.TextInputFormat'
'                          file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                          location !!{hive.metastore.warehouse.dir}!!/sample7.db/dest1'
'                          name sample7.dest1'
'                          serialization.ddl struct dest1 { i32 key, string value}'
'                          serialization.format 1'
'                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                          transient_lastDdlTime !!UNIXTIME!!'
'                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                        name: sample7.dest1'
'                    TotalFiles: 1'
'                    GatherStats: true'
'                    MultiFileSpray: false'
'      Needs Tagging: false'
'      Path -> Alias:'
'        !!{hive.metastore.warehouse.dir}!!/sample7.db/srcbucket/srcbucket0.txt [s]'
'      Path -> Partition:'
'        !!{hive.metastore.warehouse.dir}!!/sample7.db/srcbucket/srcbucket0.txt '
'          Partition'
'            base file name: srcbucket0.txt'
'            input format: org.apache.hadoop.mapred.TextInputFormat'
'            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'            properties:'
'              bucket_count 2'
'              bucket_field_name key'
'              columns key,value'
'              columns.types int:string'
'              file.inputformat org.apache.hadoop.mapred.TextInputFormat'
'              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              location !!{hive.metastore.warehouse.dir}!!/sample7.db/srcbucket'
'              name sample7.srcbucket'
'              numFiles 2'
'              numPartitions 0'
'              numRows 0'
'              rawDataSize 0'
'              serialization.ddl struct srcbucket { i32 key, string value}'
'              serialization.format 1'
'              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              totalSize 11603'
'              transient_lastDdlTime !!UNIXTIME!!'
'            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'          '
'              input format: org.apache.hadoop.mapred.TextInputFormat'
'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              properties:'
'                bucket_count 2'
'                bucket_field_name key'
'                columns key,value'
'                columns.types int:string'
'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                location !!{hive.metastore.warehouse.dir}!!/sample7.db/srcbucket'
'                name sample7.srcbucket'
'                numFiles 2'
'                numPartitions 0'
'                numRows 0'
'                rawDataSize 0'
'                serialization.ddl struct srcbucket { i32 key, string value}'
'                serialization.format 1'
'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                totalSize 11603'
'                transient_lastDdlTime !!UNIXTIME!!'
'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              name: sample7.srcbucket'
'            name: sample7.srcbucket'
''
'  Stage: Stage-7'
'    Conditional Operator'
''
'  Stage: Stage-4'
'    Move Operator'
'      files:'
'          hdfs directory: true'
'          source: pfile:!!{hive.exec.scratchdir}!!'
'          destination: pfile:!!{hive.exec.scratchdir}!!'
''
'  Stage: Stage-0'
'    Move Operator'
'      tables:'
'          replace: true'
'          source: pfile:!!{hive.exec.scratchdir}!!'
'          table:'
'              input format: org.apache.hadoop.mapred.TextInputFormat'
'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              properties:'
'                bucket_count -1'
'                columns key,value'
'                columns.types int:string'
'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                location !!{hive.metastore.warehouse.dir}!!/sample7.db/dest1'
'                name sample7.dest1'
'                serialization.ddl struct dest1 { i32 key, string value}'
'                serialization.format 1'
'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                transient_lastDdlTime !!UNIXTIME!!'
'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              name: sample7.dest1'
'          tmp directory: pfile:!!{hive.exec.scratchdir}!!'
''
'  Stage: Stage-2'
'    Stats-Aggr Operator'
'      Stats Aggregation Key Prefix: pfile:!!{hive.exec.scratchdir}!!'
''
'  Stage: Stage-3'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        pfile:!!{hive.exec.scratchdir}!! '
'            File Output Operator'
'              compressed: false'
'              GlobalTableId: 0'
'              directory: pfile:!!{hive.exec.scratchdir}!!'
'              NumFilesPerFileSink: 1'
'              table:'
'                  input format: org.apache.hadoop.mapred.TextInputFormat'
'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                  properties:'
'                    bucket_count -1'
'                    columns key,value'
'                    columns.types int:string'
'                    file.inputformat org.apache.hadoop.mapred.TextInputFormat'
'                    file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                    location !!{hive.metastore.warehouse.dir}!!/sample7.db/dest1'
'                    name sample7.dest1'
'                    serialization.ddl struct dest1 { i32 key, string value}'
'                    serialization.format 1'
'                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                    transient_lastDdlTime !!UNIXTIME!!'
'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                  name: sample7.dest1'
'              TotalFiles: 1'
'              GatherStats: false'
'              MultiFileSpray: false'
'      Needs Tagging: false'
'      Path -> Alias:'
'        pfile:!!{hive.exec.scratchdir}!! [pfile:!!{hive.exec.scratchdir}!!]'
'      Path -> Partition:'
'        pfile:!!{hive.exec.scratchdir}!! '
'          Partition'
'            base file name: -ext-10002'
'            input format: org.apache.hadoop.mapred.TextInputFormat'
'            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'            properties:'
'              bucket_count -1'
'              columns key,value'
'              columns.types int:string'
'              file.inputformat org.apache.hadoop.mapred.TextInputFormat'
'              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              location !!{hive.metastore.warehouse.dir}!!/sample7.db/dest1'
'              name sample7.dest1'
'              serialization.ddl struct dest1 { i32 key, string value}'
'              serialization.format 1'
'              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              transient_lastDdlTime !!UNIXTIME!!'
'            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'          '
'              input format: org.apache.hadoop.mapred.TextInputFormat'
'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              properties:'
'                bucket_count -1'
'                columns key,value'
'                columns.types int:string'
'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                location !!{hive.metastore.warehouse.dir}!!/sample7.db/dest1'
'                name sample7.dest1'
'                serialization.ddl struct dest1 { i32 key, string value}'
'                serialization.format 1'
'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                transient_lastDdlTime !!UNIXTIME!!'
'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              name: sample7.dest1'
'            name: sample7.dest1'
''
'  Stage: Stage-5'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        pfile:!!{hive.exec.scratchdir}!! '
'            File Output Operator'
'              compressed: false'
'              GlobalTableId: 0'
'              directory: pfile:!!{hive.exec.scratchdir}!!'
'              NumFilesPerFileSink: 1'
'              table:'
'                  input format: org.apache.hadoop.mapred.TextInputFormat'
'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                  properties:'
'                    bucket_count -1'
'                    columns key,value'
'                    columns.types int:string'
'                    file.inputformat org.apache.hadoop.mapred.TextInputFormat'
'                    file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                    location !!{hive.metastore.warehouse.dir}!!/sample7.db/dest1'
'                    name sample7.dest1'
'                    serialization.ddl struct dest1 { i32 key, string value}'
'                    serialization.format 1'
'                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                    transient_lastDdlTime !!UNIXTIME!!'
'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                  name: sample7.dest1'
'              TotalFiles: 1'
'              GatherStats: false'
'              MultiFileSpray: false'
'      Needs Tagging: false'
'      Path -> Alias:'
'        pfile:!!{hive.exec.scratchdir}!! [pfile:!!{hive.exec.scratchdir}!!]'
'      Path -> Partition:'
'        pfile:!!{hive.exec.scratchdir}!! '
'          Partition'
'            base file name: -ext-10002'
'            input format: org.apache.hadoop.mapred.TextInputFormat'
'            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'            properties:'
'              bucket_count -1'
'              columns key,value'
'              columns.types int:string'
'              file.inputformat org.apache.hadoop.mapred.TextInputFormat'
'              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              location !!{hive.metastore.warehouse.dir}!!/sample7.db/dest1'
'              name sample7.dest1'
'              serialization.ddl struct dest1 { i32 key, string value}'
'              serialization.format 1'
'              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              transient_lastDdlTime !!UNIXTIME!!'
'            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'          '
'              input format: org.apache.hadoop.mapred.TextInputFormat'
'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              properties:'
'                bucket_count -1'
'                columns key,value'
'                columns.types int:string'
'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                location !!{hive.metastore.warehouse.dir}!!/sample7.db/dest1'
'                name sample7.dest1'
'                serialization.ddl struct dest1 { i32 key, string value}'
'                serialization.format 1'
'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                transient_lastDdlTime !!UNIXTIME!!'
'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              name: sample7.dest1'
'            name: sample7.dest1'
''
'  Stage: Stage-6'
'    Move Operator'
'      files:'
'          hdfs directory: true'
'          source: pfile:!!{hive.exec.scratchdir}!!'
'          destination: pfile:!!{hive.exec.scratchdir}!!'
''
''
303 rows selected
>>> 
>>> INSERT OVERWRITE TABLE dest1 SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 4 on key) s WHERE s.key > 100;
'key','value'
No rows selected
>>> 
>>> SELECT dest1.* FROM dest1;
'key','value'
'468','val_469'
'272','val_273'
'448','val_449'
'440','val_441'
'296','val_297'
'428','val_429'
'356','val_357'
'128','val_129'
'240','val_241'
'408','val_409'
'476','val_477'
'424','val_425'
'488','val_489'
'128','val_129'
'468','val_469'
'224','val_225'
'344','val_345'
'304','val_305'
'264','val_265'
'196','val_197'
'492','val_493'
'360','val_361'
'492','val_493'
'376','val_377'
'120','val_121'
'132','val_133'
'388','val_389'
'184','val_185'
'284','val_285'
'352','val_353'
'328','val_329'
'480','val_481'
'480','val_481'
'392','val_393'
'476','val_477'
'252','val_253'
'264','val_265'
'336','val_337'
'340','val_341'
'484','val_485'
'260','val_261'
'164','val_165'
'104','val_105'
'140','val_141'
'212','val_213'
'308','val_309'
'416','val_417'
'364','val_365'
'168','val_169'
'384','val_385'
'324','val_325'
'404','val_405'
'260','val_261'
'328','val_329'
'404','val_405'
'384','val_385'
'116','val_117'
'104','val_105'
'132','val_133'
'192','val_193'
'356','val_357'
'352','val_353'
'160','val_161'
'412','val_413'
'204','val_205'
'216','val_217'
'196','val_197'
'384','val_385'
'404','val_405'
'300','val_301'
'268','val_269'
'392','val_393'
'104','val_105'
'436','val_437'
'156','val_157'
'172','val_173'
'244','val_245'
'284','val_285'
'164','val_165'
'136','val_137'
'432','val_433'
'496','val_497'
'144','val_145'
'408','val_409'
'152','val_153'
'348','val_349'
'292','val_293'
'152','val_153'
'256','val_257'
'292','val_293'
'412','val_413'
'156','val_157'
'228','val_229'
'248','val_249'
'244','val_245'
'276','val_277'
'196','val_197'
'440','val_441'
'308','val_309'
'468','val_469'
'152','val_153'
'300','val_301'
'244','val_245'
'484','val_484'
'224','val_224'
'128','val_128'
'152','val_152'
'252','val_252'
'292','val_292'
'208','val_208'
'396','val_396'
'128','val_128'
'316','val_316'
'280','val_280'
'208','val_208'
'356','val_356'
'192','val_192'
'176','val_176'
'216','val_216'
'176','val_176'
'332','val_332'
'180','val_180'
'284','val_284'
'260','val_260'
'404','val_404'
'384','val_384'
'272','val_272'
'348','val_348'
'208','val_208'
'348','val_348'
'172','val_172'
'496','val_496'
'468','val_468'
'120','val_120'
'404','val_404'
'436','val_436'
'156','val_156'
'468','val_468'
'308','val_308'
'196','val_196'
'288','val_288'
'316','val_316'
'364','val_364'
'224','val_224'
'392','val_392'
'272','val_272'
'452','val_452'
'396','val_396'
'336','val_336'
'168','val_168'
'472','val_472'
'160','val_160'
'492','val_492'
'228','val_228'
'468','val_468'
'368','val_368'
'296','val_296'
'216','val_216'
'344','val_344'
'116','val_116'
'256','val_256'
'480','val_480'
'288','val_288'
'244','val_244'
'128','val_128'
'432','val_432'
'316','val_316'
'280','val_280'
'104','val_104'
'348','val_348'
'424','val_424'
'396','val_396'
'164','val_164'
'164','val_164'
'424','val_424'
'480','val_480'
'104','val_104'
'200','val_200'
'360','val_360'
'248','val_248'
'444','val_444'
'120','val_120'
'468','val_468'
'460','val_460'
'480','val_480'
'136','val_136'
'172','val_172'
'384','val_384'
'256','val_256'
'384','val_384'
'492','val_492'
'348','val_348'
'344','val_344'
'448','val_448'
'152','val_152'
'348','val_348'
'400','val_400'
'200','val_200'
198 rows selected
>>> !record
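
Note on the sampling predicate (an editor's sketch, not part of the recorded run): srcbucket is clustered into 2 buckets on key, but the query samples BUCKET 1 OUT OF 4, so Hive combines input pruning with a residual filter, as the comment in the test notes. Pruning keeps only srcbucket0.txt (the file for bucket 1 of 2, visible under Path -> Alias above), and the plan's sampling filter, (((hash(key) & 2147483647) % 4) = 0), discards the half of that file outside the requested quarter: the column hash is masked to a non-negative int, taken modulo the OUT OF count (4), and compared with the bucket number minus one (0). Assuming Hive's builtin hash() UDF, which applies the same hash used for bucketing, an equivalent explicit query would be:

  SELECT s.*
  FROM srcbucket s
  WHERE (hash(s.key) & 2147483647) % 4 = 0   -- rows of sample bucket 1 of 4
    AND s.key > 100;                         -- original WHERE clause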