Saving all output to "!!{outputDirectory}!!/rand_partitionpruner1.q.raw". Enter "record" with no arguments to stop it.
>>>  !run !!{qFileDirectory}!!/rand_partitionpruner1.q
>>>  -- scanning un-partitioned data
>>>  explain extended select * from src where rand(1) < 0.1;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (TOK_FUNCTION rand 1) 0.1))))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-0 is a root stage'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        src '
'          TableScan'
'            alias: src'
'            GatherStats: false'
'            Filter Operator'
'              isSamplingPred: false'
'              predicate:'
'                  expr: (rand(1) < 0.1)'
'                  type: boolean'
'              Select Operator'
'                expressions:'
'                      expr: key'
'                      type: string'
'                      expr: value'
'                      type: string'
'                outputColumnNames: _col0, _col1'
'                File Output Operator'
'                  compressed: false'
'                  GlobalTableId: 0'
'                  directory: file:!!{hive.exec.scratchdir}!!'
'                  NumFilesPerFileSink: 1'
'                  Stats Publishing Key Prefix: file:!!{hive.exec.scratchdir}!!'
'                  table:'
'                      input format: org.apache.hadoop.mapred.TextInputFormat'
'                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                      properties:'
'                        columns _col0,_col1'
'                        columns.types string:string'
'                        escape.delim \'
'                        serialization.format 1'
'                  TotalFiles: 1'
'                  GatherStats: false'
'                  MultiFileSpray: false'
'      Needs Tagging: false'
'      Path -> Alias:'
'        !!{hive.metastore.warehouse.dir}!!/rand_partitionpruner1.db/src [src]'
'      Path -> Partition:'
'        !!{hive.metastore.warehouse.dir}!!/rand_partitionpruner1.db/src '
'          Partition'
'            base file name: src'
'            input format: org.apache.hadoop.mapred.TextInputFormat'
'            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'            properties:'
'              bucket_count -1'
'              columns key,value'
'              columns.types string:string'
'              file.inputformat org.apache.hadoop.mapred.TextInputFormat'
'              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              location !!{hive.metastore.warehouse.dir}!!/rand_partitionpruner1.db/src'
'              name rand_partitionpruner1.src'
'              numFiles 1'
'              numPartitions 0'
'              numRows 0'
'              rawDataSize 0'
'              serialization.ddl struct src { string key, string value}'
'              serialization.format 1'
'              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              totalSize 5812'
'              transient_lastDdlTime !!UNIXTIME!!'
'            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'          '
'              input format: org.apache.hadoop.mapred.TextInputFormat'
'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              properties:'
'                bucket_count -1'
'                columns key,value'
'                columns.types string:string'
'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                location !!{hive.metastore.warehouse.dir}!!/rand_partitionpruner1.db/src'
'                name rand_partitionpruner1.src'
'                numFiles 1'
'                numPartitions 0'
'                numRows 0'
'                rawDataSize 0'
'                serialization.ddl struct src { string key, string value}'
'                serialization.format 1'
'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                totalSize 5812'
'                transient_lastDdlTime !!UNIXTIME!!'
'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              name: rand_partitionpruner1.src'
'            name: rand_partitionpruner1.src'
''
'  Stage: Stage-0'
'    Fetch Operator'
'      limit: -1'
''
''
100 rows selected
>>>  select * from src where rand(1) < 0.1;
'key','value'
'409','val_409'
'429','val_429'
'209','val_209'
'153','val_153'
'203','val_203'
'170','val_170'
'489','val_489'
'378','val_378'
'221','val_221'
'498','val_498'
'469','val_469'
'176','val_176'
'176','val_176'
'384','val_384'
'217','val_217'
'431','val_431'
'51','val_51'
'288','val_288'
'457','val_457'
'197','val_197'
'77','val_77'
'138','val_138'
'277','val_277'
'224','val_224'
'309','val_309'
'389','val_389'
'331','val_331'
'317','val_317'
'336','val_336'
'42','val_42'
'458','val_458'
'78','val_78'
'453','val_453'
'74','val_74'
'103','val_103'
'467','val_467'
'202','val_202'
'469','val_469'
'44','val_44'
'454','val_454'
'70','val_70'
'491','val_491'
'199','val_199'
'169','val_169'
'310','val_310'
'233','val_233'
'133','val_133'
'26','val_26'
'134','val_134'
'18','val_18'
'298','val_298'
'348','val_348'
'469','val_469'
'37','val_37'
'152','val_152'
'400','val_400'
56 rows selected
>>>  !record