Saving all output to "!!{outputDirectory}!!/transform_ppr2.q.raw". Enter "record" with no arguments to stop it.
>>>  !run !!{qFileDirectory}!!/transform_ppr2.q
>>>  set hive.optimize.ppd=true;
No rows affected 
>>>  
>>>  EXPLAIN EXTENDED 
FROM ( 
  FROM srcpart src 
  SELECT TRANSFORM(src.ds, src.key, src.value) 
         USING 'cat' AS (ds, tkey, tvalue) 
  WHERE src.ds = '2008-04-08' 
  CLUSTER BY tkey 
) tmap 
SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME srcpart) src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) ds) (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE TOK_RECORDWRITER 'cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST ds tkey tvalue)))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL src) ds) '2008-04-08')) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) tkey) 100))))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-0 is a root stage'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        tmap:src '
'          TableScan'
'            alias: src'
'            GatherStats: false'
'            Select Operator'
'              expressions:'
'                    expr: ds'
'                    type: string'
'                    expr: key'
'                    type: string'
'                    expr: value'
'                    type: string'
'              outputColumnNames: _col0, _col1, _col2'
'              Transform Operator'
'                command: cat'
'                output info:'
'                    input format: org.apache.hadoop.mapred.TextInputFormat'
'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                    properties:'
'                      columns _col0,_col1,_col2'
'                      columns.types string,string,string'
'                      field.delim 9'
'                      serialization.format 9'
'                Filter Operator'
'                  isSamplingPred: false'
'                  predicate:'
'                      expr: (_col1 < 100.0)'
'                      type: boolean'
'                  Reduce Output Operator'
'                    key expressions:'
'                          expr: _col1'
'                          type: string'
'                    sort order: +'
'                    Map-reduce partition columns:'
'                          expr: _col1'
'                          type: string'
'                    tag: -1'
'                    value expressions:'
'                          expr: _col0'
'                          type: string'
'                          expr: _col1'
'                          type: string'
'                          expr: _col2'
'                          type: string'
'      Needs Tagging: false'
'      Path -> Alias:'
'        !!{hive.metastore.warehouse.dir}!!/transform_ppr2.db/srcpart/ds=2008-04-08/hr=11 [tmap:src]'
'        !!{hive.metastore.warehouse.dir}!!/transform_ppr2.db/srcpart/ds=2008-04-08/hr=12 [tmap:src]'
'      Path -> Partition:'
'        !!{hive.metastore.warehouse.dir}!!/transform_ppr2.db/srcpart/ds=2008-04-08/hr=11 '
'          Partition'
'            base file name: hr=11'
'            input format: org.apache.hadoop.mapred.TextInputFormat'
'            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'            partition values:'
'              ds 2008-04-08'
'              hr 11'
'            properties:'
'              bucket_count -1'
'              columns key,value'
'              columns.types string:string'
'              file.inputformat org.apache.hadoop.mapred.TextInputFormat'
'              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              location !!{hive.metastore.warehouse.dir}!!/transform_ppr2.db/srcpart/ds=2008-04-08/hr=11'
'              name transform_ppr2.srcpart'
'              numFiles 1'
'              numPartitions 4'
'              numRows 0'
'              partition_columns ds/hr'
'              rawDataSize 0'
'              serialization.ddl struct srcpart { string key, string value}'
'              serialization.format 1'
'              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              totalSize 5812'
'              transient_lastDdlTime !!UNIXTIME!!'
'            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'          '
'              input format: org.apache.hadoop.mapred.TextInputFormat'
'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              properties:'
'                bucket_count -1'
'                columns key,value'
'                columns.types string:string'
'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                location !!{hive.metastore.warehouse.dir}!!/transform_ppr2.db/srcpart'
'                name transform_ppr2.srcpart'
'                numFiles 4'
'                numPartitions 4'
'                numRows 0'
'                partition_columns ds/hr'
'                rawDataSize 0'
'                serialization.ddl struct srcpart { string key, string value}'
'                serialization.format 1'
'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                totalSize 23248'
'                transient_lastDdlTime !!UNIXTIME!!'
'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              name: transform_ppr2.srcpart'
'            name: transform_ppr2.srcpart'
'        !!{hive.metastore.warehouse.dir}!!/transform_ppr2.db/srcpart/ds=2008-04-08/hr=12 '
'          Partition'
'            base file name: hr=12'
'            input format: org.apache.hadoop.mapred.TextInputFormat'
'            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'            partition values:'
'              ds 2008-04-08'
'              hr 12'
'            properties:'
'              bucket_count -1'
'              columns key,value'
'              columns.types string:string'
'              file.inputformat org.apache.hadoop.mapred.TextInputFormat'
'              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              location !!{hive.metastore.warehouse.dir}!!/transform_ppr2.db/srcpart/ds=2008-04-08/hr=12'
'              name transform_ppr2.srcpart'
'              numFiles 1'
'              numPartitions 4'
'              numRows 0'
'              partition_columns ds/hr'
'              rawDataSize 0'
'              serialization.ddl struct srcpart { string key, string value}'
'              serialization.format 1'
'              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              totalSize 5812'
'              transient_lastDdlTime !!UNIXTIME!!'
'            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'          '
'              input format: org.apache.hadoop.mapred.TextInputFormat'
'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              properties:'
'                bucket_count -1'
'                columns key,value'
'                columns.types string:string'
'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                location !!{hive.metastore.warehouse.dir}!!/transform_ppr2.db/srcpart'
'                name transform_ppr2.srcpart'
'                numFiles 4'
'                numPartitions 4'
'                numRows 0'
'                partition_columns ds/hr'
'                rawDataSize 0'
'                serialization.ddl struct srcpart { string key, string value}'
'                serialization.format 1'
'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                totalSize 23248'
'                transient_lastDdlTime !!UNIXTIME!!'
'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              name: transform_ppr2.srcpart'
'            name: transform_ppr2.srcpart'
'      Reduce Operator Tree:'
'        Extract'
'          Select Operator'
'            expressions:'
'                  expr: _col1'
'                  type: string'
'                  expr: _col2'
'                  type: string'
'            outputColumnNames: _col0, _col1'
'            File Output Operator'
'              compressed: false'
'              GlobalTableId: 0'
'              directory: file:!!{hive.exec.scratchdir}!!'
'              NumFilesPerFileSink: 1'
'              Stats Publishing Key Prefix: file:!!{hive.exec.scratchdir}!!'
'              table:'
'                  input format: org.apache.hadoop.mapred.TextInputFormat'
'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                  properties:'
'                    columns _col0,_col1'
'                    columns.types string:string'
'                    escape.delim \'
'                    serialization.format 1'
'              TotalFiles: 1'
'              GatherStats: false'
'              MultiFileSpray: false'
''
'  Stage: Stage-0'
'    Fetch Operator'
'      limit: -1'
''
''
194 rows selected 
>>>  
>>>  FROM ( 
  FROM srcpart src 
  SELECT TRANSFORM(src.ds, src.key, src.value) 
         USING 'cat' AS (ds, tkey, tvalue) 
  WHERE src.ds = '2008-04-08' 
  CLUSTER BY tkey 
) tmap 
SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100;
'tkey','tvalue'
'0','val_0'
'0','val_0'
'0','val_0'
'0','val_0'
'0','val_0'
'0','val_0'
'10','val_10'
'10','val_10'
'11','val_11'
'11','val_11'
'12','val_12'
'12','val_12'
'12','val_12'
'12','val_12'
'15','val_15'
'15','val_15'
'15','val_15'
'15','val_15'
'17','val_17'
'17','val_17'
'18','val_18'
'18','val_18'
'18','val_18'
'18','val_18'
'19','val_19'
'19','val_19'
'2','val_2'
'2','val_2'
'20','val_20'
'20','val_20'
'24','val_24'
'24','val_24'
'24','val_24'
'24','val_24'
'26','val_26'
'26','val_26'
'26','val_26'
'26','val_26'
'27','val_27'
'27','val_27'
'28','val_28'
'28','val_28'
'30','val_30'
'30','val_30'
'33','val_33'
'33','val_33'
'34','val_34'
'34','val_34'
'35','val_35'
'35','val_35'
'35','val_35'
'35','val_35'
'35','val_35'
'35','val_35'
'37','val_37'
'37','val_37'
'37','val_37'
'37','val_37'
'4','val_4'
'4','val_4'
'41','val_41'
'41','val_41'
'42','val_42'
'42','val_42'
'42','val_42'
'42','val_42'
'43','val_43'
'43','val_43'
'44','val_44'
'44','val_44'
'47','val_47'
'47','val_47'
'5','val_5'
'5','val_5'
'5','val_5'
'5','val_5'
'5','val_5'
'5','val_5'
'51','val_51'
'51','val_51'
'51','val_51'
'51','val_51'
'53','val_53'
'53','val_53'
'54','val_54'
'54','val_54'
'57','val_57'
'57','val_57'
'58','val_58'
'58','val_58'
'58','val_58'
'58','val_58'
'64','val_64'
'64','val_64'
'65','val_65'
'65','val_65'
'66','val_66'
'66','val_66'
'67','val_67'
'67','val_67'
'67','val_67'
'67','val_67'
'69','val_69'
'69','val_69'
'70','val_70'
'70','val_70'
'70','val_70'
'70','val_70'
'70','val_70'
'70','val_70'
'72','val_72'
'72','val_72'
'72','val_72'
'72','val_72'
'74','val_74'
'74','val_74'
'76','val_76'
'76','val_76'
'76','val_76'
'76','val_76'
'77','val_77'
'77','val_77'
'78','val_78'
'78','val_78'
'8','val_8'
'8','val_8'
'80','val_80'
'80','val_80'
'82','val_82'
'82','val_82'
'83','val_83'
'83','val_83'
'83','val_83'
'83','val_83'
'84','val_84'
'84','val_84'
'84','val_84'
'84','val_84'
'85','val_85'
'85','val_85'
'86','val_86'
'86','val_86'
'87','val_87'
'87','val_87'
'9','val_9'
'9','val_9'
'90','val_90'
'90','val_90'
'90','val_90'
'90','val_90'
'90','val_90'
'90','val_90'
'92','val_92'
'92','val_92'
'95','val_95'
'95','val_95'
'95','val_95'
'95','val_95'
'96','val_96'
'96','val_96'
'97','val_97'
'97','val_97'
'97','val_97'
'97','val_97'
'98','val_98'
'98','val_98'
'98','val_98'
'98','val_98'
168 rows selected 
>>>  !record
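
Note on what the plan above demonstrates: with hive.optimize.ppd=true, the src.ds = '2008-04-08' predicate inside the subquery is resolved at compile time, so Path -> Alias lists only the two ds=2008-04-08 partitions (hr=11 and hr=12) out of the table's four, while the tkey < 100 predicate cannot be pushed through the 'cat' script and instead runs as a map-side Filter Operator after the Transform Operator. The srcpart fixture itself is created outside this transcript; the following is a minimal sketch consistent with the plan metadata (string key/value columns, ds/hr partition columns, four single-file partitions), where the data file name kv1.txt is an assumption rather than something recorded here:

CREATE TABLE srcpart (key STRING, value STRING)
PARTITIONED BY (ds STRING, hr STRING);

-- Four partitions, one file each, matching numFiles 4 / numPartitions 4 above.
LOAD DATA LOCAL INPATH 'kv1.txt' OVERWRITE INTO TABLE srcpart PARTITION (ds='2008-04-08', hr='11');
LOAD DATA LOCAL INPATH 'kv1.txt' OVERWRITE INTO TABLE srcpart PARTITION (ds='2008-04-08', hr='12');
LOAD DATA LOCAL INPATH 'kv1.txt' OVERWRITE INTO TABLE srcpart PARTITION (ds='2008-04-09', hr='11');
LOAD DATA LOCAL INPATH 'kv1.txt' OVERWRITE INTO TABLE srcpart PARTITION (ds='2008-04-09', hr='12');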
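CLUSTER BY tkey in the subquery is Hive shorthand for distributing and sorting on the same column, which is why the Reduce Output Operator shows _col1 both as the sort key (sort order: +) and as the map-reduce partition column. An equivalent spelling of the same query, as a sketch against the same fixture:

FROM (
  FROM srcpart src
  SELECT TRANSFORM(src.ds, src.key, src.value)
         USING 'cat' AS (ds, tkey, tvalue)
  WHERE src.ds = '2008-04-08'
  DISTRIBUTE BY tkey  -- shuffle key, same as CLUSTER BY
  SORT BY tkey        -- per-reducer sort, same as CLUSTER BY
) tmap
SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100;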
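Finally, tkey comes back from the script as a string, so tkey < 100 is evaluated with a numeric coercion (the Filter Operator shows (_col1 < 100.0)) while the CLUSTER BY sort is lexicographic, which is why '10' precedes '2' in the result rows. A hypothetical variant that casts the key to get numeric ordering (the ikey alias is illustrative, not part of the test):

FROM (
  FROM srcpart src
  SELECT TRANSFORM(src.ds, src.key, src.value)
         USING 'cat' AS (ds, tkey, tvalue)
  WHERE src.ds = '2008-04-08'
  CLUSTER BY tkey
) tmap
SELECT CAST(tmap.tkey AS INT) AS ikey, tmap.tvalue  -- numeric key instead of string
WHERE tmap.tkey < 100
ORDER BY ikey;  -- total numeric order; forces a single reducer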