PREHOOK: query: EXPLAIN EXTENDED
FROM (
  FROM src
  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
  USING '/bin/cat'
  CLUSTER BY key
) tmap
SELECT tmap.key, regexp_extract(tmap.value, 'val_(\\d+\\t\\d+)',1) WHERE tmap.key < 100
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN EXTENDED
FROM (
  FROM src
  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
  USING '/bin/cat'
  CLUSTER BY key
) tmap
SELECT tmap.key, regexp_extract(tmap.value, 'val_(\\d+\\t\\d+)',1) WHERE tmap.key < 100
POSTHOOK: type: QUERY
ABSTRACT SYNTAX TREE:
  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value) (+ 1 2) (+ 3 4)) TOK_SERDE TOK_RECORDWRITER '/bin/cat' TOK_SERDE TOK_RECORDREADER))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL key)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) key)) (TOK_SELEXPR (TOK_FUNCTION regexp_extract (. (TOK_TABLE_OR_COL tmap) value) 'val_(\\d+\\t\\d+)' 1))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) key) 100))))

STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-1
    Map Reduce
      Alias -> Map Operator Tree:
        tmap:src
          TableScan
            alias: src
            GatherStats: false
            Select Operator
              expressions:
                    expr: key
                    type: string
                    expr: value
                    type: string
                    expr: (1 + 2)
                    type: int
                    expr: (3 + 4)
                    type: int
              outputColumnNames: _col0, _col1, _col2, _col3
              Transform Operator
                command: /bin/cat
                output info:
                    input format: org.apache.hadoop.mapred.TextInputFormat
                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                    properties:
                      columns _col0,_col1
                      columns.types string,string
                      field.delim 9
                      serialization.format 9
                      serialization.last.column.takes.rest true
                Filter Operator
                  isSamplingPred: false
                  predicate:
                      expr: (_col0 < 100)
                      type: boolean
                  Reduce Output Operator
                    key expressions:
                          expr: _col0
                          type: string
                    sort order: +
                    Map-reduce partition columns:
                          expr: _col0
                          type: string
                    tag: -1
                    value expressions:
                          expr: _col0
                          type: string
                          expr: _col1
                          type: string
      Needs Tagging: false
      Path -> Alias:
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [tmap:src]
      Path -> Partition:
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
          Partition
            base file name: src
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            properties:
              bucket_count -1
              columns key,value
              columns.types string:string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
              name default.src
              serialization.ddl struct src { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              transient_lastDdlTime 1297378968
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
                bucket_count -1
                columns key,value
                columns.types string:string
                file.inputformat org.apache.hadoop.mapred.TextInputFormat
                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
                name default.src
                serialization.ddl struct src { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                transient_lastDdlTime 1297378968
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.src
            name: default.src
      Reduce Operator Tree:
        Extract
          Select Operator
            expressions:
                  expr: _col0
                  type: string
                  expr: regexp_extract(_col1, 'val_(\d+\t\d+)', 1)
                  type: string
            outputColumnNames: _col0, _col1
            File Output Operator
              compressed: false
              GlobalTableId: 0
              directory: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-47-02_223_1393307901149024596/-ext-10001
              NumFilesPerFileSink: 1
              Stats Publishing Key Prefix: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-47-02_223_1393307901149024596/-ext-10001/
              table:
                  input format: org.apache.hadoop.mapred.TextInputFormat
                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                  properties:
                    columns _col0,_col1
                    columns.types string:string
                    serialization.format 1
              TotalFiles: 1
              GatherStats: false
              MultiFileSpray: false

  Stage: Stage-0
    Fetch Operator
      limit: -1

PREHOOK: query: FROM (
  FROM src
  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
  USING '/bin/cat'
  CLUSTER BY key
) tmap
SELECT tmap.key, regexp_extract(tmap.value, 'val_(\\d+\\t\\d+)',1) WHERE tmap.key < 100
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-01-39_518_2112543687006032682/-mr-10000
POSTHOOK: query: FROM (
  FROM src
  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
  USING '/bin/cat'
  CLUSTER BY key
) tmap
SELECT tmap.key, regexp_extract(tmap.value, 'val_(\\d+\\t\\d+)',1) WHERE tmap.key < 100
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-01-39_518_2112543687006032682/-mr-10000
0	0	3
0	0	3
0	0	3
10	10	3
11	11	3
12	12	3
12	12	3
15	15	3
15	15	3
17	17	3
18	18	3
18	18	3
19	19	3
2	2	3
20	20	3
24	24	3
24	24	3
26	26	3
26	26	3
27	27	3
28	28	3
30	30	3
33	33	3
34	34	3
35	35	3
35	35	3
35	35	3
37	37	3
37	37	3
4	4	3
41	41	3
42	42	3
42	42	3
43	43	3
44	44	3
47	47	3
5	5	3
5	5	3
5	5	3
51	51	3
51	51	3
53	53	3
54	54	3
57	57	3
58	58	3
58	58	3
64	64	3
65	65	3
66	66	3
67	67	3
67	67	3
69	69	3
70	70	3
70	70	3
70	70	3
72	72	3
72	72	3
74	74	3
76	76	3
76	76	3
77	77	3
78	78	3
8	8	3
80	80	3
82	82	3
83	83	3
83	83	3
84	84	3
84	84	3
85	85	3
86	86	3
87	87	3
9	9	3
90	90	3
90	90	3
90	90	3
92	92	3
95	95	3
95	95	3
96	96	3
97	97	3
97	97	3
98	98	3
98	98	3
PREHOOK: query: EXPLAIN EXTENDED
FROM (
  FROM src
  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
  USING '/bin/cat'
  CLUSTER BY key
) tmap
SELECT tmap.key, regexp_extract(tmap.value, 'val_(\\d+\\t\\d+)') WHERE tmap.key < 100
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN EXTENDED
FROM (
  FROM src
  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
  USING '/bin/cat'
  CLUSTER BY key
) tmap
SELECT tmap.key, regexp_extract(tmap.value, 'val_(\\d+\\t\\d+)') WHERE tmap.key < 100
POSTHOOK: type: QUERY
ABSTRACT SYNTAX TREE:
  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value) (+ 1 2) (+ 3 4)) TOK_SERDE TOK_RECORDWRITER '/bin/cat' TOK_SERDE TOK_RECORDREADER))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL key)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) key)) (TOK_SELEXPR (TOK_FUNCTION regexp_extract (. (TOK_TABLE_OR_COL tmap) value) 'val_(\\d+\\t\\d+)'))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) key) 100))))

STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-1
    Map Reduce
      Alias -> Map Operator Tree:
        tmap:src
          TableScan
            alias: src
            GatherStats: false
            Select Operator
              expressions:
                    expr: key
                    type: string
                    expr: value
                    type: string
                    expr: (1 + 2)
                    type: int
                    expr: (3 + 4)
                    type: int
              outputColumnNames: _col0, _col1, _col2, _col3
              Transform Operator
                command: /bin/cat
                output info:
                    input format: org.apache.hadoop.mapred.TextInputFormat
                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                    properties:
                      columns _col0,_col1
                      columns.types string,string
                      field.delim 9
                      serialization.format 9
                      serialization.last.column.takes.rest true
                Filter Operator
                  isSamplingPred: false
                  predicate:
                      expr: (_col0 < 100)
                      type: boolean
                  Reduce Output Operator
                    key expressions:
                          expr: _col0
                          type: string
                    sort order: +
                    Map-reduce partition columns:
                          expr: _col0
                          type: string
                    tag: -1
                    value expressions:
                          expr: _col0
                          type: string
                          expr: _col1
                          type: string
      Needs Tagging: false
      Path -> Alias:
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [tmap:src]
      Path -> Partition:
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
          Partition
            base file name: src
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            properties:
              bucket_count -1
              columns key,value
              columns.types string:string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
              name default.src
              serialization.ddl struct src { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              transient_lastDdlTime 1297378968
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
                bucket_count -1
                columns key,value
                columns.types string:string
                file.inputformat org.apache.hadoop.mapred.TextInputFormat
                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src
                name default.src
                serialization.ddl struct src { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                transient_lastDdlTime 1297378968
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.src
            name: default.src
      Reduce Operator Tree:
        Extract
          Select Operator
            expressions:
                  expr: _col0
                  type: string
                  expr: regexp_extract(_col1, 'val_(\d+\t\d+)')
                  type: string
            outputColumnNames: _col0, _col1
            File Output Operator
              compressed: false
              GlobalTableId: 0
              directory: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-47-14_408_4797588639767330030/-ext-10001
              NumFilesPerFileSink: 1
              Stats Publishing Key Prefix: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-47-14_408_4797588639767330030/-ext-10001/
              table:
                  input format: org.apache.hadoop.mapred.TextInputFormat
                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                  properties:
                    columns _col0,_col1
                    columns.types string:string
                    serialization.format 1
              TotalFiles: 1
              GatherStats: false
              MultiFileSpray: false

  Stage: Stage-0
    Fetch Operator
      limit: -1

PREHOOK: query: FROM (
  FROM src
  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
  USING '/bin/cat'
  CLUSTER BY key
) tmap
SELECT tmap.key, regexp_extract(tmap.value, 'val_(\\d+\\t\\d+)') WHERE tmap.key < 100
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-01-43_099_3379262351848426372/-mr-10000
POSTHOOK: query: FROM (
  FROM src
  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
  USING '/bin/cat'
  CLUSTER BY key
) tmap
SELECT tmap.key, regexp_extract(tmap.value, 'val_(\\d+\\t\\d+)') WHERE tmap.key < 100
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-01-43_099_3379262351848426372/-mr-10000
0	0	3
0	0	3
0	0	3
10	10	3
11	11	3
12	12	3
12	12	3
15	15	3
15	15	3
17	17	3
18	18	3
18	18	3
19	19	3
2	2	3
20	20	3
24	24	3
24	24	3
26	26	3
26	26	3
27	27	3
28	28	3
30	30	3
33	33	3
34	34	3
35	35	3
35	35	3
35	35	3
37	37	3
37	37	3
4	4	3
41	41	3
42	42	3
42	42	3
43	43	3
44	44	3
47	47	3
5	5	3
5	5	3
5	5	3
51	51	3
51	51	3
53	53	3
54	54	3
57	57	3
58	58	3
58	58	3
64	64	3
65	65	3
66	66	3
67	67	3
67	67	3
69	69	3
70	70	3
70	70	3
70	70	3
72	72	3
72	72	3
74	74	3
76	76	3
76	76	3
77	77	3
78	78	3
8	8	3
80	80	3
82	82	3
83	83	3
83	83	3
84	84	3
84	84	3
85	85	3
86	86	3
87	87	3
9	9	3
90	90	3
90	90	3
90	90	3
92	92	3
95	95	3
95	95	3
96	96	3
97	97	3
97	97	3
98	98	3
98	98	3