PREHOOK: query: CREATE TABLE dest1(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
POSTHOOK: query: CREATE TABLE dest1(key STRING, c1 INT, c2 STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@dest1
PREHOOK: query: EXPLAIN EXTENDED
FROM srcpart src
INSERT OVERWRITE TABLE dest1
SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5)))
WHERE src.ds = '2008-04-08'
GROUP BY substr(src.key,1,1)
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN EXTENDED
FROM srcpart src
INSERT OVERWRITE TABLE dest1
SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5)))
WHERE src.ds = '2008-04-08'
GROUP BY substr(src.key,1,1)
POSTHOOK: type: QUERY
ABSTRACT SYNTAX TREE:
  (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1) (TOK_FUNCTION sum (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) value) 5))))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL src) ds) '2008-04-08')) (TOK_GROUPBY (TOK_FUNCTION substr (. (TOK_TABLE_OR_COL src) key) 1 1))))

STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-0 depends on stages: Stage-1

STAGE PLANS:
  Stage: Stage-1
    Map Reduce
      Alias -> Map Operator Tree:
        src
          TableScan
            alias: src
            Filter Operator
              isSamplingPred: false
              predicate:
                  expr: (ds = '2008-04-08')
                  type: boolean
              Filter Operator
                isSamplingPred: false
                predicate:
                    expr: (ds = '2008-04-08')
                    type: boolean
                Select Operator
                  expressions:
                        expr: key
                        type: string
                        expr: value
                        type: string
                  outputColumnNames: key, value
                  Reduce Output Operator
                    key expressions:
                          expr: substr(key, 1, 1)
                          type: string
                          expr: substr(value, 5)
                          type: string
                    sort order: ++
                    Map-reduce partition columns:
                          expr: substr(key, 1, 1)
                          type: string
                    tag: -1
      Needs Tagging: false
      Path -> Alias:
        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [src]
        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [src]
      Path -> Partition:
        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11
          Partition
            base file name: hr=11
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            partition values:
              ds 2008-04-08
              hr 11
            properties:
              bucket_count -1
              columns key,value
              columns.types string:string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/srcpart
              name srcpart
              partition_columns ds/hr
              serialization.ddl struct srcpart { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              transient_lastDdlTime 1270516411
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
                bucket_count -1
                columns key,value
                columns.types string:string
                file.inputformat org.apache.hadoop.mapred.TextInputFormat
                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/srcpart
                name srcpart
                partition_columns ds/hr
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                transient_lastDdlTime 1270516411
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: srcpart
            name: srcpart
        file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12
          Partition
            base file name: hr=12
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            partition values:
              ds 2008-04-08
              hr 12
            properties:
              bucket_count -1
              columns key,value
              columns.types string:string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/srcpart
              name srcpart
              partition_columns ds/hr
              serialization.ddl struct srcpart { string key, string value}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              transient_lastDdlTime 1270516411
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
                bucket_count -1
                columns key,value
                columns.types string:string
                file.inputformat org.apache.hadoop.mapred.TextInputFormat
                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/srcpart
                name srcpart
                partition_columns ds/hr
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                transient_lastDdlTime 1270516411
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: srcpart
            name: srcpart
      Reduce Operator Tree:
        Group By Operator
          aggregations:
                expr: count(DISTINCT KEY._col1)
                expr: sum(KEY._col1)
          bucketGroup: false
          keys:
                expr: KEY._col0
                type: string
          mode: complete
          outputColumnNames: _col0, _col1, _col2
          Select Operator
            expressions:
                  expr: _col0
                  type: string
                  expr: _col1
                  type: bigint
                  expr: concat(_col0, _col2)
                  type: string
            outputColumnNames: _col0, _col1, _col2
            Select Operator
              expressions:
                    expr: _col0
                    type: string
                    expr: UDFToInteger(_col1)
                    type: int
                    expr: _col2
                    type: string
              outputColumnNames: _col0, _col1, _col2
              File Output Operator
                compressed: false
                GlobalTableId: 1
                directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-13-32_566_7927327298810422170/10000
                NumFilesPerFileSink: 1
                table:
                    input format: org.apache.hadoop.mapred.TextInputFormat
                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                    properties:
                      bucket_count -1
                      columns key,c1,c2
                      columns.types string:int:string
                      file.inputformat org.apache.hadoop.mapred.TextInputFormat
                      file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                      location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/dest1
                      name dest1
                      serialization.ddl struct dest1 { string key, i32 c1, string c2}
                      serialization.format 1
                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                      transient_lastDdlTime 1270516412
                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                    name: dest1
                TotalFiles: 1
                MultiFileSpray: false

  Stage: Stage-0
    Move Operator
      tables:
          replace: true
          source: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-13-32_566_7927327298810422170/10000
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
                bucket_count -1
                columns key,c1,c2
                columns.types string:int:string
                file.inputformat org.apache.hadoop.mapred.TextInputFormat
                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                location file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/dest1
                name dest1
                serialization.ddl struct dest1 { string key, i32 c1, string c2}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                transient_lastDdlTime 1270516412
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: dest1
          tmp directory: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-13-32_566_7927327298810422170/10001

PREHOOK: query: FROM srcpart src
INSERT OVERWRITE TABLE dest1
SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5)))
WHERE src.ds = '2008-04-08'
GROUP BY substr(src.key,1,1)
PREHOOK: type: QUERY
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Output: default@dest1
POSTHOOK: query: FROM srcpart src
INSERT OVERWRITE TABLE dest1
SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5)))
WHERE src.ds = '2008-04-08'
GROUP BY substr(src.key,1,1)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
POSTHOOK: Output: default@dest1
POSTHOOK: Lineage: dest1.c1 EXPRESSION [(srcpart)src.FieldSchema(name:hr, type:string, comment:null), ]
POSTHOOK: Lineage: dest1.c2 EXPRESSION [(srcpart)src.FieldSchema(name:ds, type:string, comment:null), (srcpart)src.FieldSchema(name:hr, type:string, comment:null), ]
POSTHOOK: Lineage: dest1.key EXPRESSION [(srcpart)src.FieldSchema(name:ds, type:string, comment:null), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-13-37_250_7713232823116205808/10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-13-37_250_7713232823116205808/10000
POSTHOOK: Lineage: dest1.c1 EXPRESSION [(srcpart)src.FieldSchema(name:hr, type:string, comment:null), ]
POSTHOOK: Lineage: dest1.c2 EXPRESSION [(srcpart)src.FieldSchema(name:ds, type:string, comment:null), (srcpart)src.FieldSchema(name:hr, type:string, comment:null), ]
POSTHOOK: Lineage: dest1.key EXPRESSION [(srcpart)src.FieldSchema(name:ds, type:string, comment:null), ]
0	1	00.0
1	71	132828.0
2	69	251142.0
3	62	364008.0
4	74	4105526.0
5	6	5794.0
6	5	6796.0
7	6	71470.0
8	8	81524.0
9	7	92094.0