PREHOOK: query: create table combine2(key string) partitioned by (value string)
PREHOOK: type: CREATETABLE
POSTHOOK: query: create table combine2(key string) partitioned by (value string)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@combine2
PREHOOK: query: insert overwrite table combine2 partition(value) select * from ( select key, value from src where key < 10 union all select key, '|' as value from src where key = 11 union all select key, '2010-04-21 09:45:00' value from src where key = 19) s
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@combine2
POSTHOOK: query: insert overwrite table combine2 partition(value) select * from ( select key, value from src where key < 10 union all select key, '|' as value from src where key = 11 union all select key, '2010-04-21 09:45:00' value from src where key = 19) s
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@combine2@value=2010-04-21 09%3A45%3A00
POSTHOOK: Output: default@combine2@value=val_0
POSTHOOK: Output: default@combine2@value=val_2
POSTHOOK: Output: default@combine2@value=val_4
POSTHOOK: Output: default@combine2@value=val_5
POSTHOOK: Output: default@combine2@value=val_8
POSTHOOK: Output: default@combine2@value=val_9
POSTHOOK: Output: default@combine2@value=|
POSTHOOK: Lineage: combine2 PARTITION(value=2010-04-21 09:45:00).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_0).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_4).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_5).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_8).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_9).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=|).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: show partitions combine2
PREHOOK: type: SHOWPARTITIONS
POSTHOOK: query: show partitions combine2
POSTHOOK: type: SHOWPARTITIONS
POSTHOOK: Lineage: combine2 PARTITION(value=2010-04-21 09:45:00).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_0).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_4).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_5).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_8).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_9).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=|).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
value=2010-04-21 09%3A45%3A00
value=val_0
value=val_2
value=val_4
value=val_5
value=val_8
value=val_9
value=|
PREHOOK: query: explain select key, value from combine2 where value is not null order by key
PREHOOK: type: QUERY
POSTHOOK: query: explain select key, value from combine2 where value is not null order by key
POSTHOOK: type: QUERY
POSTHOOK: Lineage: combine2 PARTITION(value=2010-04-21 09:45:00).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_0).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_4).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_5).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_8).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_9).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=|).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
ABSTRACT SYNTAX TREE:
  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME combine2))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (TOK_FUNCTION TOK_ISNOTNULL (TOK_TABLE_OR_COL value))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)))))

STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-1
    Map Reduce
      Alias -> Map Operator Tree:
        combine2 
          TableScan
            alias: combine2
            Filter Operator
              predicate:
                  expr: value is not null
                  type: boolean
              Select Operator
                expressions:
                      expr: key
                      type: string
                      expr: value
                      type: string
                outputColumnNames: _col0, _col1
                Reduce Output Operator
                  key expressions:
                        expr: _col0
                        type: string
                  sort order: +
                  tag: -1
                  value expressions:
                        expr: _col0
                        type: string
                        expr: _col1
                        type: string
      Reduce Operator Tree:
        Extract
          File Output Operator
            compressed: false
            GlobalTableId: 0
            table:
                input format: org.apache.hadoop.mapred.TextInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

  Stage: Stage-0
    Fetch Operator
      limit: -1

PREHOOK: query: select key, value from combine2 where value is not null order by key
PREHOOK: type: QUERY
PREHOOK: Input: default@combine2@value=2010-04-21 09%3A45%3A00
PREHOOK: Input: default@combine2@value=val_0
PREHOOK: Input: default@combine2@value=val_2
PREHOOK: Input: default@combine2@value=val_4
PREHOOK: Input: default@combine2@value=val_5
PREHOOK: Input: default@combine2@value=val_8
PREHOOK: Input: default@combine2@value=val_9
PREHOOK: Input: default@combine2@value=|
PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-40-36_801_7329954255470641875/-mr-10000
POSTHOOK: query: select key, value from combine2 where value is not null order by key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@combine2@value=2010-04-21 09%3A45%3A00
POSTHOOK: Input: default@combine2@value=val_0
POSTHOOK: Input: default@combine2@value=val_2
POSTHOOK: Input: default@combine2@value=val_4
POSTHOOK: Input: default@combine2@value=val_5
POSTHOOK: Input: default@combine2@value=val_8
POSTHOOK: Input: default@combine2@value=val_9
POSTHOOK: Input: default@combine2@value=|
POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-40-36_801_7329954255470641875/-mr-10000
POSTHOOK: Lineage: combine2 PARTITION(value=2010-04-21 09:45:00).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_0).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_4).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_5).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_8).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_9).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=|).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
0	val_0
0	val_0
0	val_0
11	|
19	2010-04-21 09:45:00
2	val_2
4	val_4
5	val_5
5	val_5
5	val_5
8	val_8
9	val_9
PREHOOK: query: explain extended select count(1) from combine2 where value is not null
PREHOOK: type: QUERY
POSTHOOK: query: explain extended select count(1) from combine2 where value is not null
POSTHOOK: type: QUERY
POSTHOOK: Lineage: combine2 PARTITION(value=2010-04-21 09:45:00).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_0).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_4).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_5).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_8).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_9).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=|).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
ABSTRACT SYNTAX TREE:
  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME combine2))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (TOK_FUNCTION TOK_ISNOTNULL (TOK_TABLE_OR_COL value)))))

STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-1
    Map Reduce
      Alias -> Map Operator Tree:
        combine2 
          TableScan
            alias: combine2
            GatherStats: false
            Filter Operator
              isSamplingPred: false
              predicate:
                  expr: value is not null
                  type: boolean
              Select Operator
                Group By Operator
                  aggregations:
                        expr: count(1)
                  bucketGroup: false
                  mode: hash
                  outputColumnNames: _col0
                  Reduce Output Operator
                    sort order: 
                    tag: -1
                    value expressions:
                          expr: _col0
                          type: bigint
      Needs Tagging: false
      Path -> Alias:
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=2010-04-21 09%3A45%3A00 [combine2]
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=val_0 [combine2]
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=val_2 [combine2]
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=val_4 [combine2]
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=val_5 [combine2]
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=val_8 [combine2]
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=val_9 [combine2]
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=| [combine2]
      Path -> Partition:
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=2010-04-21 09%3A45%3A00 
          Partition
            base file name: value=2010-04-21 09%3A45%3A00
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            partition values:
              value 2010-04-21 09:45:00
            properties:
              bucket_count -1
              columns key
              columns.types string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=2010-04-21 09%3A45%3A00
              name default.combine2
              numFiles 8
              numPartitions 8
              numRows 12
              partition_columns value
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 26
              transient_lastDdlTime 1297330836
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            properties:
              bucket_count -1
              columns key
              columns.types string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2
              name default.combine2
              numFiles 8
              numPartitions 8
              numRows 12
              partition_columns value
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 26
              transient_lastDdlTime 1297330836
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.combine2
            name: default.combine2
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=val_0 
          Partition
            base file name: value=val_0
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            partition values:
              value val_0
            properties:
              bucket_count -1
              columns key
              columns.types string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=val_0
              name default.combine2
              numFiles 8
              numPartitions 8
              numRows 12
              partition_columns value
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 26
              transient_lastDdlTime 1297330836
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            properties:
              bucket_count -1
              columns key
              columns.types string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2
              name default.combine2
              numFiles 8
              numPartitions 8
              numRows 12
              partition_columns value
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 26
              transient_lastDdlTime 1297330836
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.combine2
            name: default.combine2
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=val_2 
          Partition
            base file name: value=val_2
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            partition values:
              value val_2
            properties:
              bucket_count -1
              columns key
              columns.types string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=val_2
              name default.combine2
              numFiles 8
              numPartitions 8
              numRows 12
              partition_columns value
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 26
              transient_lastDdlTime 1297330836
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            properties:
              bucket_count -1
              columns key
              columns.types string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2
              name default.combine2
              numFiles 8
              numPartitions 8
              numRows 12
              partition_columns value
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 26
              transient_lastDdlTime 1297330836
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.combine2
            name: default.combine2
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=val_4 
          Partition
            base file name: value=val_4
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            partition values:
              value val_4
            properties:
              bucket_count -1
              columns key
              columns.types string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=val_4
              name default.combine2
              numFiles 8
              numPartitions 8
              numRows 12
              partition_columns value
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 26
              transient_lastDdlTime 1297330836
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            properties:
              bucket_count -1
              columns key
              columns.types string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2
              name default.combine2
              numFiles 8
              numPartitions 8
              numRows 12
              partition_columns value
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 26
              transient_lastDdlTime 1297330836
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.combine2
            name: default.combine2
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=val_5 
          Partition
            base file name: value=val_5
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            partition values:
              value val_5
            properties:
              bucket_count -1
              columns key
              columns.types string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=val_5
              name default.combine2
              numFiles 8
              numPartitions 8
              numRows 12
              partition_columns value
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 26
              transient_lastDdlTime 1297330836
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            properties:
              bucket_count -1
              columns key
              columns.types string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2
              name default.combine2
              numFiles 8
              numPartitions 8
              numRows 12
              partition_columns value
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 26
              transient_lastDdlTime 1297330836
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.combine2
            name: default.combine2
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=val_8 
          Partition
            base file name: value=val_8
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            partition values:
              value val_8
            properties:
              bucket_count -1
              columns key
              columns.types string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=val_8
              name default.combine2
              numFiles 8
              numPartitions 8
              numRows 12
              partition_columns value
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 26
              transient_lastDdlTime 1297330836
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            properties:
              bucket_count -1
              columns key
              columns.types string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2
              name default.combine2
              numFiles 8
              numPartitions 8
              numRows 12
              partition_columns value
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 26
              transient_lastDdlTime 1297330836
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.combine2
            name: default.combine2
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=val_9 
          Partition
            base file name: value=val_9
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            partition values:
              value val_9
            properties:
              bucket_count -1
              columns key
              columns.types string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=val_9
              name default.combine2
              numFiles 8
              numPartitions 8
              numRows 12
              partition_columns value
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 26
              transient_lastDdlTime 1297330836
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            properties:
              bucket_count -1
              columns key
              columns.types string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2
              name default.combine2
              numFiles 8
              numPartitions 8
              numRows 12
              partition_columns value
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 26
              transient_lastDdlTime 1297330836
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.combine2
            name: default.combine2
        pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=| 
          Partition
            base file name: value=|
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            partition values:
              value |
            properties:
              bucket_count -1
              columns key
              columns.types string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=|
              name default.combine2
              numFiles 8
              numPartitions 8
              numRows 12
              partition_columns value
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 26
              transient_lastDdlTime 1297330836
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            input format: org.apache.hadoop.mapred.TextInputFormat
            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
            properties:
              bucket_count -1
              columns key
              columns.types string
              file.inputformat org.apache.hadoop.mapred.TextInputFormat
              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2
              name default.combine2
              numFiles 8
              numPartitions 8
              numRows 12
              partition_columns value
              serialization.ddl struct combine2 { string key}
              serialization.format 1
              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              totalSize 26
              transient_lastDdlTime 1297330836
            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            name: default.combine2
            name: default.combine2
      Reduce Operator Tree:
        Group By Operator
          aggregations:
                expr: count(VALUE._col0)
          bucketGroup: false
          mode: mergepartial
          outputColumnNames: _col0
          Select Operator
            expressions:
                  expr: _col0
                  type: bigint
            outputColumnNames: _col0
            File Output Operator
              compressed: false
              GlobalTableId: 0
              directory: file:/tmp/sdong/hive_2011-02-10_01-40-41_145_7466410298733488021/-ext-10001
              NumFilesPerFileSink: 1
              Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_01-40-41_145_7466410298733488021/-ext-10001/
              table:
                  input format: org.apache.hadoop.mapred.TextInputFormat
                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                  properties:
                    columns _col0
                    columns.types bigint
                    serialization.format 1
              TotalFiles: 1
              GatherStats: false
              MultiFileSpray: false

  Stage: Stage-0
    Fetch Operator
      limit: -1

PREHOOK: query: select count(1) from combine2 where value is not null
PREHOOK: type: QUERY
PREHOOK: Input: default@combine2@value=2010-04-21 09%3A45%3A00
PREHOOK: Input: default@combine2@value=val_0
PREHOOK: Input: default@combine2@value=val_2
PREHOOK: Input: default@combine2@value=val_4
PREHOOK: Input: default@combine2@value=val_5
PREHOOK: Input: default@combine2@value=val_8
PREHOOK: Input: default@combine2@value=val_9
PREHOOK: Input: default@combine2@value=|
PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-40-41_363_7045672378046976890/-mr-10000
POSTHOOK: query: select count(1) from combine2 where value is not null
POSTHOOK: type: QUERY
POSTHOOK: Input: default@combine2@value=2010-04-21 09%3A45%3A00
POSTHOOK: Input: default@combine2@value=val_0
POSTHOOK: Input: default@combine2@value=val_2
POSTHOOK: Input: default@combine2@value=val_4
POSTHOOK: Input: default@combine2@value=val_5
POSTHOOK: Input: default@combine2@value=val_8
POSTHOOK: Input: default@combine2@value=val_9
POSTHOOK: Input: default@combine2@value=|
POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-40-41_363_7045672378046976890/-mr-10000
POSTHOOK: Lineage: combine2 PARTITION(value=2010-04-21 09:45:00).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_0).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_4).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_5).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_8).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_9).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=|).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
12
PREHOOK: query: explain select ds, count(1) from srcpart where ds is not null group by ds
PREHOOK: type: QUERY
POSTHOOK: query: explain select ds, count(1) from srcpart where ds is not null group by ds
POSTHOOK: type: QUERY
POSTHOOK: Lineage: combine2 PARTITION(value=2010-04-21 09:45:00).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_0).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_4).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_5).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_8).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_9).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=|).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
ABSTRACT SYNTAX TREE:
  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME srcpart))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL ds)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (TOK_FUNCTION TOK_ISNOTNULL (TOK_TABLE_OR_COL ds))) (TOK_GROUPBY (TOK_TABLE_OR_COL ds))))

STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-1
    Map Reduce
      Alias -> Map Operator Tree:
        srcpart 
          TableScan
            alias: srcpart
            Filter Operator
              predicate:
                  expr: ds is not null
                  type: boolean
              Select Operator
                expressions:
                      expr: ds
                      type: string
                outputColumnNames: ds
                Group By Operator
                  aggregations:
                        expr: count(1)
                  bucketGroup: false
                  keys:
                        expr: ds
                        type: string
                  mode: hash
                  outputColumnNames: _col0, _col1
                  Reduce Output Operator
                    key expressions:
                          expr: _col0
                          type: string
                    sort order: +
                    Map-reduce partition columns:
                          expr: _col0
                          type: string
                    tag: -1
                    value expressions:
                          expr: _col1
                          type: bigint
      Reduce Operator Tree:
        Group By Operator
          aggregations:
                expr: count(VALUE._col0)
          bucketGroup: false
          keys:
                expr: KEY._col0
                type: string
          mode: mergepartial
          outputColumnNames: _col0, _col1
          Select Operator
            expressions:
                  expr: _col0
                  type: string
                  expr: _col1
                  type: bigint
            outputColumnNames: _col0, _col1
            File Output Operator
              compressed: false
              GlobalTableId: 0
              table:
                  input format: org.apache.hadoop.mapred.TextInputFormat
                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

  Stage: Stage-0
    Fetch Operator
      limit: -1

PREHOOK: query: select ds, count(1) from srcpart where ds is not null group by ds
PREHOOK: type: QUERY
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-40-45_760_3016201461459323584/-mr-10000
POSTHOOK: query: select ds, count(1) from srcpart where ds is not null group by ds
POSTHOOK: type: QUERY
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-40-45_760_3016201461459323584/-mr-10000
POSTHOOK: Lineage: combine2 PARTITION(value=2010-04-21 09:45:00).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_0).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_4).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_5).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_8).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=val_9).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: combine2 PARTITION(value=|).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:key, type:string, comment:default), ]
2008-04-08	1000
2008-04-09	1000