PREHOOK: query: drop table pcr_t1 PREHOOK: type: DROPTABLE POSTHOOK: query: drop table pcr_t1 POSTHOOK: type: DROPTABLE PREHOOK: query: drop table pcr_t2 PREHOOK: type: DROPTABLE POSTHOOK: query: drop table pcr_t2 POSTHOOK: type: DROPTABLE PREHOOK: query: drop table pcr_t3 PREHOOK: type: DROPTABLE POSTHOOK: query: drop table pcr_t3 POSTHOOK: type: DROPTABLE PREHOOK: query: create table pcr_t1 (key int, value string) partitioned by (ds string) PREHOOK: type: CREATETABLE POSTHOOK: query: create table pcr_t1 (key int, value string) partitioned by (ds string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@pcr_t1 PREHOOK: query: insert overwrite table pcr_t1 partition (ds='2000-04-08') select * from src where key < 20 order by key PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@pcr_t1@ds=2000-04-08 POSTHOOK: query: insert overwrite table pcr_t1 partition (ds='2000-04-08') select * from src where key < 20 order by key POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@pcr_t1@ds=2000-04-08 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: insert overwrite table pcr_t1 partition (ds='2000-04-09') select * from src where key < 20 order by key PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@pcr_t1@ds=2000-04-09 POSTHOOK: query: insert overwrite table pcr_t1 partition (ds='2000-04-09') select * from src where key < 20 order by key POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@pcr_t1@ds=2000-04-09 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: insert overwrite table pcr_t1 partition (ds='2000-04-10') select * from src where key < 20 order by key PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@pcr_t1@ds=2000-04-10 POSTHOOK: query: insert overwrite table pcr_t1 partition (ds='2000-04-10') select * from src where key < 20 order by key POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@pcr_t1@ds=2000-04-10 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: explain extended select key, value, ds 
from pcr_t1 where ds<='2000-04-09' and key<5 order by key, ds PREHOOK: type: QUERY POSTHOOK: query: explain extended select key, value, ds from pcr_t1 where ds<='2000-04-09' and key<5 order by key, ds POSTHOOK: type: QUERY POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME pcr_t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)) (TOK_SELEXPR (TOK_TABLE_OR_COL ds))) (TOK_WHERE (and (<= (TOK_TABLE_OR_COL ds) '2000-04-09') (< (TOK_TABLE_OR_COL key) 5))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ds))))) STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: pcr_t1 TableScan alias: pcr_t1 GatherStats: false Filter Operator isSamplingPred: false predicate: expr: (key < 5) type: boolean Select Operator expressions: expr: key type: int expr: value type: string expr: ds type: string outputColumnNames: _col0, _col1, _col2 Reduce Output Operator key expressions: expr: _col0 type: int expr: _col2 type: string sort order: ++ tag: -1 value expressions: expr: _col0 type: int expr: _col1 type: string expr: _col2 type: string Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 [pcr_t1] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 Partition base file name: ds=2000-04-08 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-08 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 Partition base file name: ds=2000-04-09 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-09 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931517 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 Reduce Operator Tree: Extract File Output Operator compressed: false GlobalTableId: 0 directory: file:/tmp/amarsri/hive_2011-07-29_02-25-21_129_4065046827555412953/-ext-10001 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-29_02-25-21_129_4065046827555412953/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: columns _col0,_col1,_col2 columns.types int:string:string escape.delim \ serialization.format 1 TotalFiles: 1 GatherStats: false MultiFileSpray: false Stage: Stage-0 Fetch Operator limit: -1 PREHOOK: query: select key, value, ds from pcr_t1 where ds<='2000-04-09' and key<5 order by key, ds PREHOOK: type: QUERY PREHOOK: Input: default@pcr_t1@ds=2000-04-08 PREHOOK: Input: default@pcr_t1@ds=2000-04-09 PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-21_189_928400507516888386/-mr-10000 POSTHOOK: query: select key, value, ds from pcr_t1 where ds<='2000-04-09' and key<5 order by key, ds POSTHOOK: type: QUERY POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-21_189_928400507516888386/-mr-10000 POSTHOOK: Lineage: pcr_t1 
PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-09 0 val_0 2000-04-09 0 val_0 2000-04-09 2 val_2 2000-04-08 2 val_2 2000-04-09 4 val_4 2000-04-08 4 val_4 2000-04-09 PREHOOK: query: explain extended select key, value from pcr_t1 where ds<='2000-04-09' or key<5 order by key PREHOOK: type: QUERY POSTHOOK: query: explain extended select key, value from pcr_t1 where ds<='2000-04-09' or key<5 order by key POSTHOOK: type: QUERY POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME pcr_t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (or (<= (TOK_TABLE_OR_COL ds) '2000-04-09') (< (TOK_TABLE_OR_COL key) 5))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key))))) STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: pcr_t1 TableScan alias: pcr_t1 GatherStats: false Filter Operator isSamplingPred: false predicate: expr: ((ds <= '2000-04-09') or (key < 5)) type: boolean Select Operator expressions: expr: key type: int expr: value type: string outputColumnNames: _col0, _col1 Reduce Output Operator key expressions: expr: _col0 type: int sort order: + tag: -1 value expressions: expr: _col0 type: int expr: _col1 type: string Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 [pcr_t1] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 [pcr_t1] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 Partition base file name: ds=2000-04-08 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-08 properties: 
bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 Partition base file name: ds=2000-04-09 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-09 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931517 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 Partition base file name: ds=2000-04-10 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-10 properties: bucket_count -1 columns key,value columns.types int:string 
file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 Reduce Operator Tree: Extract File Output Operator compressed: false GlobalTableId: 0 directory: file:/tmp/amarsri/hive_2011-07-29_02-25-23_865_6506623963862014221/-ext-10001 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-29_02-25-23_865_6506623963862014221/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: columns _col0,_col1 columns.types int:string escape.delim \ serialization.format 1 TotalFiles: 1 GatherStats: false MultiFileSpray: false Stage: Stage-0 Fetch Operator limit: -1 PREHOOK: query: select key, value from pcr_t1 where ds<='2000-04-09' or key<5 order by key PREHOOK: type: QUERY PREHOOK: Input: default@pcr_t1@ds=2000-04-08 PREHOOK: Input: default@pcr_t1@ds=2000-04-09 PREHOOK: Input: default@pcr_t1@ds=2000-04-10 PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-23_926_1611813292250503932/-mr-10000 POSTHOOK: query: select key, value from pcr_t1 where ds<='2000-04-09' or key<5 order by key POSTHOOK: type: QUERY POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 POSTHOOK: Input: default@pcr_t1@ds=2000-04-10 POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-23_926_1611813292250503932/-mr-10000 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 
0 val_0 2 val_2 2 val_2 2 val_2 4 val_4 4 val_4 4 val_4 5 val_5 5 val_5 5 val_5 5 val_5 5 val_5 5 val_5 8 val_8 8 val_8 9 val_9 9 val_9 10 val_10 10 val_10 11 val_11 11 val_11 12 val_12 12 val_12 12 val_12 12 val_12 15 val_15 15 val_15 15 val_15 15 val_15 17 val_17 17 val_17 18 val_18 18 val_18 18 val_18 18 val_18 19 val_19 19 val_19 PREHOOK: query: explain extended select key, value, ds from pcr_t1 where ds<='2000-04-09' and key<5 and value != 'val_2' order by key, ds PREHOOK: type: QUERY POSTHOOK: query: explain extended select key, value, ds from pcr_t1 where ds<='2000-04-09' and key<5 and value != 'val_2' order by key, ds POSTHOOK: type: QUERY POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME pcr_t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)) (TOK_SELEXPR (TOK_TABLE_OR_COL ds))) (TOK_WHERE (and (and (<= (TOK_TABLE_OR_COL ds) '2000-04-09') (< (TOK_TABLE_OR_COL key) 5)) (!= (TOK_TABLE_OR_COL value) 'val_2'))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ds))))) STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: pcr_t1 TableScan alias: pcr_t1 GatherStats: false Filter Operator isSamplingPred: false predicate: expr: ((key < 5) and (value <> 'val_2')) type: boolean Select Operator expressions: expr: key type: int expr: value type: string expr: ds type: string outputColumnNames: _col0, _col1, _col2 Reduce Output Operator key expressions: expr: _col0 type: int expr: _col2 type: string sort order: ++ tag: -1 value expressions: expr: _col0 type: int expr: _col1 type: string expr: _col2 type: string Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 [pcr_t1] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 Partition base file name: ds=2000-04-08 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-08 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} 
serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 Partition base file name: ds=2000-04-09 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-09 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931517 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 Reduce Operator Tree: Extract File Output Operator compressed: false GlobalTableId: 0 directory: file:/tmp/amarsri/hive_2011-07-29_02-25-26_679_5171780899276979732/-ext-10001 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-29_02-25-26_679_5171780899276979732/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: columns _col0,_col1,_col2 columns.types int:string:string escape.delim \ serialization.format 1 TotalFiles: 1 GatherStats: false MultiFileSpray: false Stage: Stage-0 Fetch Operator limit: -1 PREHOOK: query: select key, value, ds from pcr_t1 where ds<='2000-04-09' and key<5 and value != 'val_2' order by key, ds PREHOOK: type: QUERY PREHOOK: Input: 
default@pcr_t1@ds=2000-04-08 PREHOOK: Input: default@pcr_t1@ds=2000-04-09 PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-26_734_1964916511399680084/-mr-10000 POSTHOOK: query: select key, value, ds from pcr_t1 where ds<='2000-04-09' and key<5 and value != 'val_2' order by key, ds POSTHOOK: type: QUERY POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-26_734_1964916511399680084/-mr-10000 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-09 0 val_0 2000-04-09 0 val_0 2000-04-09 4 val_4 2000-04-08 4 val_4 2000-04-09 PREHOOK: query: explain extended select key, value, ds from pcr_t1 where (ds < '2000-04-09' and key < 5) or (ds > '2000-04-09' and value == 'val_5') order by key, ds PREHOOK: type: QUERY POSTHOOK: query: explain extended select key, value, ds from pcr_t1 where (ds < '2000-04-09' and key < 5) or (ds > '2000-04-09' and value == 'val_5') order by key, ds POSTHOOK: type: QUERY POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME pcr_t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)) (TOK_SELEXPR (TOK_TABLE_OR_COL ds))) (TOK_WHERE (or (and (< (TOK_TABLE_OR_COL ds) '2000-04-09') (< (TOK_TABLE_OR_COL key) 5)) (and (> (TOK_TABLE_OR_COL ds) '2000-04-09') (== (TOK_TABLE_OR_COL value) 'val_5')))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ds))))) STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: pcr_t1 TableScan alias: pcr_t1 GatherStats: false Filter Operator isSamplingPred: false predicate: expr: (((ds < '2000-04-09') and (key < 5)) or ((ds > '2000-04-09') and (value = 'val_5'))) type: boolean Select Operator expressions: expr: key type: int expr: value type: string 
expr: ds type: string outputColumnNames: _col0, _col1, _col2 Reduce Output Operator key expressions: expr: _col0 type: int expr: _col2 type: string sort order: ++ tag: -1 value expressions: expr: _col0 type: int expr: _col1 type: string expr: _col2 type: string Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 [pcr_t1] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 Partition base file name: ds=2000-04-08 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-08 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 Partition base file name: ds=2000-04-10 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-10 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location 
pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 Reduce Operator Tree: Extract File Output Operator compressed: false GlobalTableId: 0 directory: file:/tmp/amarsri/hive_2011-07-29_02-25-29_372_2124753264258245021/-ext-10001 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-29_02-25-29_372_2124753264258245021/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: columns _col0,_col1,_col2 columns.types int:string:string escape.delim \ serialization.format 1 TotalFiles: 1 GatherStats: false MultiFileSpray: false Stage: Stage-0 Fetch Operator limit: -1 PREHOOK: query: select key, value, ds from pcr_t1 where (ds < '2000-04-09' and key < 5) or (ds > '2000-04-09' and value == 'val_5') order by key, ds PREHOOK: type: QUERY PREHOOK: Input: default@pcr_t1@ds=2000-04-08 PREHOOK: Input: default@pcr_t1@ds=2000-04-10 PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-29_446_9139902346775639073/-mr-10000 POSTHOOK: query: select key, value, ds from pcr_t1 where (ds < '2000-04-09' and key < 5) or (ds > '2000-04-09' and value == 'val_5') order by key, ds POSTHOOK: type: QUERY POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 POSTHOOK: Input: default@pcr_t1@ds=2000-04-10 POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-29_446_9139902346775639073/-mr-10000 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-08 2 val_2 2000-04-08 4 val_4 2000-04-08 5 val_5 2000-04-10 5 val_5 2000-04-10 5 val_5 2000-04-10 PREHOOK: query: explain extended select key, value, ds from pcr_t1 where (ds < '2000-04-10' and key < 5) or (ds > '2000-04-08' and value == 'val_5') order by key, ds PREHOOK: type: QUERY POSTHOOK: query: explain extended select key, value, ds from pcr_t1 where (ds < '2000-04-10' and key < 5) or (ds > '2000-04-08' and value == 'val_5') order by key, ds POSTHOOK: type: QUERY POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 
PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME pcr_t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)) (TOK_SELEXPR (TOK_TABLE_OR_COL ds))) (TOK_WHERE (or (and (< (TOK_TABLE_OR_COL ds) '2000-04-10') (< (TOK_TABLE_OR_COL key) 5)) (and (> (TOK_TABLE_OR_COL ds) '2000-04-08') (== (TOK_TABLE_OR_COL value) 'val_5')))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ds))))) STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: pcr_t1 TableScan alias: pcr_t1 GatherStats: false Filter Operator isSamplingPred: false predicate: expr: (((ds < '2000-04-10') and (key < 5)) or ((ds > '2000-04-08') and (value = 'val_5'))) type: boolean Select Operator expressions: expr: key type: int expr: value type: string expr: ds type: string outputColumnNames: _col0, _col1, _col2 Reduce Output Operator key expressions: expr: _col0 type: int expr: _col2 type: string sort order: ++ tag: -1 value expressions: expr: _col0 type: int expr: _col1 type: string expr: _col2 type: string Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 [pcr_t1] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 [pcr_t1] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 Partition base file name: ds=2000-04-08 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-08 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 
1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 Partition base file name: ds=2000-04-09 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-09 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931517 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 Partition base file name: ds=2000-04-10 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-10 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe 
name: default.pcr_t1 name: default.pcr_t1 Reduce Operator Tree: Extract File Output Operator compressed: false GlobalTableId: 0 directory: file:/tmp/amarsri/hive_2011-07-29_02-25-32_131_4848675280784439482/-ext-10001 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-29_02-25-32_131_4848675280784439482/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: columns _col0,_col1,_col2 columns.types int:string:string escape.delim \ serialization.format 1 TotalFiles: 1 GatherStats: false MultiFileSpray: false Stage: Stage-0 Fetch Operator limit: -1 PREHOOK: query: select key, value, ds from pcr_t1 where (ds < '2000-04-10' and key < 5) or (ds > '2000-04-08' and value == 'val_5') order by key, ds PREHOOK: type: QUERY PREHOOK: Input: default@pcr_t1@ds=2000-04-08 PREHOOK: Input: default@pcr_t1@ds=2000-04-09 PREHOOK: Input: default@pcr_t1@ds=2000-04-10 PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-32_192_4474962480767634565/-mr-10000 POSTHOOK: query: select key, value, ds from pcr_t1 where (ds < '2000-04-10' and key < 5) or (ds > '2000-04-08' and value == 'val_5') order by key, ds POSTHOOK: type: QUERY POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 POSTHOOK: Input: default@pcr_t1@ds=2000-04-10 POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-32_192_4474962480767634565/-mr-10000 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-09 0 val_0 2000-04-09 0 val_0 2000-04-09 2 val_2 2000-04-08 2 val_2 2000-04-09 4 val_4 2000-04-08 4 val_4 2000-04-09 5 val_5 2000-04-09 5 val_5 2000-04-09 5 val_5 2000-04-09 5 val_5 2000-04-10 5 val_5 2000-04-10 5 val_5 2000-04-10 PREHOOK: query: explain extended select key, value, ds from pcr_t1 where (ds < '2000-04-10' or key < 5) and (ds > '2000-04-08' or value == 'val_5') order by key, ds PREHOOK: type: QUERY POSTHOOK: query: explain extended select key, value, ds from pcr_t1 where (ds < '2000-04-10' or key < 5) and (ds > '2000-04-08' or value == 'val_5') order by key, ds POSTHOOK: type: QUERY POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION 
[(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME pcr_t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)) (TOK_SELEXPR (TOK_TABLE_OR_COL ds))) (TOK_WHERE (and (or (< (TOK_TABLE_OR_COL ds) '2000-04-10') (< (TOK_TABLE_OR_COL key) 5)) (or (> (TOK_TABLE_OR_COL ds) '2000-04-08') (== (TOK_TABLE_OR_COL value) 'val_5')))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ds))))) STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: pcr_t1 TableScan alias: pcr_t1 GatherStats: false Filter Operator isSamplingPred: false predicate: expr: (((ds < '2000-04-10') or (key < 5)) and ((ds > '2000-04-08') or (value = 'val_5'))) type: boolean Select Operator expressions: expr: key type: int expr: value type: string expr: ds type: string outputColumnNames: _col0, _col1, _col2 Reduce Output Operator key expressions: expr: _col0 type: int expr: _col2 type: string sort order: ++ tag: -1 value expressions: expr: _col0 type: int expr: _col1 type: string expr: _col2 type: string Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 [pcr_t1] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 [pcr_t1] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 Partition base file name: ds=2000-04-08 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-08 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 
pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 Partition base file name: ds=2000-04-09 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-09 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931517 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 Partition base file name: ds=2000-04-10 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-10 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 Reduce Operator Tree: Extract File Output Operator compressed: false 
GlobalTableId: 0 directory: file:/tmp/amarsri/hive_2011-07-29_02-25-34_889_6668013612120955079/-ext-10001 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-29_02-25-34_889_6668013612120955079/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: columns _col0,_col1,_col2 columns.types int:string:string escape.delim \ serialization.format 1 TotalFiles: 1 GatherStats: false MultiFileSpray: false Stage: Stage-0 Fetch Operator limit: -1 PREHOOK: query: select key, value, ds from pcr_t1 where (ds < '2000-04-10' or key < 5) and (ds > '2000-04-08' or value == 'val_5') order by key, ds PREHOOK: type: QUERY PREHOOK: Input: default@pcr_t1@ds=2000-04-08 PREHOOK: Input: default@pcr_t1@ds=2000-04-09 PREHOOK: Input: default@pcr_t1@ds=2000-04-10 PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-34_951_6978641764651819716/-mr-10000 POSTHOOK: query: select key, value, ds from pcr_t1 where (ds < '2000-04-10' or key < 5) and (ds > '2000-04-08' or value == 'val_5') order by key, ds POSTHOOK: type: QUERY POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 POSTHOOK: Input: default@pcr_t1@ds=2000-04-10 POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-34_951_6978641764651819716/-mr-10000 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 0 val_0 2000-04-09 0 val_0 2000-04-09 0 val_0 2000-04-09 0 val_0 2000-04-10 0 val_0 2000-04-10 0 val_0 2000-04-10 2 val_2 2000-04-09 2 val_2 2000-04-10 4 val_4 2000-04-09 4 val_4 2000-04-10 5 val_5 2000-04-08 5 val_5 2000-04-08 5 val_5 2000-04-08 5 val_5 2000-04-09 5 val_5 2000-04-09 5 val_5 2000-04-09 8 val_8 2000-04-09 9 val_9 2000-04-09 10 val_10 2000-04-09 11 val_11 2000-04-09 12 val_12 2000-04-09 12 val_12 2000-04-09 15 val_15 2000-04-09 15 val_15 2000-04-09 17 val_17 2000-04-09 18 val_18 2000-04-09 18 val_18 2000-04-09 19 val_19 2000-04-09 PREHOOK: query: explain extended select key, value from pcr_t1 where (ds='2000-04-08' or ds='2000-04-09') and key=14 order by key, value PREHOOK: type: QUERY POSTHOOK: query: explain extended select key, value from pcr_t1 where (ds='2000-04-08' or ds='2000-04-09') and key=14 order by key, value POSTHOOK: type: QUERY POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, 
comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME pcr_t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (and (or (= (TOK_TABLE_OR_COL ds) '2000-04-08') (= (TOK_TABLE_OR_COL ds) '2000-04-09')) (= (TOK_TABLE_OR_COL key) 14))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: pcr_t1 TableScan alias: pcr_t1 GatherStats: false Filter Operator isSamplingPred: false predicate: expr: (key = 14) type: boolean Select Operator expressions: expr: key type: int expr: value type: string outputColumnNames: _col0, _col1 Reduce Output Operator key expressions: expr: _col0 type: int expr: _col1 type: string sort order: ++ tag: -1 value expressions: expr: _col0 type: int expr: _col1 type: string Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 [pcr_t1] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 Partition base file name: ds=2000-04-08 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-08 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 Partition base file name: ds=2000-04-09 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 
2000-04-09 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931517 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 Reduce Operator Tree: Extract File Output Operator compressed: false GlobalTableId: 0 directory: file:/tmp/amarsri/hive_2011-07-29_02-25-37_712_2575302228085329340/-ext-10001 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-29_02-25-37_712_2575302228085329340/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: columns _col0,_col1 columns.types int:string escape.delim \ serialization.format 1 TotalFiles: 1 GatherStats: false MultiFileSpray: false Stage: Stage-0 Fetch Operator limit: -1 PREHOOK: query: select key, value from pcr_t1 where (ds='2000-04-08' or ds='2000-04-09') and key=14 order by key, value PREHOOK: type: QUERY PREHOOK: Input: default@pcr_t1@ds=2000-04-08 PREHOOK: Input: default@pcr_t1@ds=2000-04-09 PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-37_773_2998616591696876714/-mr-10000 POSTHOOK: query: select key, value from pcr_t1 where (ds='2000-04-08' or ds='2000-04-09') and key=14 order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-37_773_2998616591696876714/-mr-10000 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: 
explain extended select key, value from pcr_t1 where ds='2000-04-08' or ds='2000-04-09' order by key, value PREHOOK: type: QUERY POSTHOOK: query: explain extended select key, value from pcr_t1 where ds='2000-04-08' or ds='2000-04-09' order by key, value POSTHOOK: type: QUERY POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME pcr_t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (or (= (TOK_TABLE_OR_COL ds) '2000-04-08') (= (TOK_TABLE_OR_COL ds) '2000-04-09'))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: pcr_t1 TableScan alias: pcr_t1 GatherStats: false Select Operator expressions: expr: key type: int expr: value type: string outputColumnNames: _col0, _col1 Reduce Output Operator key expressions: expr: _col0 type: int expr: _col1 type: string sort order: ++ tag: -1 value expressions: expr: _col0 type: int expr: _col1 type: string Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 [pcr_t1] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 Partition base file name: ds=2000-04-08 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-08 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name 
default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 Partition base file name: ds=2000-04-09 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-09 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931517 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 Reduce Operator Tree: Extract File Output Operator compressed: false GlobalTableId: 0 directory: file:/tmp/amarsri/hive_2011-07-29_02-25-40_463_8266098016871150118/-ext-10001 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-29_02-25-40_463_8266098016871150118/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: columns _col0,_col1 columns.types int:string escape.delim \ serialization.format 1 TotalFiles: 1 GatherStats: false MultiFileSpray: false Stage: Stage-0 Fetch Operator limit: -1 PREHOOK: query: select key, value from pcr_t1 where ds='2000-04-08' or ds='2000-04-09' order by key, value PREHOOK: type: QUERY PREHOOK: Input: default@pcr_t1@ds=2000-04-08 PREHOOK: Input: default@pcr_t1@ds=2000-04-09 PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-40_524_2597517539449159730/-mr-10000 POSTHOOK: query: select key, value from pcr_t1 where ds='2000-04-08' or ds='2000-04-09' order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-40_524_2597517539449159730/-mr-10000 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 
PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 2 val_2 2 val_2 4 val_4 4 val_4 5 val_5 5 val_5 5 val_5 5 val_5 5 val_5 5 val_5 8 val_8 8 val_8 9 val_9 9 val_9 10 val_10 10 val_10 11 val_11 11 val_11 12 val_12 12 val_12 12 val_12 12 val_12 15 val_15 15 val_15 15 val_15 15 val_15 17 val_17 17 val_17 18 val_18 18 val_18 18 val_18 18 val_18 19 val_19 19 val_19 PREHOOK: query: explain extended select key, value from pcr_t1 where ds>='2000-04-08' or ds<'2000-04-10' order by key, value PREHOOK: type: QUERY POSTHOOK: query: explain extended select key, value from pcr_t1 where ds>='2000-04-08' or ds<'2000-04-10' order by key, value POSTHOOK: type: QUERY POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME pcr_t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (or (>= (TOK_TABLE_OR_COL ds) '2000-04-08') (< (TOK_TABLE_OR_COL ds) '2000-04-10'))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: pcr_t1 TableScan alias: pcr_t1 GatherStats: false Select Operator expressions: expr: key type: int expr: value type: string outputColumnNames: _col0, _col1 Reduce Output Operator key expressions: expr: _col0 type: int expr: _col1 type: string sort order: ++ tag: -1 value expressions: expr: _col0 type: int expr: _col1 type: string Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 [pcr_t1] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 [pcr_t1] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 Partition base file name: ds=2000-04-08 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
partition values: ds 2000-04-08 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 Partition base file name: ds=2000-04-09 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-09 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931517 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 Partition base file name: ds=2000-04-10 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-10 properties: bucket_count -1 columns 
key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 Reduce Operator Tree: Extract File Output Operator compressed: false GlobalTableId: 0 directory: file:/tmp/amarsri/hive_2011-07-29_02-25-43_193_5744597926147610591/-ext-10001 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-29_02-25-43_193_5744597926147610591/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: columns _col0,_col1 columns.types int:string escape.delim \ serialization.format 1 TotalFiles: 1 GatherStats: false MultiFileSpray: false Stage: Stage-0 Fetch Operator limit: -1 PREHOOK: query: select key, value from pcr_t1 where ds>='2000-04-08' or ds<'2000-04-10' order by key, value PREHOOK: type: QUERY PREHOOK: Input: default@pcr_t1@ds=2000-04-08 PREHOOK: Input: default@pcr_t1@ds=2000-04-09 PREHOOK: Input: default@pcr_t1@ds=2000-04-10 PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-43_252_4071559680842866158/-mr-10000 POSTHOOK: query: select key, value from pcr_t1 where ds>='2000-04-08' or ds<'2000-04-10' order by key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 POSTHOOK: Input: default@pcr_t1@ds=2000-04-10 POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-43_252_4071559680842866158/-mr-10000 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, 
comment:default), ] 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 0 val_0 2 val_2 2 val_2 2 val_2 4 val_4 4 val_4 4 val_4 5 val_5 5 val_5 5 val_5 5 val_5 5 val_5 5 val_5 5 val_5 5 val_5 5 val_5 8 val_8 8 val_8 8 val_8 9 val_9 9 val_9 9 val_9 10 val_10 10 val_10 10 val_10 11 val_11 11 val_11 11 val_11 12 val_12 12 val_12 12 val_12 12 val_12 12 val_12 12 val_12 15 val_15 15 val_15 15 val_15 15 val_15 15 val_15 15 val_15 17 val_17 17 val_17 17 val_17 18 val_18 18 val_18 18 val_18 18 val_18 18 val_18 18 val_18 19 val_19 19 val_19 19 val_19 PREHOOK: query: explain extended select key, value, ds from pcr_t1 where (ds='2000-04-08' and key=1) or (ds='2000-04-09' and key=2) order by key, value, ds PREHOOK: type: QUERY POSTHOOK: query: explain extended select key, value, ds from pcr_t1 where (ds='2000-04-08' and key=1) or (ds='2000-04-09' and key=2) order by key, value, ds POSTHOOK: type: QUERY POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME pcr_t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)) (TOK_SELEXPR (TOK_TABLE_OR_COL ds))) (TOK_WHERE (or (and (= (TOK_TABLE_OR_COL ds) '2000-04-08') (= (TOK_TABLE_OR_COL key) 1)) (and (= (TOK_TABLE_OR_COL ds) '2000-04-09') (= (TOK_TABLE_OR_COL key) 2)))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ds))))) STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: pcr_t1 TableScan alias: pcr_t1 GatherStats: false Filter Operator isSamplingPred: false predicate: expr: (((ds = '2000-04-08') and (key = 1)) or ((ds = '2000-04-09') and (key = 2))) type: boolean Select Operator expressions: expr: key type: int expr: value type: string expr: ds type: string outputColumnNames: _col0, _col1, _col2 Reduce Output Operator key expressions: expr: _col0 type: int expr: _col1 type: string expr: _col2 type: string sort order: +++ tag: -1 value expressions: expr: _col0 type: int expr: _col1 type: string expr: _col2 type: string Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 [pcr_t1] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 Partition base file name: ds=2000-04-08 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-08 properties: bucket_count -1 columns key,value 
columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 Partition base file name: ds=2000-04-09 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-09 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931517 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 Reduce Operator Tree: Extract File Output Operator compressed: false GlobalTableId: 0 directory: file:/tmp/amarsri/hive_2011-07-29_02-25-45_996_195479673300228897/-ext-10001 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-29_02-25-45_996_195479673300228897/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: columns _col0,_col1,_col2 columns.types int:string:string escape.delim \ serialization.format 1 TotalFiles: 1 GatherStats: false MultiFileSpray: false Stage: Stage-0 Fetch Operator limit: -1 PREHOOK: query: select key, value, ds from pcr_t1 where (ds='2000-04-08' and key=1) or (ds='2000-04-09' and key=2) order by key, value, ds PREHOOK: type: QUERY PREHOOK: Input: default@pcr_t1@ds=2000-04-08 PREHOOK: Input: default@pcr_t1@ds=2000-04-09 PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-46_058_7061582212109052102/-mr-10000 POSTHOOK: query: select key, value, ds from pcr_t1 where (ds='2000-04-08' and key=1) or (ds='2000-04-09' and key=2) order by key, value, ds POSTHOOK: type: QUERY POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-46_058_7061582212109052102/-mr-10000 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 2 val_2 2000-04-09 PREHOOK: query: explain extended select * from pcr_t1 t1 join pcr_t1 t2 on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-08' order by t1.key PREHOOK: type: QUERY POSTHOOK: query: explain extended select * from pcr_t1 t1 join pcr_t1 t2 on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-08' order by t1.key POSTHOOK: type: QUERY POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME pcr_t1) t1) (TOK_TABREF (TOK_TABNAME pcr_t1) t2) (and (and (= (. (TOK_TABLE_OR_COL t1) key) (. (TOK_TABLE_OR_COL t2) key)) (= (. (TOK_TABLE_OR_COL t1) ds) '2000-04-08')) (= (. (TOK_TABLE_OR_COL t2) ds) '2000-04-08')))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (. 
(TOK_TABLE_OR_COL t1) key))))) STAGE DEPENDENCIES: Stage-1 is a root stage Stage-2 depends on stages: Stage-1 Stage-0 is a root stage STAGE PLANS: Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: t1 TableScan alias: t1 GatherStats: false Reduce Output Operator key expressions: expr: key type: int sort order: + Map-reduce partition columns: expr: key type: int tag: 0 value expressions: expr: key type: int expr: value type: string expr: ds type: string t2 TableScan alias: t2 GatherStats: false Reduce Output Operator key expressions: expr: key type: int sort order: + Map-reduce partition columns: expr: key type: int tag: 1 value expressions: expr: key type: int expr: value type: string expr: ds type: string Needs Tagging: true Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [t2, t1] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 Partition base file name: ds=2000-04-08 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-08 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} handleSkewJoin: false outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7 Select Operator expressions: expr: _col0 type: int expr: _col1 type: string expr: _col2 type: string expr: _col5 type: int expr: _col6 type: string expr: _col7 type: string outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 File Output Operator compressed: false GlobalTableId: 0 directory: file:/tmp/amarsri/hive_2011-07-29_02-25-48_777_7397747697764876900/-mr-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat properties: columns _col0,_col1,_col2,_col3,_col4,_col5 columns.types int,string,string,int,string,string escape.delim \ TotalFiles: 1 
GatherStats: false MultiFileSpray: false Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: file:/tmp/amarsri/hive_2011-07-29_02-25-48_777_7397747697764876900/-mr-10002 Reduce Output Operator key expressions: expr: _col0 type: int sort order: + tag: -1 value expressions: expr: _col0 type: int expr: _col1 type: string expr: _col2 type: string expr: _col3 type: int expr: _col4 type: string expr: _col5 type: string Needs Tagging: false Path -> Alias: file:/tmp/amarsri/hive_2011-07-29_02-25-48_777_7397747697764876900/-mr-10002 [file:/tmp/amarsri/hive_2011-07-29_02-25-48_777_7397747697764876900/-mr-10002] Path -> Partition: file:/tmp/amarsri/hive_2011-07-29_02-25-48_777_7397747697764876900/-mr-10002 Partition base file name: -mr-10002 input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat properties: columns _col0,_col1,_col2,_col3,_col4,_col5 columns.types int,string,string,int,string,string escape.delim \ input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat properties: columns _col0,_col1,_col2,_col3,_col4,_col5 columns.types int,string,string,int,string,string escape.delim \ Reduce Operator Tree: Extract File Output Operator compressed: false GlobalTableId: 0 directory: file:/tmp/amarsri/hive_2011-07-29_02-25-48_777_7397747697764876900/-ext-10001 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-29_02-25-48_777_7397747697764876900/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: columns _col0,_col1,_col2,_col3,_col4,_col5 columns.types int:string:string:int:string:string escape.delim \ serialization.format 1 TotalFiles: 1 GatherStats: false MultiFileSpray: false Stage: Stage-0 Fetch Operator limit: -1 PREHOOK: query: select * from pcr_t1 t1 join pcr_t1 t2 on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-08' order by t1.key PREHOOK: type: QUERY PREHOOK: Input: default@pcr_t1@ds=2000-04-08 PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-48_855_4603566476605388516/-mr-10000 POSTHOOK: query: select * from pcr_t1 t1 join pcr_t1 t2 on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-08' order by t1.key POSTHOOK: type: QUERY POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-48_855_4603566476605388516/-mr-10000 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 
2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-08 0 val_0 2000-04-08 2 val_2 2000-04-08 2 val_2 2000-04-08 4 val_4 2000-04-08 4 val_4 2000-04-08 5 val_5 2000-04-08 5 val_5 2000-04-08 5 val_5 2000-04-08 5 val_5 2000-04-08 5 val_5 2000-04-08 5 val_5 2000-04-08 5 val_5 2000-04-08 5 val_5 2000-04-08 5 val_5 2000-04-08 5 val_5 2000-04-08 5 val_5 2000-04-08 5 val_5 2000-04-08 5 val_5 2000-04-08 5 val_5 2000-04-08 5 val_5 2000-04-08 5 val_5 2000-04-08 5 val_5 2000-04-08 5 val_5 2000-04-08 8 val_8 2000-04-08 8 val_8 2000-04-08 9 val_9 2000-04-08 9 val_9 2000-04-08 10 val_10 2000-04-08 10 val_10 2000-04-08 11 val_11 2000-04-08 11 val_11 2000-04-08 12 val_12 2000-04-08 12 val_12 2000-04-08 12 val_12 2000-04-08 12 val_12 2000-04-08 12 val_12 2000-04-08 12 val_12 2000-04-08 12 val_12 2000-04-08 12 val_12 2000-04-08 15 val_15 2000-04-08 15 val_15 2000-04-08 15 val_15 2000-04-08 15 val_15 2000-04-08 15 val_15 2000-04-08 15 val_15 2000-04-08 15 val_15 2000-04-08 15 val_15 2000-04-08 17 val_17 2000-04-08 17 val_17 2000-04-08 18 val_18 2000-04-08 18 val_18 2000-04-08 18 val_18 2000-04-08 18 val_18 2000-04-08 18 val_18 2000-04-08 18 val_18 2000-04-08 18 val_18 2000-04-08 18 val_18 2000-04-08 19 val_19 2000-04-08 19 val_19 2000-04-08 PREHOOK: query: explain extended select * from pcr_t1 t1 join pcr_t1 t2 on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-09' order by t1.key PREHOOK: type: QUERY POSTHOOK: query: explain extended select * from pcr_t1 t1 join pcr_t1 t2 on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-09' order by t1.key POSTHOOK: type: QUERY POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME pcr_t1) t1) (TOK_TABREF (TOK_TABNAME pcr_t1) t2) (and (and (= (. (TOK_TABLE_OR_COL t1) key) (. (TOK_TABLE_OR_COL t2) key)) (= (. (TOK_TABLE_OR_COL t1) ds) '2000-04-08')) (= (. (TOK_TABLE_OR_COL t2) ds) '2000-04-09')))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (. 
(TOK_TABLE_OR_COL t1) key))))) STAGE DEPENDENCIES: Stage-1 is a root stage Stage-2 depends on stages: Stage-1 Stage-0 is a root stage STAGE PLANS: Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: t1 TableScan alias: t1 GatherStats: false Reduce Output Operator key expressions: expr: key type: int sort order: + Map-reduce partition columns: expr: key type: int tag: 0 value expressions: expr: key type: int expr: value type: string expr: ds type: string t2 TableScan alias: t2 GatherStats: false Reduce Output Operator key expressions: expr: key type: int sort order: + Map-reduce partition columns: expr: key type: int tag: 1 value expressions: expr: key type: int expr: value type: string expr: ds type: string Needs Tagging: true Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [t1] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 [t2] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 Partition base file name: ds=2000-04-08 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-08 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 Partition base file name: ds=2000-04-09 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-09 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 name default.pcr_t1 numFiles 1 numPartitions 3 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 
transient_lastDdlTime 1311931517 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 3 numPartitions 3 numRows 60 partition_columns ds rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 540 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 condition expressions: 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} handleSkewJoin: false outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7 Select Operator expressions: expr: _col0 type: int expr: _col1 type: string expr: _col2 type: string expr: _col5 type: int expr: _col6 type: string expr: _col7 type: string outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 File Output Operator compressed: false GlobalTableId: 0 directory: file:/tmp/amarsri/hive_2011-07-29_02-25-54_023_2165479406375168597/-mr-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat properties: columns _col0,_col1,_col2,_col3,_col4,_col5 columns.types int,string,string,int,string,string escape.delim \ TotalFiles: 1 GatherStats: false MultiFileSpray: false Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: file:/tmp/amarsri/hive_2011-07-29_02-25-54_023_2165479406375168597/-mr-10002 Reduce Output Operator key expressions: expr: _col0 type: int sort order: + tag: -1 value expressions: expr: _col0 type: int expr: _col1 type: string expr: _col2 type: string expr: _col3 type: int expr: _col4 type: string expr: _col5 type: string Needs Tagging: false Path -> Alias: file:/tmp/amarsri/hive_2011-07-29_02-25-54_023_2165479406375168597/-mr-10002 [file:/tmp/amarsri/hive_2011-07-29_02-25-54_023_2165479406375168597/-mr-10002] Path -> Partition: file:/tmp/amarsri/hive_2011-07-29_02-25-54_023_2165479406375168597/-mr-10002 Partition base file name: -mr-10002 input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat properties: columns _col0,_col1,_col2,_col3,_col4,_col5 columns.types int,string,string,int,string,string escape.delim \ input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat properties: columns _col0,_col1,_col2,_col3,_col4,_col5 columns.types int,string,string,int,string,string escape.delim \ Reduce Operator Tree: Extract File Output Operator compressed: false GlobalTableId: 0 directory: file:/tmp/amarsri/hive_2011-07-29_02-25-54_023_2165479406375168597/-ext-10001 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-29_02-25-54_023_2165479406375168597/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: 
columns _col0,_col1,_col2,_col3,_col4,_col5 columns.types int:string:string:int:string:string escape.delim \ serialization.format 1 TotalFiles: 1 GatherStats: false MultiFileSpray: false Stage: Stage-0 Fetch Operator limit: -1 PREHOOK: query: select * from pcr_t1 t1 join pcr_t1 t2 on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-09' order by t1.key PREHOOK: type: QUERY PREHOOK: Input: default@pcr_t1@ds=2000-04-08 PREHOOK: Input: default@pcr_t1@ds=2000-04-09 PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-54_115_2654671516063101710/-mr-10000 POSTHOOK: query: select * from pcr_t1 t1 join pcr_t1 t2 on t1.key=t2.key and t1.ds='2000-04-08' and t2.ds='2000-04-09' order by t1.key POSTHOOK: type: QUERY POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-25-54_115_2654671516063101710/-mr-10000 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 0 val_0 2000-04-08 0 val_0 2000-04-09 0 val_0 2000-04-08 0 val_0 2000-04-09 0 val_0 2000-04-08 0 val_0 2000-04-09 0 val_0 2000-04-08 0 val_0 2000-04-09 0 val_0 2000-04-08 0 val_0 2000-04-09 0 val_0 2000-04-08 0 val_0 2000-04-09 0 val_0 2000-04-08 0 val_0 2000-04-09 0 val_0 2000-04-08 0 val_0 2000-04-09 0 val_0 2000-04-08 0 val_0 2000-04-09 2 val_2 2000-04-08 2 val_2 2000-04-09 4 val_4 2000-04-08 4 val_4 2000-04-09 5 val_5 2000-04-08 5 val_5 2000-04-09 5 val_5 2000-04-08 5 val_5 2000-04-09 5 val_5 2000-04-08 5 val_5 2000-04-09 5 val_5 2000-04-08 5 val_5 2000-04-09 5 val_5 2000-04-08 5 val_5 2000-04-09 5 val_5 2000-04-08 5 val_5 2000-04-09 5 val_5 2000-04-08 5 val_5 2000-04-09 5 val_5 2000-04-08 5 val_5 2000-04-09 5 val_5 2000-04-08 5 val_5 2000-04-09 8 val_8 2000-04-08 8 val_8 2000-04-09 9 val_9 2000-04-08 9 val_9 2000-04-09 10 val_10 2000-04-08 10 val_10 2000-04-09 11 val_11 2000-04-08 11 val_11 2000-04-09 12 val_12 2000-04-08 12 val_12 2000-04-09 12 val_12 2000-04-08 12 val_12 2000-04-09 12 val_12 2000-04-08 12 val_12 2000-04-09 12 val_12 2000-04-08 12 val_12 2000-04-09 15 val_15 2000-04-08 15 val_15 2000-04-09 15 val_15 2000-04-08 15 val_15 2000-04-09 15 val_15 2000-04-08 15 val_15 2000-04-09 15 val_15 2000-04-08 15 val_15 2000-04-09 17 val_17 2000-04-08 17 val_17 2000-04-09 18 val_18 2000-04-08 18 val_18 2000-04-09 18 val_18 2000-04-08 18 val_18 2000-04-09 18 val_18 2000-04-08 18 val_18 2000-04-09 18 val_18 2000-04-08 18 val_18 2000-04-09 19 val_19 2000-04-08 19 val_19 2000-04-09 PREHOOK: query: insert overwrite table pcr_t1 partition (ds='2000-04-11') select * from src where key < 20 order by key PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@pcr_t1@ds=2000-04-11 POSTHOOK: query: insert overwrite table pcr_t1 partition (ds='2000-04-11') select * from src where key < 20 order by key POSTHOOK: 
type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@pcr_t1@ds=2000-04-11 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: explain extended select key, value, ds from pcr_t1 where (ds>'2000-04-08' and ds<'2000-04-11') or (ds>='2000-04-08' and ds<='2000-04-11' and key=2) order by key, value, ds PREHOOK: type: QUERY POSTHOOK: query: explain extended select key, value, ds from pcr_t1 where (ds>'2000-04-08' and ds<'2000-04-11') or (ds>='2000-04-08' and ds<='2000-04-11' and key=2) order by key, value, ds POSTHOOK: type: QUERY POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME pcr_t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)) (TOK_SELEXPR (TOK_TABLE_OR_COL ds))) (TOK_WHERE (or (and (> (TOK_TABLE_OR_COL ds) '2000-04-08') (< (TOK_TABLE_OR_COL ds) '2000-04-11')) (and (and (>= (TOK_TABLE_OR_COL ds) '2000-04-08') (<= (TOK_TABLE_OR_COL ds) '2000-04-11')) (= (TOK_TABLE_OR_COL key) 2)))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ds))))) STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: pcr_t1 TableScan alias: pcr_t1 GatherStats: false Filter Operator isSamplingPred: false predicate: expr: (((ds > '2000-04-08') and (ds < '2000-04-11')) or (key = 2)) type: boolean 
Select Operator expressions: expr: key type: int expr: value type: string expr: ds type: string outputColumnNames: _col0, _col1, _col2 Reduce Output Operator key expressions: expr: _col0 type: int expr: _col1 type: string expr: _col2 type: string sort order: +++ tag: -1 value expressions: expr: _col0 type: int expr: _col1 type: string expr: _col2 type: string Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 [pcr_t1] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 [pcr_t1] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-11 [pcr_t1] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 Partition base file name: ds=2000-04-08 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-08 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 name default.pcr_t1 numFiles 1 numPartitions 4 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 4 numPartitions 4 numRows 80 partition_columns ds rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 720 transient_lastDdlTime 1311931563 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 Partition base file name: ds=2000-04-09 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-09 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 name default.pcr_t1 numFiles 1 numPartitions 4 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931517 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 4 numPartitions 4 numRows 80 partition_columns ds rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 720 transient_lastDdlTime 1311931563 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 Partition base file name: ds=2000-04-10 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-10 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 name default.pcr_t1 numFiles 1 numPartitions 4 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 4 numPartitions 4 numRows 80 partition_columns ds rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 720 transient_lastDdlTime 1311931563 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-11 Partition base file name: ds=2000-04-11 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-11 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-11 name default.pcr_t1 numFiles 1 numPartitions 4 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931563 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: 
bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 4 numPartitions 4 numRows 80 partition_columns ds rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 720 transient_lastDdlTime 1311931563 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 Reduce Operator Tree: Extract File Output Operator compressed: false GlobalTableId: 0 directory: file:/tmp/amarsri/hive_2011-07-29_02-26-03_088_4372213643425745976/-ext-10001 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-29_02-26-03_088_4372213643425745976/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: columns _col0,_col1,_col2 columns.types int:string:string escape.delim \ serialization.format 1 TotalFiles: 1 GatherStats: false MultiFileSpray: false Stage: Stage-0 Fetch Operator limit: -1 PREHOOK: query: select key, value, ds from pcr_t1 where (ds>'2000-04-08' and ds<'2000-04-11') or (ds>='2000-04-08' and ds<='2000-04-11' and key=2) order by key, value, ds PREHOOK: type: QUERY PREHOOK: Input: default@pcr_t1@ds=2000-04-08 PREHOOK: Input: default@pcr_t1@ds=2000-04-09 PREHOOK: Input: default@pcr_t1@ds=2000-04-10 PREHOOK: Input: default@pcr_t1@ds=2000-04-11 PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-26-03_156_9141465350125919150/-mr-10000 POSTHOOK: query: select key, value, ds from pcr_t1 where (ds>'2000-04-08' and ds<'2000-04-11') or (ds>='2000-04-08' and ds<='2000-04-11' and key=2) order by key, value, ds POSTHOOK: type: QUERY POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 POSTHOOK: Input: default@pcr_t1@ds=2000-04-10 POSTHOOK: Input: default@pcr_t1@ds=2000-04-11 POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-26-03_156_9141465350125919150/-mr-10000 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 0 val_0 2000-04-09 0 val_0 2000-04-09 0 val_0 2000-04-09 0 val_0 2000-04-10 0 val_0 2000-04-10 0 val_0 2000-04-10 2 val_2 2000-04-08 2 val_2 2000-04-09 2 val_2 2000-04-10 2 val_2 2000-04-11 4 val_4 2000-04-09 4 val_4 2000-04-10 5 val_5 2000-04-09 5 val_5 
2000-04-09 5 val_5 2000-04-09 5 val_5 2000-04-10 5 val_5 2000-04-10 5 val_5 2000-04-10 8 val_8 2000-04-09 8 val_8 2000-04-10 9 val_9 2000-04-09 9 val_9 2000-04-10 10 val_10 2000-04-09 10 val_10 2000-04-10 11 val_11 2000-04-09 11 val_11 2000-04-10 12 val_12 2000-04-09 12 val_12 2000-04-09 12 val_12 2000-04-10 12 val_12 2000-04-10 15 val_15 2000-04-09 15 val_15 2000-04-09 15 val_15 2000-04-10 15 val_15 2000-04-10 17 val_17 2000-04-09 17 val_17 2000-04-10 18 val_18 2000-04-09 18 val_18 2000-04-09 18 val_18 2000-04-10 18 val_18 2000-04-10 19 val_19 2000-04-09 19 val_19 2000-04-10 PREHOOK: query: explain extended select key, value, ds from pcr_t1 where (ds>'2000-04-08' and ds<'2000-04-11') or (ds<='2000-04-09' and key=2) order by key, value, ds PREHOOK: type: QUERY POSTHOOK: query: explain extended select key, value, ds from pcr_t1 where (ds>'2000-04-08' and ds<'2000-04-11') or (ds<='2000-04-09' and key=2) order by key, value, ds POSTHOOK: type: QUERY POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME pcr_t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)) (TOK_SELEXPR (TOK_TABLE_OR_COL ds))) (TOK_WHERE (or (and (> (TOK_TABLE_OR_COL ds) '2000-04-08') (< (TOK_TABLE_OR_COL ds) '2000-04-11')) (and (<= (TOK_TABLE_OR_COL ds) '2000-04-09') (= (TOK_TABLE_OR_COL key) 2)))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ds))))) STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: pcr_t1 TableScan alias: pcr_t1 GatherStats: false Filter Operator isSamplingPred: false predicate: expr: ((ds > '2000-04-08') or ((ds <= '2000-04-09') and (key = 2))) type: boolean Select Operator expressions: expr: key type: int expr: value type: string expr: ds type: string outputColumnNames: _col0, _col1, _col2 Reduce Output Operator key expressions: expr: _col0 type: int expr: _col1 type: string expr: _col2 type: string sort order: +++ tag: -1 value expressions: expr: _col0 type: int expr: _col1 type: string expr: _col2 type: string Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 [pcr_t1] 
pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 [pcr_t1] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 Partition base file name: ds=2000-04-08 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-08 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 name default.pcr_t1 numFiles 1 numPartitions 4 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 4 numPartitions 4 numRows 80 partition_columns ds rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 720 transient_lastDdlTime 1311931563 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 Partition base file name: ds=2000-04-09 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-09 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-09 name default.pcr_t1 numFiles 1 numPartitions 4 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931517 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 4 numPartitions 4 numRows 80 partition_columns ds rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 720 transient_lastDdlTime 1311931563 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe 
name: default.pcr_t1 name: default.pcr_t1 pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 Partition base file name: ds=2000-04-10 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-10 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-10 name default.pcr_t1 numFiles 1 numPartitions 4 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931521 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 4 numPartitions 4 numRows 80 partition_columns ds rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 720 transient_lastDdlTime 1311931563 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 Reduce Operator Tree: Extract File Output Operator compressed: false GlobalTableId: 0 directory: file:/tmp/amarsri/hive_2011-07-29_02-26-05_957_2370478579747978891/-ext-10001 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-29_02-26-05_957_2370478579747978891/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: columns _col0,_col1,_col2 columns.types int:string:string escape.delim \ serialization.format 1 TotalFiles: 1 GatherStats: false MultiFileSpray: false Stage: Stage-0 Fetch Operator limit: -1 PREHOOK: query: select key, value, ds from pcr_t1 where (ds>'2000-04-08' and ds<'2000-04-11') or (ds<='2000-04-09' and key=2) order by key, value, ds PREHOOK: type: QUERY PREHOOK: Input: default@pcr_t1@ds=2000-04-08 PREHOOK: Input: default@pcr_t1@ds=2000-04-09 PREHOOK: Input: default@pcr_t1@ds=2000-04-10 PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-26-06_020_9091114118060516641/-mr-10000 POSTHOOK: query: select key, value, ds from pcr_t1 where (ds>'2000-04-08' and ds<'2000-04-11') or (ds<='2000-04-09' and key=2) order by key, value, ds POSTHOOK: type: QUERY POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 POSTHOOK: Input: default@pcr_t1@ds=2000-04-09 POSTHOOK: Input: default@pcr_t1@ds=2000-04-10 POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-26-06_020_9091114118060516641/-mr-10000 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key 
EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 0 val_0 2000-04-09 0 val_0 2000-04-09 0 val_0 2000-04-09 0 val_0 2000-04-10 0 val_0 2000-04-10 0 val_0 2000-04-10 2 val_2 2000-04-08 2 val_2 2000-04-09 2 val_2 2000-04-10 4 val_4 2000-04-09 4 val_4 2000-04-10 5 val_5 2000-04-09 5 val_5 2000-04-09 5 val_5 2000-04-09 5 val_5 2000-04-10 5 val_5 2000-04-10 5 val_5 2000-04-10 8 val_8 2000-04-09 8 val_8 2000-04-10 9 val_9 2000-04-09 9 val_9 2000-04-10 10 val_10 2000-04-09 10 val_10 2000-04-10 11 val_11 2000-04-09 11 val_11 2000-04-10 12 val_12 2000-04-09 12 val_12 2000-04-09 12 val_12 2000-04-10 12 val_12 2000-04-10 15 val_15 2000-04-09 15 val_15 2000-04-09 15 val_15 2000-04-10 15 val_15 2000-04-10 17 val_17 2000-04-09 17 val_17 2000-04-10 18 val_18 2000-04-09 18 val_18 2000-04-09 18 val_18 2000-04-10 18 val_18 2000-04-10 19 val_19 2000-04-09 19 val_19 2000-04-10 PREHOOK: query: create table pcr_t2 (key int, value string) PREHOOK: type: CREATETABLE POSTHOOK: query: create table pcr_t2 (key int, value string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@pcr_t2 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: create table pcr_t3 (key int, value string) PREHOOK: type: CREATETABLE POSTHOOK: query: create table pcr_t3 (key int, value string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@pcr_t3 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: 
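In the second EXPLAIN EXTENDED above (the query whose WHERE clause is (ds>'2000-04-08' and ds<'2000-04-11') or (ds<='2000-04-09' and key=2)), the optimizer goes one step further: no partition can satisfy either disjunct for ds=2000-04-11, so that partition is pruned outright and only three partition inputs appear; the now-redundant upper bound ds<'2000-04-11' is dropped from the row-level predicate, leaving (ds > '2000-04-08') or ((ds <= '2000-04-09') and (key = 2)). A hedged sketch of the equivalent query over the surviving partitions (illustration only):

  -- Equivalent formulation once ds=2000-04-11 has been pruned;
  -- the upper ds bound is implied by the remaining partitions.
  SELECT key, value, ds
  FROM pcr_t1
  WHERE ds > '2000-04-08' OR (ds <= '2000-04-09' AND key = 2)
  ORDER BY key, value, ds;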
Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: explain extended from pcr_t1 insert overwrite table pcr_t2 select key, value where ds='2000-04-08' insert overwrite table pcr_t3 select key, value where ds='2000-04-08' PREHOOK: type: QUERY POSTHOOK: query: explain extended from pcr_t1 insert overwrite table pcr_t2 select key, value where ds='2000-04-08' insert overwrite table pcr_t3 select key, value where ds='2000-04-08' POSTHOOK: type: QUERY POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME pcr_t1))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME pcr_t2))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '2000-04-08'))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME pcr_t3))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '2000-04-08')))) STAGE DEPENDENCIES: Stage-2 is a root stage Stage-6 depends on stages: Stage-2 , consists of Stage-5, Stage-4 Stage-5 Stage-0 depends on stages: Stage-5, Stage-4 Stage-3 depends on stages: Stage-0 Stage-4 Stage-10 depends on stages: Stage-2 , consists of Stage-9, Stage-8 Stage-9 Stage-1 depends on stages: Stage-9, Stage-8 Stage-7 depends on stages: Stage-1 Stage-8 STAGE PLANS: Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: pcr_t1 TableScan alias: pcr_t1 GatherStats: false Select Operator expressions: expr: key type: int expr: value type: string outputColumnNames: _col0, _col1 File Output Operator compressed: false GlobalTableId: 1 directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10004 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: 
bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t2 name default.pcr_t2 serialization.ddl struct pcr_t2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311931568 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t2 TotalFiles: 1 GatherStats: true MultiFileSpray: false Select Operator expressions: expr: key type: int expr: value type: string outputColumnNames: _col0, _col1 File Output Operator compressed: false GlobalTableId: 2 directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10005 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10002/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t3 name default.pcr_t3 serialization.ddl struct pcr_t3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311931568 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t3 TotalFiles: 1 GatherStats: true MultiFileSpray: false Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 Partition base file name: ds=2000-04-08 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-08 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 name default.pcr_t1 numFiles 1 numPartitions 4 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 4 numPartitions 4 numRows 80 partition_columns ds rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 720 transient_lastDdlTime 1311931563 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 Stage: Stage-6 Conditional Operator Stage: Stage-5 Move Operator files: hdfs directory: true source: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10004 destination: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true source: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t2 name default.pcr_t2 serialization.ddl struct pcr_t2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311931568 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t2 tmp directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10001 Stage: Stage-3 Stats-Aggr Operator Stats Aggregation Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10000/ Stage: Stage-4 Map Reduce Alias -> Map Operator Tree: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10004 File Output Operator compressed: false GlobalTableId: 0 directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t2 name default.pcr_t2 serialization.ddl struct pcr_t2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311931568 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t2 TotalFiles: 1 GatherStats: false MultiFileSpray: false Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10004 [pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10004] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10004 Partition base file name: -ext-10004 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat 
org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t2 name default.pcr_t2 serialization.ddl struct pcr_t2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311931568 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t2 name default.pcr_t2 serialization.ddl struct pcr_t2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311931568 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t2 name: default.pcr_t2 Stage: Stage-10 Conditional Operator Stage: Stage-9 Move Operator files: hdfs directory: true source: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10005 destination: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10002 Stage: Stage-1 Move Operator tables: replace: true source: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t3 name default.pcr_t3 serialization.ddl struct pcr_t3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311931568 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t3 tmp directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10003 Stage: Stage-7 Stats-Aggr Operator Stats Aggregation Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10002/ Stage: Stage-8 Map Reduce Alias -> Map Operator Tree: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10005 File Output Operator compressed: false GlobalTableId: 0 directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t3 name default.pcr_t3 serialization.ddl struct 
pcr_t3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311931568 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t3 TotalFiles: 1 GatherStats: false MultiFileSpray: false Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10005 [pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10005] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-08_961_9153972734051597410/-ext-10005 Partition base file name: -ext-10005 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t3 name default.pcr_t3 serialization.ddl struct pcr_t3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311931568 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t3 name default.pcr_t3 serialization.ddl struct pcr_t3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311931568 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t3 name: default.pcr_t3 PREHOOK: query: from pcr_t1 insert overwrite table pcr_t2 select key, value where ds='2000-04-08' insert overwrite table pcr_t3 select key, value where ds='2000-04-08' PREHOOK: type: QUERY PREHOOK: Input: default@pcr_t1@ds=2000-04-08 PREHOOK: Output: default@pcr_t2 PREHOOK: Output: default@pcr_t3 POSTHOOK: query: from pcr_t1 insert overwrite table pcr_t2 select key, value where ds='2000-04-08' insert overwrite table pcr_t3 select key, value where ds='2000-04-08' POSTHOOK: type: QUERY POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 POSTHOOK: Output: default@pcr_t2 POSTHOOK: Output: default@pcr_t3 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 
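Both insert branches of the multi-insert above filter solely on the constant partition condition ds='2000-04-08', so no Filter Operator appears in Stage-2 at all: the single 2000-04-08 partition is scanned once and streamed into both File Output Operators. The statement under test, reformatted for readability (a sketch, not additional recorded output):

  -- One partition scan, no residual row-level filter, two insert targets.
  FROM pcr_t1
  INSERT OVERWRITE TABLE pcr_t2 SELECT key, value WHERE ds = '2000-04-08'
  INSERT OVERWRITE TABLE pcr_t3 SELECT key, value WHERE ds = '2000-04-08';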
PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: explain extended from pcr_t1 insert overwrite table pcr_t2 select key, value where ds='2000-04-08' and key=2 insert overwrite table pcr_t3 select key, value where ds='2000-04-08' and key=3 PREHOOK: type: QUERY POSTHOOK: query: explain extended from pcr_t1 insert overwrite table pcr_t2 select key, value where ds='2000-04-08' and key=2 insert overwrite table pcr_t3 select key, value where ds='2000-04-08' and key=3 POSTHOOK: type: QUERY POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME pcr_t1))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME pcr_t2))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (and (= (TOK_TABLE_OR_COL ds) '2000-04-08') (= (TOK_TABLE_OR_COL key) 2)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME pcr_t3))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (and (= (TOK_TABLE_OR_COL ds) '2000-04-08') (= (TOK_TABLE_OR_COL key) 3))))) STAGE DEPENDENCIES: Stage-2 is a root stage Stage-6 depends on stages: Stage-2 , consists of Stage-5, Stage-4 Stage-5 Stage-0 depends on stages: Stage-5, Stage-4 Stage-3 depends on stages: Stage-0 Stage-4 Stage-10 depends on stages: Stage-2 , consists of Stage-9, Stage-8 Stage-9 Stage-1 depends on stages: Stage-9, Stage-8 Stage-7 depends on stages: Stage-1 Stage-8 STAGE PLANS: Stage: Stage-2 Map Reduce Alias -> Map 
Operator Tree: pcr_t1 TableScan alias: pcr_t1 GatherStats: false Filter Operator isSamplingPred: false predicate: expr: (key = 2) type: boolean Select Operator expressions: expr: key type: int expr: value type: string outputColumnNames: _col0, _col1 File Output Operator compressed: false GlobalTableId: 1 directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10004 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10000/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t2 name default.pcr_t2 numFiles 1 numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931572 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t2 TotalFiles: 1 GatherStats: true MultiFileSpray: false Filter Operator isSamplingPred: false predicate: expr: (key = 3) type: boolean Select Operator expressions: expr: key type: int expr: value type: string outputColumnNames: _col0, _col1 File Output Operator compressed: false GlobalTableId: 2 directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10005 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10002/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t3 name default.pcr_t3 numFiles 1 numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931572 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t3 TotalFiles: 1 GatherStats: true MultiFileSpray: false Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 Partition base file name: ds=2000-04-08 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2000-04-08 properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 name default.pcr_t1 numFiles 1 
numPartitions 4 numRows 20 partition_columns ds rawDataSize 160 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931513 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t1 name default.pcr_t1 numFiles 4 numPartitions 4 numRows 80 partition_columns ds rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 720 transient_lastDdlTime 1311931563 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 name: default.pcr_t1 Stage: Stage-6 Conditional Operator Stage: Stage-5 Move Operator files: hdfs directory: true source: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10004 destination: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true source: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t2 name default.pcr_t2 numFiles 1 numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931572 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t2 tmp directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10001 Stage: Stage-3 Stats-Aggr Operator Stats Aggregation Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10000/ Stage: Stage-4 Map Reduce Alias -> Map Operator Tree: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10004 File Output Operator compressed: false GlobalTableId: 0 directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t2 name default.pcr_t2 numFiles 1 
numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931572 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t2 TotalFiles: 1 GatherStats: false MultiFileSpray: false Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10004 [pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10004] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10004 Partition base file name: -ext-10004 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t2 name default.pcr_t2 numFiles 1 numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931572 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t2 name default.pcr_t2 numFiles 1 numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931572 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t2 name: default.pcr_t2 Stage: Stage-10 Conditional Operator Stage: Stage-9 Move Operator files: hdfs directory: true source: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10005 destination: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10002 Stage: Stage-1 Move Operator tables: replace: true source: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10002 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t3 name default.pcr_t3 numFiles 1 numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931572 
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t3 tmp directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10003 Stage: Stage-7 Stats-Aggr Operator Stats Aggregation Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10002/ Stage: Stage-8 Map Reduce Alias -> Map Operator Tree: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10005 File Output Operator compressed: false GlobalTableId: 0 directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10002 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t3 name default.pcr_t3 numFiles 1 numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931572 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t3 TotalFiles: 1 GatherStats: false MultiFileSpray: false Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10005 [pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10005] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-29_02-26-12_844_8364964969890288095/-ext-10005 Partition base file name: -ext-10005 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t3 name default.pcr_t3 numFiles 1 numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931572 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/pcr_t3 name default.pcr_t3 numFiles 1 numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe totalSize 180 transient_lastDdlTime 1311931572 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t3 name: 
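In this variant the partition condition ds='2000-04-08' is again removed, but the non-partition conjuncts survive as two separate Filter Operators, (key = 2) and (key = 3), branching off the single scan of the 2000-04-08 partition and feeding pcr_t2 and pcr_t3 respectively. The tested statement, reformatted for readability (a sketch, not additional recorded output):

  -- One partition scan with two residual row-level filters, one per branch.
  FROM pcr_t1
  INSERT OVERWRITE TABLE pcr_t2 SELECT key, value WHERE ds = '2000-04-08' AND key = 2
  INSERT OVERWRITE TABLE pcr_t3 SELECT key, value WHERE ds = '2000-04-08' AND key = 3;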
default.pcr_t3 PREHOOK: query: from pcr_t1 insert overwrite table pcr_t2 select key, value where ds='2000-04-08' and key=2 insert overwrite table pcr_t3 select key, value where ds='2000-04-08' and key=3 PREHOOK: type: QUERY PREHOOK: Input: default@pcr_t1@ds=2000-04-08 PREHOOK: Output: default@pcr_t2 PREHOOK: Output: default@pcr_t3 POSTHOOK: query: from pcr_t1 insert overwrite table pcr_t2 select key, value where ds='2000-04-08' and key=2 insert overwrite table pcr_t3 select key, value where ds='2000-04-08' and key=3 POSTHOOK: type: QUERY POSTHOOK: Input: default@pcr_t1@ds=2000-04-08 POSTHOOK: Output: default@pcr_t2 POSTHOOK: Output: default@pcr_t3 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: explain extended select key, value from srcpart where ds='2008-04-08' and hr=11 order by key limit 10 PREHOOK: type: QUERY POSTHOOK: query: explain extended select key, value from srcpart where ds='2008-04-08' and hr=11 order by key limit 10 POSTHOOK: type: QUERY POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE 
[(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME srcpart))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (and (= (TOK_TABLE_OR_COL ds) '2008-04-08') (= (TOK_TABLE_OR_COL hr) 11))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key))) (TOK_LIMIT 10))) STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: srcpart TableScan alias: srcpart GatherStats: false Select Operator expressions: expr: key type: string expr: value type: string outputColumnNames: _col0, _col1 Reduce Output Operator key expressions: expr: _col0 type: string sort order: + tag: -1 value expressions: expr: _col0 type: string expr: _col1 type: string Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [srcpart] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2008-04-08 hr 11 properties: bucket_count -1 columns key,value columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 name default.srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311925579 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart name default.srcpart partition_columns ds/hr serialization.ddl struct srcpart 
{ string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311925579 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart Reduce Operator Tree: Extract Limit File Output Operator compressed: false GlobalTableId: 0 directory: file:/tmp/amarsri/hive_2011-07-29_02-26-16_781_3979674098856098756/-ext-10001 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-29_02-26-16_781_3979674098856098756/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: columns _col0,_col1 columns.types string:string escape.delim \ serialization.format 1 TotalFiles: 1 GatherStats: false MultiFileSpray: false Stage: Stage-0 Fetch Operator limit: 10 PREHOOK: query: select key, value from srcpart where ds='2008-04-04' and hr=11 order by key limit 10 PREHOOK: type: QUERY PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-26-16_831_5474496186957520159/-mr-10000 POSTHOOK: query: select key, value from srcpart where ds='2008-04-04' and hr=11 order by key limit 10 POSTHOOK: type: QUERY POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-26-16_831_5474496186957520159/-mr-10000 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: explain extended select key, value, ds, hr from srcpart where ds='2008-04-08' and (hr='11' or hr='12') and key=11 order by key, ds, hr PREHOOK: type: QUERY POSTHOOK: query: explain extended select key, value, ds, hr from srcpart where ds='2008-04-08' and (hr='11' or hr='12') and key=11 order by key, ds, hr 
POSTHOOK: type: QUERY POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME srcpart))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)) (TOK_SELEXPR (TOK_TABLE_OR_COL ds)) (TOK_SELEXPR (TOK_TABLE_OR_COL hr))) (TOK_WHERE (and (and (= (TOK_TABLE_OR_COL ds) '2008-04-08') (or (= (TOK_TABLE_OR_COL hr) '11') (= (TOK_TABLE_OR_COL hr) '12'))) (= (TOK_TABLE_OR_COL key) 11))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ds)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL hr))))) STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: srcpart TableScan alias: srcpart GatherStats: false Filter Operator isSamplingPred: false predicate: expr: (key = 11) type: boolean Select Operator expressions: expr: key type: string expr: value type: string expr: ds type: string expr: hr type: string outputColumnNames: _col0, _col1, _col2, _col3 Reduce Output Operator key expressions: expr: _col0 type: string expr: _col2 type: string expr: _col3 type: string sort order: +++ tag: -1 value expressions: expr: _col0 type: string expr: _col1 type: string expr: _col2 type: string expr: _col3 type: string Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [srcpart] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [srcpart] Path -> Partition: 
pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2008-04-08 hr 11 properties: bucket_count -1 columns key,value columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 name default.srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311925579 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart name default.srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311925579 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 Partition base file name: hr=12 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2008-04-08 hr 12 properties: bucket_count -1 columns key,value columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 name default.srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311925580 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart name default.srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311925579 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart Reduce Operator Tree: Extract File Output Operator compressed: false GlobalTableId: 0 directory: file:/tmp/amarsri/hive_2011-07-29_02-26-19_426_8436546086449017020/-ext-10001 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: 
file:/tmp/amarsri/hive_2011-07-29_02-26-19_426_8436546086449017020/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: columns _col0,_col1,_col2,_col3 columns.types string:string:string:string escape.delim \ serialization.format 1 TotalFiles: 1 GatherStats: false MultiFileSpray: false Stage: Stage-0 Fetch Operator limit: -1 PREHOOK: query: select key, value, ds, hr from srcpart where ds='2008-04-08' and (hr='11' or hr='12') and key=11 order by key, ds, hr PREHOOK: type: QUERY PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-26-19_507_6265596136708347196/-mr-10000 POSTHOOK: query: select key, value, ds, hr from srcpart where ds='2008-04-08' and (hr='11' or hr='12') and key=11 order by key, ds, hr POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-26-19_507_6265596136708347196/-mr-10000 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] 11 val_11 2008-04-08 11 11 val_11 2008-04-08 12 PREHOOK: query: explain extended select key, value, ds, hr from srcpart where hr='11' and key=11 order by key, ds, hr PREHOOK: type: QUERY POSTHOOK: query: explain extended select key, value, ds, hr from srcpart where hr='11' and key=11 order by key, ds, hr POSTHOOK: type: QUERY POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value 
SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME srcpart))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)) (TOK_SELEXPR (TOK_TABLE_OR_COL ds)) (TOK_SELEXPR (TOK_TABLE_OR_COL hr))) (TOK_WHERE (and (= (TOK_TABLE_OR_COL hr) '11') (= (TOK_TABLE_OR_COL key) 11))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ds)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL hr))))) STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 is a root stage STAGE PLANS: Stage: Stage-1 Map Reduce Alias -> Map Operator Tree: srcpart TableScan alias: srcpart GatherStats: false Filter Operator isSamplingPred: false predicate: expr: (key = 11) type: boolean Select Operator expressions: expr: key type: string expr: value type: string expr: ds type: string expr: hr type: string outputColumnNames: _col0, _col1, _col2, _col3 Reduce Output Operator key expressions: expr: _col0 type: string expr: _col2 type: string expr: _col3 type: string sort order: +++ tag: -1 value expressions: expr: _col0 type: string expr: _col1 type: string expr: _col2 type: string expr: _col3 type: string Needs Tagging: false Path -> Alias: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [srcpart] pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 [srcpart] Path -> Partition: pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2008-04-08 hr 11 properties: bucket_count -1 columns key,value columns.types string:string 
file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 name default.srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311925579 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart name default.srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311925579 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 Partition base file name: hr=11 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat partition values: ds 2008-04-09 hr 11 properties: bucket_count -1 columns key,value columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 name default.srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311925580 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: bucket_count -1 columns key,value columns.types string:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart name default.srcpart partition_columns ds/hr serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe transient_lastDdlTime 1311925579 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart name: default.srcpart Reduce Operator Tree: Extract File Output Operator compressed: false GlobalTableId: 0 directory: file:/tmp/amarsri/hive_2011-07-29_02-26-22_220_5945751181226537643/-ext-10001 NumFilesPerFileSink: 1 Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-29_02-26-22_220_5945751181226537643/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: columns _col0,_col1,_col2,_col3 columns.types string:string:string:string escape.delim \ serialization.format 1 TotalFiles: 1 GatherStats: false MultiFileSpray: false Stage: Stage-0 
Fetch Operator limit: -1 PREHOOK: query: select key, value, ds, hr from srcpart where hr='11' and key=11 order by key, ds, hr PREHOOK: type: QUERY PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-26-22_283_1922423093365833529/-mr-10000 POSTHOOK: query: select key, value, ds, hr from srcpart where hr='11' and key=11 order by key, ds, hr POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-29_02-26-22_283_1922423093365833529/-mr-10000 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] 11 val_11 2008-04-08 11 11 val_11 2008-04-09 11 PREHOOK: query: drop table pcr_t1 PREHOOK: type: DROPTABLE PREHOOK: Input: default@pcr_t1 PREHOOK: Output: default@pcr_t1 POSTHOOK: query: drop table pcr_t1 POSTHOOK: type: DROPTABLE POSTHOOK: Input: default@pcr_t1 POSTHOOK: Output: default@pcr_t1 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 
PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: drop table pcr_t2 PREHOOK: type: DROPTABLE PREHOOK: Input: default@pcr_t2 PREHOOK: Output: default@pcr_t2 POSTHOOK: query: drop table pcr_t2 POSTHOOK: type: DROPTABLE POSTHOOK: Input: default@pcr_t2 POSTHOOK: Output: default@pcr_t2 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: drop table pcr_t3 PREHOOK: type: DROPTABLE PREHOOK: Input: default@pcr_t3 PREHOOK: Output: default@pcr_t3 POSTHOOK: query: drop table pcr_t3 POSTHOOK: type: DROPTABLE POSTHOOK: Input: 
default@pcr_t3 POSTHOOK: Output: default@pcr_t3 POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t1 PARTITION(ds=2000-04-11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t2.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.key SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: pcr_t3.value SIMPLE [(pcr_t1)pcr_t1.FieldSchema(name:value, type:string, comment:null), ]
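Note (illustrative sketch, not part of the recorded test run above): the EXPLAIN EXTENDED blocks in this file show Hive pruning the scanned partitions and then dropping the partition-column predicates from the Filter Operator, so that only the residual row-level predicate (for example "predicate: expr: (key = 11)") survives in the plan while Path -> Alias lists only the matching ds/hr partitions. A minimal HiveQL sketch of that pattern follows; the table pcr_demo and its data are assumptions introduced only for illustration and do not appear in the test.

-- hypothetical table, laid out like the (ds, hr)-partitioned srcpart used above
create table pcr_demo (key string, value string) partitioned by (ds string, hr string);

-- because ds='2008-04-08' and (hr='11' or hr='12') can be satisfied entirely by
-- partition selection, the optimizer is expected to remove those predicates from
-- the Filter Operator, leaving only key = 11, as in the plans recorded above
explain extended
select key, value, ds, hr
from pcr_demo
where ds = '2008-04-08' and (hr = '11' or hr = '12') and key = 11
order by key, ds, hr;

Comparing such a plan before and after enabling the optimization is the kind of check this golden output captures; the sketch is only a reading aid for the plans above, not a definitive statement of the optimizer's implementation.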