PREHOOK: query: -- SORT_QUERY_RESULTS
drop table t1
PREHOOK: type: DROPTABLE
POSTHOOK: query: -- SORT_QUERY_RESULTS
drop table t1
POSTHOOK: type: DROPTABLE
PREHOOK: query: drop table t2
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table t2
POSTHOOK: type: DROPTABLE
PREHOOK: query: create table t1 as select * from src where key < 10
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@src
PREHOOK: Output: database:default
PREHOOK: Output: default@t1
POSTHOOK: query: create table t1 as select * from src where key < 10
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@src
POSTHOOK: Output: database:default
POSTHOOK: Output: default@t1
PREHOOK: query: create table t2 as select * from src where key < 10
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@src
PREHOOK: Output: database:default
PREHOOK: Output: default@t2
POSTHOOK: query: create table t2 as select * from src where key < 10
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@src
POSTHOOK: Output: database:default
POSTHOOK: Output: default@t2
PREHOOK: query: create table t3(key string, cnt int)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@t3
POSTHOOK: query: create table t3(key string, cnt int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@t3
PREHOOK: query: create table t4(value string, cnt int)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@t4
POSTHOOK: query: create table t4(value string, cnt int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@t4
PREHOOK: query: explain
from (select * from t1 union all select * from t2 ) x
insert overwrite table t3 select key, count(1) group by key
insert overwrite table t4 select value, count(1) group by value
PREHOOK: type: QUERY
POSTHOOK: query: explain
from (select * from t1 union all select * from t2 ) x
insert overwrite table t3 select key, count(1) group by key
insert overwrite table t4 select value, count(1) group by value
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-2 is a root stage
  Stage-0 depends on stages: Stage-2
  Stage-3 depends on stages: Stage-0
  Stage-4 depends on stages: Stage-2
  Stage-1 depends on stages: Stage-4
  Stage-5 depends on stages: Stage-1

STAGE PLANS:
  Stage: Stage-2
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: t1
            Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: key (type: string), value (type: string)
              outputColumnNames: _col0, _col1
              Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
              Union
                Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
                Select Operator
                  expressions: _col0 (type: string)
                  outputColumnNames: _col0
                  Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
                  Group By Operator
                    aggregations: count(1)
                    keys: _col0 (type: string)
                    mode: hash
                    outputColumnNames: _col0, _col1
                    Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
                    Reduce Output Operator
                      key expressions: _col0 (type: string)
                      sort order: +
                      Map-reduce partition columns: _col0 (type: string)
                      Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
                      value expressions: _col1 (type: bigint)
                Select Operator
                  expressions: _col1 (type: string)
                  outputColumnNames: _col1
                  Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
                  Group By Operator
                    aggregations: count(1)
                    keys: _col1 (type: string)
                    mode: hash
                    outputColumnNames: _col0, _col1
                    Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
                    File Output Operator
                      compressed: false
                      table:
                          input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                          output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                          serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
          TableScan
            alias: t2
            Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: key (type: string), value (type: string)
              outputColumnNames: _col0, _col1
              Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
              Union
                Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
                Select Operator
                  expressions: _col0 (type: string)
                  outputColumnNames: _col0
                  Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
                  Group By Operator
                    aggregations: count(1)
                    keys: _col0 (type: string)
                    mode: hash
                    outputColumnNames: _col0, _col1
                    Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
                    Reduce Output Operator
                      key expressions: _col0 (type: string)
                      sort order: +
                      Map-reduce partition columns: _col0 (type: string)
                      Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
                      value expressions: _col1 (type: bigint)
                Select Operator
                  expressions: _col1 (type: string)
                  outputColumnNames: _col1
                  Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
                  Group By Operator
                    aggregations: count(1)
                    keys: _col1 (type: string)
                    mode: hash
                    outputColumnNames: _col0, _col1
                    Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
                    File Output Operator
                      compressed: false
                      table:
                          input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                          output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                          serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
      Reduce Operator Tree:
        Group By Operator
          aggregations: count(VALUE._col0)
          keys: KEY._col0 (type: string)
          mode: mergepartial
          outputColumnNames: _col0, _col1
          Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: _col0 (type: string), UDFToInteger(_col1) (type: int)
            outputColumnNames: _col0, _col1
            Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
            File Output Operator
              compressed: false
              Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
              table:
                  input format: org.apache.hadoop.mapred.TextInputFormat
                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                  name: default.t3

  Stage: Stage-0
    Move Operator
      tables:
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.t3

  Stage: Stage-3
    Stats-Aggr Operator

  Stage: Stage-4
    Map Reduce
      Map Operator Tree:
          TableScan
            Reduce Output Operator
              key expressions: _col0 (type: string)
              sort order: +
              Map-reduce partition columns: _col0 (type: string)
              Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
              value expressions: _col1 (type: bigint)
      Reduce Operator Tree:
        Group By Operator
          aggregations: count(VALUE._col0)
          keys: KEY._col0 (type: string)
          mode: mergepartial
          outputColumnNames: _col0, _col1
          Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: _col0 (type: string), UDFToInteger(_col1) (type: int)
            outputColumnNames: _col0, _col1
            Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
            File Output Operator
              compressed: false
              Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
              table:
                  input format: org.apache.hadoop.mapred.TextInputFormat
                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                  name: default.t4

  Stage: Stage-1
    Move Operator
      tables:
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.t4

  Stage: Stage-5
    Stats-Aggr Operator

PREHOOK: query: from (select * from t1 union all select * from t2 ) x
insert overwrite table t3 select key, count(1) group by key
insert overwrite table t4 select value, count(1) group by value
PREHOOK: type: QUERY
PREHOOK: Input: default@t1
PREHOOK: Input: default@t2
PREHOOK: Output: default@t3
PREHOOK: Output: default@t4
POSTHOOK: query: from (select * from t1 union all select * from t2 ) x
insert overwrite table t3 select key, count(1) group by key
insert overwrite table t4 select value, count(1) group by value
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t1
POSTHOOK: Input: default@t2
POSTHOOK: Output: default@t3
POSTHOOK: Output: default@t4
POSTHOOK: Lineage: t3.cnt EXPRESSION [(t1)t1.null, (t2)t2.null, ]
POSTHOOK: Lineage: t3.key EXPRESSION [(t1)t1.FieldSchema(name:key, type:string, comment:null), (t2)t2.FieldSchema(name:key, type:string, comment:null), ]
POSTHOOK: Lineage: t4.cnt EXPRESSION [(t1)t1.null, (t2)t2.null, ]
POSTHOOK: Lineage: t4.value EXPRESSION [(t1)t1.FieldSchema(name:value, type:string, comment:null), (t2)t2.FieldSchema(name:value, type:string, comment:null), ]
PREHOOK: query: select * from t3
PREHOOK: type: QUERY
PREHOOK: Input: default@t3
#### A masked pattern was here ####
POSTHOOK: query: select * from t3
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t3
#### A masked pattern was here ####
0	6
2	2
4	2
5	6
8	2
9	2
PREHOOK: query: select * from t4
PREHOOK: type: QUERY
PREHOOK: Input: default@t4
#### A masked pattern was here ####
POSTHOOK: query: select * from t4
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t4
#### A masked pattern was here ####
val_0	6
val_2	2
val_4	2
val_5	6
val_8	2
val_9	2
PREHOOK: query: create table t5(c1 string, cnt int)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@t5
POSTHOOK: query: create table t5(c1 string, cnt int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@t5
PREHOOK: query: create table t6(c1 string, cnt int)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@t6
POSTHOOK: query: create table t6(c1 string, cnt int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@t6
PREHOOK: query: explain
from (
  select key as c1, count(1) as cnt from t1 group by key
  union all
  select key as c1, count(1) as cnt from t2 group by key
) x
insert overwrite table t5 select c1, sum(cnt) group by c1
insert overwrite table t6 select c1, sum(cnt) group by c1
PREHOOK: type: QUERY
POSTHOOK: query: explain
from (
  select key as c1, count(1) as cnt from t1 group by key
  union all
  select key as c1, count(1) as cnt from t2 group by key
) x
insert overwrite table t5 select c1, sum(cnt) group by c1
insert overwrite table t6 select c1, sum(cnt) group by c1
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-2 is a root stage
  Stage-3 depends on stages: Stage-2, Stage-6
  Stage-0 depends on stages: Stage-3
  Stage-4 depends on stages: Stage-0
  Stage-1 depends on stages: Stage-3
  Stage-5 depends on stages: Stage-1
  Stage-6 is a root stage

STAGE PLANS:
  Stage: Stage-2
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: t1
            Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: key (type: string)
              outputColumnNames: key
              Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                aggregations: count(1)
                keys: key (type: string)
                mode: hash
                outputColumnNames: _col0, _col1
                Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: string)
                  sort order: +
                  Map-reduce partition columns: _col0 (type: string)
                  Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
                  value expressions: _col1 (type: bigint)
      Reduce Operator Tree:
        Group By Operator
          aggregations: count(VALUE._col0)
          keys: KEY._col0 (type: string)
          mode: mergepartial
          outputColumnNames: _col0, _col1
          Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe

  Stage: Stage-3
    Map Reduce
      Map Operator Tree:
          TableScan
            Union
              Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: string)
                sort order: +
                Map-reduce partition columns: _col0 (type: string)
                Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
                value expressions: _col1 (type: bigint)
          TableScan
            Union
              Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: string)
                sort order: +
                Map-reduce partition columns: _col0 (type: string)
                Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
                value expressions: _col1 (type: bigint)
      Reduce Operator Tree:
        Forward
          Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
          Group By Operator
            aggregations: sum(VALUE._col0)
            keys: KEY._col0 (type: string)
            mode: complete
            outputColumnNames: _col0, _col1
            Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: _col0 (type: string), UDFToInteger(_col1) (type: int)
              outputColumnNames: _col0, _col1
              Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
              File Output Operator
                compressed: false
                Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
                table:
                    input format: org.apache.hadoop.mapred.TextInputFormat
                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                    name: default.t5
          Group By Operator
            aggregations: sum(VALUE._col0)
            keys: KEY._col0 (type: string)
            mode: complete
            outputColumnNames: _col0, _col1
            Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: _col0 (type: string), UDFToInteger(_col1) (type: int)
              outputColumnNames: _col0, _col1
              Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
              File Output Operator
                compressed: false
                Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
                table:
                    input format: org.apache.hadoop.mapred.TextInputFormat
                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                    name: default.t6

  Stage: Stage-0
    Move Operator
      tables:
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.t5

  Stage: Stage-4
    Stats-Aggr Operator

  Stage: Stage-1
    Move Operator
      tables:
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.t6

  Stage: Stage-5
    Stats-Aggr Operator

  Stage: Stage-6
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: t2
            Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: key (type: string)
              outputColumnNames: key
              Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                aggregations: count(1)
                keys: key (type: string)
                mode: hash
                outputColumnNames: _col0, _col1
                Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: string)
                  sort order: +
                  Map-reduce partition columns: _col0 (type: string)
                  Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
                  value expressions: _col1 (type: bigint)
      Reduce Operator Tree:
        Group By Operator
          aggregations: count(VALUE._col0)
          keys: KEY._col0 (type: string)
          mode: mergepartial
          outputColumnNames: _col0, _col1
          Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe

PREHOOK: query: from (
  select key as c1, count(1) as cnt from t1 group by key
  union all
  select key as c1, count(1) as cnt from t2 group by key
) x
insert overwrite table t5 select c1, sum(cnt) group by c1
insert overwrite table t6 select c1, sum(cnt) group by c1
PREHOOK: type: QUERY
PREHOOK: Input: default@t1
PREHOOK: Input: default@t2
PREHOOK: Output: default@t5
PREHOOK: Output: default@t6
POSTHOOK: query: from (
  select key as c1, count(1) as cnt from t1 group by key
  union all
  select key as c1, count(1) as cnt from t2 group by key
) x
insert overwrite table t5 select c1, sum(cnt) group by c1
insert overwrite table t6 select c1, sum(cnt) group by c1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t1
POSTHOOK: Input: default@t2
POSTHOOK: Output: default@t5
POSTHOOK: Output: default@t6
POSTHOOK: Lineage: t5.c1 EXPRESSION [(t1)t1.FieldSchema(name:key, type:string, comment:null), (t2)t2.FieldSchema(name:key, type:string, comment:null), ]
POSTHOOK: Lineage: t5.cnt EXPRESSION [(t1)t1.null, (t2)t2.null, ]
POSTHOOK: Lineage: t6.c1 EXPRESSION [(t1)t1.FieldSchema(name:key, type:string, comment:null), (t2)t2.FieldSchema(name:key, type:string, comment:null), ]
POSTHOOK: Lineage: t6.cnt EXPRESSION [(t1)t1.null, (t2)t2.null, ]
PREHOOK: query: select * from t5
PREHOOK: type: QUERY
PREHOOK: Input: default@t5
#### A masked pattern was here ####
POSTHOOK: query: select * from t5
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t5
#### A masked pattern was here ####
0	6
2	2
4	2
5	6
8	2
9	2
PREHOOK: query: select * from t6
PREHOOK: type: QUERY
PREHOOK: Input: default@t6
#### A masked pattern was here ####
POSTHOOK: query: select * from t6
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t6
#### A masked pattern was here ####
0	6
2	2
4	2
5	6
8	2
9	2
PREHOOK: query: drop table t1
PREHOOK: type: DROPTABLE
PREHOOK: Input: default@t1
PREHOOK: Output: default@t1
POSTHOOK: query: drop table t1
POSTHOOK: type: DROPTABLE
POSTHOOK: Input: default@t1
POSTHOOK: Output: default@t1
PREHOOK: query: drop table t2
PREHOOK: type: DROPTABLE
PREHOOK: Input: default@t2
PREHOOK: Output: default@t2
POSTHOOK: query: drop table t2
POSTHOOK: type: DROPTABLE
POSTHOOK: Input: default@t2
POSTHOOK: Output: default@t2
PREHOOK: query: create table t1 as select * from src where key < 10
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@src
PREHOOK: Output: database:default
PREHOOK: Output: default@t1
POSTHOOK: query: create table t1 as select * from src where key < 10
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@src
POSTHOOK: Output: database:default
POSTHOOK: Output: default@t1
PREHOOK: query: create table t2 as select key, count(1) as cnt from src where key < 10 group by key
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@src
PREHOOK: Output: database:default
PREHOOK: Output: default@t2
POSTHOOK: query: create table t2 as select key, count(1) as cnt from src where key < 10 group by key
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@src
POSTHOOK: Output: database:default
POSTHOOK: Output: default@t2
PREHOOK: query: create table t7(c1 string, cnt int)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@t7
POSTHOOK: query: create table t7(c1 string, cnt int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@t7
PREHOOK: query: create table t8(c1 string, cnt int)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@t8
POSTHOOK: query: create table t8(c1 string, cnt int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@t8
PREHOOK: query: explain
from (
  select key as c1, count(1) as cnt from t1 group by key
  union all
  select key as c1, cnt from t2
) x
insert overwrite table t7 select c1, count(1) group by c1
insert overwrite table t8 select c1, count(1) group by c1
PREHOOK: type: QUERY
POSTHOOK: query: explain
from (
  select key as c1, count(1) as cnt from t1 group by key
  union all
  select key as c1, cnt from t2
) x
insert overwrite table t7 select c1, count(1) group by c1
insert overwrite table t8 select c1, count(1) group by c1
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-2 is a root stage
  Stage-3 depends on stages: Stage-2
  Stage-0 depends on stages: Stage-3
  Stage-4 depends on stages: Stage-0
  Stage-1 depends on stages: Stage-3
  Stage-5 depends on stages: Stage-1

STAGE PLANS:
  Stage: Stage-2
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: t1
            Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: key (type: string)
              outputColumnNames: key
              Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                aggregations: count(1)
                keys: key (type: string)
                mode: hash
                outputColumnNames: _col0, _col1
                Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: string)
                  sort order: +
                  Map-reduce partition columns: _col0 (type: string)
                  Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
                  value expressions: _col1 (type: bigint)
      Reduce Operator Tree:
        Group By Operator
          aggregations: count(VALUE._col0)
          keys: KEY._col0 (type: string)
          mode: mergepartial
          outputColumnNames: _col0, _col1
          Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: _col0 (type: string)
            outputColumnNames: _col0
            Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
            File Output Operator
              compressed: false
              table:
                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe

  Stage: Stage-3
    Map Reduce
      Map Operator Tree:
          TableScan
            Union
              Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: _col0 (type: string)
                outputColumnNames: _col0
                Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: string)
                  sort order: +
                  Map-reduce partition columns: _col0 (type: string)
                  Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE
          TableScan
            alias: t2
            Statistics: Num rows: 6 Data size: 18 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: key (type: string)
              outputColumnNames: _col0
              Statistics: Num rows: 6 Data size: 18 Basic stats: COMPLETE Column stats: NONE
              Union
                Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE
                Select Operator
                  expressions: _col0 (type: string)
                  outputColumnNames: _col0
                  Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: _col0 (type: string)
                    sort order: +
                    Map-reduce partition columns: _col0 (type: string)
                    Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE
      Reduce Operator Tree:
        Forward
          Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE
          Group By Operator
            aggregations: count(1)
            keys: KEY._col0 (type: string)
            mode: complete
            outputColumnNames: _col0, _col1
            Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: _col0 (type: string), UDFToInteger(_col1) (type: int)
              outputColumnNames: _col0, _col1
              Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE
              File Output Operator
                compressed: false
                Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE
                table:
                    input format: org.apache.hadoop.mapred.TextInputFormat
                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                    name: default.t7
          Group By Operator
            aggregations: count(1)
            keys: KEY._col0 (type: string)
            mode: complete
            outputColumnNames: _col0, _col1
            Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: _col0 (type: string), UDFToInteger(_col1) (type: int)
              outputColumnNames: _col0, _col1
              Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE
              File Output Operator
                compressed: false
                Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE
                table:
                    input format: org.apache.hadoop.mapred.TextInputFormat
                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                    name: default.t8

  Stage: Stage-0
    Move Operator
      tables:
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.t7

  Stage: Stage-4
    Stats-Aggr Operator

  Stage: Stage-1
    Move Operator
      tables:
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.t8

  Stage: Stage-5
    Stats-Aggr Operator

PREHOOK: query: from (
  select key as c1, count(1) as cnt from t1 group by key
  union all
  select key as c1, cnt from t2
) x
insert overwrite table t7 select c1, count(1) group by c1
insert overwrite table t8 select c1, count(1) group by c1
PREHOOK: type: QUERY
PREHOOK: Input: default@t1
PREHOOK: Input: default@t2
PREHOOK: Output: default@t7
PREHOOK: Output: default@t8
POSTHOOK: query: from (
  select key as c1, count(1) as cnt from t1 group by key
  union all
  select key as c1, cnt from t2
) x
insert overwrite table t7 select c1, count(1) group by c1
insert overwrite table t8 select c1, count(1) group by c1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t1
POSTHOOK: Input: default@t2
POSTHOOK: Output: default@t7
POSTHOOK: Output: default@t8
POSTHOOK: Lineage: t7.c1 EXPRESSION [(t1)t1.FieldSchema(name:key, type:string, comment:null), (t2)t2.FieldSchema(name:key, type:string, comment:null), ]
POSTHOOK: Lineage: t7.cnt EXPRESSION [(t1)t1.null, (t2)t2.null, ]
POSTHOOK: Lineage: t8.c1 EXPRESSION [(t1)t1.FieldSchema(name:key, type:string, comment:null), (t2)t2.FieldSchema(name:key, type:string, comment:null), ]
POSTHOOK: Lineage: t8.cnt EXPRESSION [(t1)t1.null, (t2)t2.null, ]
PREHOOK: query: select * from t7
PREHOOK: type: QUERY
PREHOOK: Input: default@t7
#### A masked pattern was here ####
POSTHOOK: query: select * from t7
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t7
#### A masked pattern was here ####
0	2
2	2
4	2
5	2
8	2
9	2
PREHOOK: query: select * from t8
PREHOOK: type: QUERY
PREHOOK: Input: default@t8
#### A masked pattern was here ####
POSTHOOK: query: select * from t8
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t8
#### A masked pattern was here ####
0	2
2	2
4	2
5	2
8	2
9	2
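-- The plans above all exercise the same HiveQL pattern: scan a UNION ALL
-- subquery once, then fan out into several aggregations with a multi-insert,
-- so every target table is loaded in a single pass over the base tables.
-- A minimal standalone sketch of the first case, using only the tables and
-- queries already shown in this test (src is the standard test fixture):
--
--   FROM (SELECT * FROM t1 UNION ALL SELECT * FROM t2) x
--   INSERT OVERWRITE TABLE t3 SELECT key,   count(1) GROUP BY key
--   INSERT OVERWRITE TABLE t4 SELECT value, count(1) GROUP BY value;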