PREHOOK: query: -- This is to test the union->selectstar->filesink optimization
-- Union of 2 map-reduce subqueries is performed, followed by a select and a file sink.
-- However, some columns are repeated, so the union cannot simply be removed.
-- It does not matter whether the output is merged or not; in this case, merging is turned
-- off
-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
-- Since this test creates sub-directories for the output table outputTbl1, it might be easier
-- to run the test only on hadoop 23. The union is removed, and the select (which selects columns from
-- both the sub-queries of the union) is pushed above the union.
create table inputTbl1(key string, val string) stored as textfile
PREHOOK: type: CREATETABLE
POSTHOOK: query: -- This is to test the union->selectstar->filesink optimization
-- Union of 2 map-reduce subqueries is performed, followed by a select and a file sink.
-- However, some columns are repeated, so the union cannot simply be removed.
-- It does not matter whether the output is merged or not; in this case, merging is turned
-- off
-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
-- Since this test creates sub-directories for the output table outputTbl1, it might be easier
-- to run the test only on hadoop 23. The union is removed, and the select (which selects columns from
-- both the sub-queries of the union) is pushed above the union.
create table inputTbl1(key string, val string) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@inputTbl1
PREHOOK: query: create table outputTbl1(key string, values bigint, values2 bigint) stored as textfile
PREHOOK: type: CREATETABLE
POSTHOOK: query: create table outputTbl1(key string, values bigint, values2 bigint) stored as textfile
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@outputTbl1
PREHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
PREHOOK: type: LOAD
PREHOOK: Output: default@inputtbl1
POSTHOOK: query: load data local inpath '../data/files/T1.txt' into table inputTbl1
POSTHOOK: type: LOAD
POSTHOOK: Output: default@inputtbl1
PREHOOK: query: explain
insert overwrite table outputTbl1
SELECT a.key, a.values, a.values
FROM (
  SELECT key, count(1) as values from inputTbl1 group by key
  UNION ALL
  SELECT key, count(1) as values from inputTbl1 group by key
) a
PREHOOK: type: QUERY
POSTHOOK: query: explain
insert overwrite table outputTbl1
SELECT a.key, a.values, a.values
FROM (
  SELECT key, count(1) as values from inputTbl1 group by key
  UNION ALL
  SELECT key, count(1) as values from inputTbl1 group by key
) a
POSTHOOK: type: QUERY
ABSTRACT SYNTAX TREE:
  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME inputTbl1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION count 1) values)) (TOK_GROUPBY (TOK_TABLE_OR_COL key)))) (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME inputTbl1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION count 1) values)) (TOK_GROUPBY (TOK_TABLE_OR_COL key))))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME outputTbl1))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) values)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) values)))))

STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-0 depends on stages: Stage-1, Stage-2
  Stage-2 is a root stage

STAGE PLANS:
  Stage: Stage-1
    Map Reduce
      Alias -> Map Operator Tree:
        null-subquery2:a-subquery2:inputtbl1
          TableScan
            alias: inputtbl1
            Select Operator
              expressions:
                    expr: key
                    type: string
              outputColumnNames: key
              Group By Operator
                aggregations:
                      expr: count(1)
                bucketGroup: false
                keys:
                      expr: key
                      type: string
                mode: hash
                outputColumnNames: _col0, _col1
                Reduce Output Operator
                  key expressions:
                        expr: _col0
                        type: string
                  sort order: +
                  Map-reduce partition columns:
                        expr: _col0
                        type: string
                  tag: -1
                  value expressions:
                        expr: _col1
                        type: bigint
      Reduce Operator Tree:
        Group By Operator
          aggregations:
                expr: count(VALUE._col0)
          bucketGroup: false
          keys:
                expr: KEY._col0
                type: string
          mode: mergepartial
          outputColumnNames: _col0, _col1
          Select Operator
            expressions:
                  expr: _col0
                  type: string
                  expr: _col1
                  type: bigint
                  expr: _col1
                  type: bigint
            outputColumnNames: _col0, _col1, _col2
            File Output Operator
              compressed: false
              GlobalTableId: 1
              table:
                  input format: org.apache.hadoop.mapred.TextInputFormat
                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                  name: default.outputtbl1

  Stage: Stage-0
    Move Operator
      tables:
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.outputtbl1

  Stage: Stage-2
    Map Reduce
      Alias -> Map Operator Tree:
        null-subquery1:a-subquery1:inputtbl1
          TableScan
            alias: inputtbl1
            Select Operator
              expressions:
                    expr: key
                    type: string
              outputColumnNames: key
              Group By Operator
                aggregations:
                      expr: count(1)
                bucketGroup: false
                keys:
                      expr: key
                      type: string
                mode: hash
                outputColumnNames: _col0, _col1
                Reduce Output Operator
                  key expressions:
                        expr: _col0
                        type: string
                  sort order: +
                  Map-reduce partition columns:
                        expr: _col0
                        type: string
                  tag: -1
                  value expressions:
                        expr: _col1
                        type: bigint
      Reduce Operator Tree:
        Group By Operator
          aggregations:
                expr: count(VALUE._col0)
          bucketGroup: false
          keys:
                expr: KEY._col0
                type: string
          mode: mergepartial
          outputColumnNames: _col0, _col1
          Select Operator
            expressions:
                  expr: _col0
                  type: string
                  expr: _col1
                  type: bigint
                  expr: _col1
                  type: bigint
            outputColumnNames: _col0, _col1, _col2
            File Output Operator
              compressed: false
              GlobalTableId: 1
              table:
                  input format: org.apache.hadoop.mapred.TextInputFormat
                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                  name: default.outputtbl1

PREHOOK: query: insert overwrite table outputTbl1
SELECT a.key, a.values, a.values
FROM (
  SELECT key, count(1) as values from inputTbl1 group by key
  UNION ALL
  SELECT key, count(1) as values from inputTbl1 group by key
) a
PREHOOK: type: QUERY
PREHOOK: Input: default@inputtbl1
PREHOOK: Output: default@outputtbl1
POSTHOOK: query: insert overwrite table outputTbl1
SELECT a.key, a.values, a.values
FROM (
  SELECT key, count(1) as values from inputTbl1 group by key
  UNION ALL
  SELECT key, count(1) as values from inputTbl1 group by key
) a
POSTHOOK: type: QUERY
POSTHOOK: Input: default@inputtbl1
POSTHOOK: Output: default@outputtbl1
POSTHOOK: Lineage: outputtbl1.key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
POSTHOOK: Lineage: outputtbl1.values EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
POSTHOOK: Lineage: outputtbl1.values2 EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
PREHOOK: query: desc formatted outputTbl1
PREHOOK: type: DESCTABLE
POSTHOOK: query: desc formatted outputTbl1
POSTHOOK: type: DESCTABLE
POSTHOOK: Lineage: outputtbl1.key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
POSTHOOK: Lineage: outputtbl1.values EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
POSTHOOK: Lineage: outputtbl1.values2 EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
# col_name            	data_type           	comment
key                   	string              	None
values                	bigint              	None
values2               	bigint              	None

# Detailed Table Information
Database:             	default
#### A masked pattern was here ####
Protect Mode:         	None
Retention:            	0
#### A masked pattern was here ####
Table Type:           	MANAGED_TABLE
Table Parameters:
#### A masked pattern was here ####

# Storage Information
SerDe Library:        	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
InputFormat:          	org.apache.hadoop.mapred.TextInputFormat
OutputFormat:         	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed:           	No
Num Buckets:          	-1
Bucket Columns:       	[]
Sort Columns:         	[]
Storage Desc Params:
	serialization.format	1
PREHOOK: query: select * from outputTbl1
PREHOOK: type: QUERY
PREHOOK: Input: default@outputtbl1
#### A masked pattern was here ####
POSTHOOK: query: select * from outputTbl1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@outputtbl1
#### A masked pattern was here ####
POSTHOOK: Lineage: outputtbl1.key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
POSTHOOK: Lineage: outputtbl1.values EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
POSTHOOK: Lineage: outputtbl1.values2 EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
1	1	1
2	1	1
3	1	1
7	1	1
8	2	2
1	1	1
2	1	1
3	1	1
7	1	1
8	2	2
PREHOOK: query: explain
insert overwrite table outputTbl1
SELECT a.key, concat(a.values, a.values), concat(a.values, a.values)
FROM (
  SELECT key, count(1) as values from inputTbl1 group by key
  UNION ALL
  SELECT key, count(1) as values from inputTbl1 group by key
) a
PREHOOK: type: QUERY
POSTHOOK: query: explain
insert overwrite table outputTbl1
SELECT a.key, concat(a.values, a.values), concat(a.values, a.values)
FROM (
  SELECT key, count(1) as values from inputTbl1 group by key
  UNION ALL
  SELECT key, count(1) as values from inputTbl1 group by key
) a
POSTHOOK: type: QUERY
POSTHOOK: Lineage: outputtbl1.key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
POSTHOOK: Lineage: outputtbl1.values EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
POSTHOOK: Lineage: outputtbl1.values2 EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
ABSTRACT SYNTAX TREE:
  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME inputTbl1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION count 1) values)) (TOK_GROUPBY (TOK_TABLE_OR_COL key)))) (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME inputTbl1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION count 1) values)) (TOK_GROUPBY (TOK_TABLE_OR_COL key))))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME outputTbl1))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (TOK_FUNCTION concat (. (TOK_TABLE_OR_COL a) values) (. (TOK_TABLE_OR_COL a) values))) (TOK_SELEXPR (TOK_FUNCTION concat (. (TOK_TABLE_OR_COL a) values) (. (TOK_TABLE_OR_COL a) values))))))

STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-0 depends on stages: Stage-1, Stage-2
  Stage-2 is a root stage

STAGE PLANS:
  Stage: Stage-1
    Map Reduce
      Alias -> Map Operator Tree:
        null-subquery2:a-subquery2:inputtbl1
          TableScan
            alias: inputtbl1
            Select Operator
              expressions:
                    expr: key
                    type: string
              outputColumnNames: key
              Group By Operator
                aggregations:
                      expr: count(1)
                bucketGroup: false
                keys:
                      expr: key
                      type: string
                mode: hash
                outputColumnNames: _col0, _col1
                Reduce Output Operator
                  key expressions:
                        expr: _col0
                        type: string
                  sort order: +
                  Map-reduce partition columns:
                        expr: _col0
                        type: string
                  tag: -1
                  value expressions:
                        expr: _col1
                        type: bigint
      Reduce Operator Tree:
        Group By Operator
          aggregations:
                expr: count(VALUE._col0)
          bucketGroup: false
          keys:
                expr: KEY._col0
                type: string
          mode: mergepartial
          outputColumnNames: _col0, _col1
          Select Operator
            expressions:
                  expr: _col0
                  type: string
                  expr: UDFToLong(concat(_col1, _col1))
                  type: bigint
                  expr: UDFToLong(concat(_col1, _col1))
                  type: bigint
            outputColumnNames: _col0, _col1, _col2
            File Output Operator
              compressed: false
              GlobalTableId: 1
              table:
                  input format: org.apache.hadoop.mapred.TextInputFormat
                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                  name: default.outputtbl1

  Stage: Stage-0
    Move Operator
      tables:
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.outputtbl1

  Stage: Stage-2
    Map Reduce
      Alias -> Map Operator Tree:
        null-subquery1:a-subquery1:inputtbl1
          TableScan
            alias: inputtbl1
            Select Operator
              expressions:
                    expr: key
                    type: string
              outputColumnNames: key
              Group By Operator
                aggregations:
                      expr: count(1)
                bucketGroup: false
                keys:
                      expr: key
                      type: string
                mode: hash
                outputColumnNames: _col0, _col1
                Reduce Output Operator
                  key expressions:
                        expr: _col0
                        type: string
                  sort order: +
                  Map-reduce partition columns:
                        expr: _col0
                        type: string
                  tag: -1
                  value expressions:
                        expr: _col1
                        type: bigint
      Reduce Operator Tree:
        Group By Operator
          aggregations:
                expr: count(VALUE._col0)
          bucketGroup: false
          keys:
                expr: KEY._col0
                type: string
          mode: mergepartial
          outputColumnNames: _col0, _col1
          Select Operator
            expressions:
                  expr: _col0
                  type: string
                  expr: UDFToLong(concat(_col1, _col1))
                  type: bigint
                  expr: UDFToLong(concat(_col1, _col1))
                  type: bigint
            outputColumnNames: _col0, _col1, _col2
            File Output Operator
              compressed: false
              GlobalTableId: 1
              table:
                  input format: org.apache.hadoop.mapred.TextInputFormat
                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                  name: default.outputtbl1

PREHOOK: query: insert overwrite table outputTbl1
SELECT a.key, concat(a.values, a.values), concat(a.values, a.values)
FROM (
  SELECT key, count(1) as values from inputTbl1 group by key
  UNION ALL
  SELECT key, count(1) as values from inputTbl1 group by key
) a
PREHOOK: type: QUERY
PREHOOK: Input: default@inputtbl1
PREHOOK: Output: default@outputtbl1
POSTHOOK: query: insert overwrite table outputTbl1
SELECT a.key, concat(a.values, a.values), concat(a.values, a.values)
FROM (
  SELECT key, count(1) as values from inputTbl1 group by key
  UNION ALL
  SELECT key, count(1) as values from inputTbl1 group by key
) a
POSTHOOK: type: QUERY
POSTHOOK: Input: default@inputtbl1
POSTHOOK: Output: default@outputtbl1
POSTHOOK: Lineage: outputtbl1.key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
POSTHOOK: Lineage: outputtbl1.key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
POSTHOOK: Lineage: outputtbl1.values EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
POSTHOOK: Lineage: outputtbl1.values EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
POSTHOOK: Lineage: outputtbl1.values2 EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
POSTHOOK: Lineage: outputtbl1.values2 EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
PREHOOK: query: select * from outputTbl1 order by key, values
PREHOOK: type: QUERY
PREHOOK: Input: default@outputtbl1
#### A masked pattern was here ####
POSTHOOK: query: select * from outputTbl1 order by key, values
POSTHOOK: type: QUERY
POSTHOOK: Input: default@outputtbl1
#### A masked pattern was here ####
POSTHOOK: Lineage: outputtbl1.key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
POSTHOOK: Lineage: outputtbl1.key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
POSTHOOK: Lineage: outputtbl1.values EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
POSTHOOK: Lineage: outputtbl1.values EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
POSTHOOK: Lineage: outputtbl1.values2 EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
POSTHOOK: Lineage: outputtbl1.values2 EXPRESSION [(inputtbl1)inputtbl1.null, (inputtbl1)inputtbl1.null, ]
1	11	11
1	11	11
2	11	11
2	11	11
3	11	11
3	11	11
7	11	11
7	11	11
8	22	22
8	22	22
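-- Reference: a union-remove test of this kind is typically driven by a .q file that enables
-- union removal with sub-directory output and turns file merging off before running the
-- queries above. The .q file itself is not part of this output, so the exact property values
-- below are an assumption; the property names are standard Hive/Hadoop settings.
-- set hive.optimize.union.remove=true;
-- set hive.mapred.supports.subdirectories=true;
-- set hive.merge.mapfiles=false;
-- set hive.merge.mapredfiles=false;
-- set mapred.input.dir.recursive=true;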