PREHOOK: query: CREATE TABLE DEST1(key ARRAY<STRING>, value BIGINT) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
POSTHOOK: query: CREATE TABLE DEST1(key ARRAY<STRING>, value BIGINT) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@DEST1
PREHOOK: query: CREATE TABLE DEST2(key MAP<STRING, STRING>, value BIGINT) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
POSTHOOK: query: CREATE TABLE DEST2(key MAP<STRING, STRING>, value BIGINT) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@DEST2
PREHOOK: query: EXPLAIN
FROM SRC
INSERT OVERWRITE TABLE DEST1 SELECT ARRAY(SRC.key), COUNT(1) GROUP BY ARRAY(SRC.key) limit 10
INSERT OVERWRITE TABLE DEST2 SELECT MAP(SRC.key, SRC.value), COUNT(1) GROUP BY MAP(SRC.key, SRC.value) limit 10
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
FROM SRC
INSERT OVERWRITE TABLE DEST1 SELECT ARRAY(SRC.key), COUNT(1) GROUP BY ARRAY(SRC.key) limit 10
INSERT OVERWRITE TABLE DEST2 SELECT MAP(SRC.key, SRC.value), COUNT(1) GROUP BY MAP(SRC.key, SRC.value) limit 10
POSTHOOK: type: QUERY
ABSTRACT SYNTAX TREE:
  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME SRC))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME DEST1))) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION ARRAY (. (TOK_TABLE_OR_COL SRC) key))) (TOK_SELEXPR (TOK_FUNCTION COUNT 1))) (TOK_GROUPBY (TOK_FUNCTION ARRAY (. (TOK_TABLE_OR_COL SRC) key))) (TOK_LIMIT 10)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME DEST2))) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION MAP (. (TOK_TABLE_OR_COL SRC) key) (. (TOK_TABLE_OR_COL SRC) value))) (TOK_SELEXPR (TOK_FUNCTION COUNT 1))) (TOK_GROUPBY (TOK_FUNCTION MAP (. (TOK_TABLE_OR_COL SRC) key) (. (TOK_TABLE_OR_COL SRC) value))) (TOK_LIMIT 10)))

STAGE DEPENDENCIES:
  Stage-2 is a root stage
  Stage-3 depends on stages: Stage-2
  Stage-0 depends on stages: Stage-3
  Stage-4 depends on stages: Stage-0
  Stage-5 depends on stages: Stage-2
  Stage-6 depends on stages: Stage-5
  Stage-1 depends on stages: Stage-6
  Stage-7 depends on stages: Stage-1

STAGE PLANS:
  Stage: Stage-2
    Map Reduce
      Alias -> Map Operator Tree:
        src 
          TableScan
            alias: src
            Select Operator
              expressions:
                    expr: key
                    type: string
              outputColumnNames: key
              Group By Operator
                aggregations:
                      expr: count(1)
                bucketGroup: false
                keys:
                      expr: array(key)
                      type: array<string>
                mode: hash
                outputColumnNames: _col0, _col1
                Reduce Output Operator
                  key expressions:
                        expr: _col0
                        type: array<string>
                  sort order: +
                  Map-reduce partition columns:
                        expr: _col0
                        type: array<string>
                  tag: -1
                  value expressions:
                        expr: _col1
                        type: bigint
            Select Operator
              expressions:
                    expr: key
                    type: string
                    expr: value
                    type: string
              outputColumnNames: key, value
              Group By Operator
                aggregations:
                      expr: count(1)
                bucketGroup: false
                keys:
                      expr: map(key:value)
                      type: map<string,string>
                mode: hash
                outputColumnNames: _col0, _col1
                File Output Operator
                  compressed: false
                  GlobalTableId: 0
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
      Reduce Operator Tree:
        Group By Operator
          aggregations:
                expr: count(VALUE._col0)
          bucketGroup: false
          keys:
                expr: KEY._col0
                type: array<string>
          mode: mergepartial
          outputColumnNames: _col0, _col1
          Select Operator
            expressions:
                  expr: _col0
                  type: array<string>
                  expr: _col1
                  type: bigint
            outputColumnNames: _col0, _col1
            Limit
              File Output Operator
                compressed: false
                GlobalTableId: 0
                table:
                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

  Stage: Stage-3
    Map Reduce
      Alias -> Map Operator Tree:
#### A masked pattern was here ####
            Reduce Output Operator
              sort order: 
              tag: -1
              value expressions:
                    expr: _col0
                    type: array<string>
                    expr: _col1
                    type: bigint
      Reduce Operator Tree:
        Extract
          Limit
            File Output Operator
              compressed: false
              GlobalTableId: 1
              table:
                  input format: org.apache.hadoop.mapred.TextInputFormat
                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                  name: default.dest1

  Stage: Stage-0
    Move Operator
      tables:
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.dest1

  Stage: Stage-4
    Stats-Aggr Operator

  Stage: Stage-5
    Map Reduce
      Alias -> Map Operator Tree:
#### A masked pattern was here ####
            Reduce Output Operator
              key expressions:
                    expr: _col0
                    type: map<string,string>
              sort order: +
              Map-reduce partition columns:
                    expr: _col0
                    type: map<string,string>
              tag: -1
              value expressions:
                    expr: _col1
                    type: bigint
      Reduce Operator Tree:
        Group By Operator
          aggregations:
                expr: count(VALUE._col0)
          bucketGroup: false
          keys:
                expr: KEY._col0
                type: map<string,string>
          mode: mergepartial
          outputColumnNames: _col0, _col1
          Select Operator
            expressions:
                  expr: _col0
                  type: map<string,string>
                  expr: _col1
                  type: bigint
            outputColumnNames: _col0, _col1
            Limit
              File Output Operator
                compressed: false
                GlobalTableId: 0
                table:
                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

  Stage: Stage-6
    Map Reduce
      Alias -> Map Operator Tree:
#### A masked pattern was here ####
            Reduce Output Operator
              sort order: 
              tag: -1
              value expressions:
                    expr: _col0
                    type: map<string,string>
                    expr: _col1
                    type: bigint
      Reduce Operator Tree:
        Extract
          Limit
            File Output Operator
              compressed: false
              GlobalTableId: 2
              table:
                  input format: org.apache.hadoop.mapred.TextInputFormat
                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                  name: default.dest2

  Stage: Stage-1
    Move Operator
      tables:
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.dest2

  Stage: Stage-7
    Stats-Aggr Operator

PREHOOK: query: FROM SRC
INSERT OVERWRITE TABLE DEST1 SELECT ARRAY(SRC.key), COUNT(1) GROUP BY ARRAY(SRC.key) limit 10
INSERT OVERWRITE TABLE DEST2 SELECT MAP(SRC.key, SRC.value), COUNT(1) GROUP BY MAP(SRC.key, SRC.value) limit 10
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@dest1
PREHOOK: Output: default@dest2
POSTHOOK: query: FROM SRC
INSERT OVERWRITE TABLE DEST1 SELECT ARRAY(SRC.key), COUNT(1) GROUP BY ARRAY(SRC.key) limit 10
INSERT OVERWRITE TABLE DEST2 SELECT MAP(SRC.key, SRC.value), COUNT(1) GROUP BY MAP(SRC.key, SRC.value) limit 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
POSTHOOK: Output: default@dest2
POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: dest1.value EXPRESSION [(src)src.null, ]
POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: dest2.value EXPRESSION [(src)src.null, ]
PREHOOK: query: SELECT DEST1.* FROM DEST1 ORDER BY key[0] ASC, value ASC
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
#### A masked pattern was here ####
POSTHOOK: query: SELECT DEST1.* FROM DEST1 ORDER BY key[0] ASC, value ASC
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
#### A masked pattern was here ####
POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: dest1.value EXPRESSION [(src)src.null, ]
POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: dest2.value EXPRESSION [(src)src.null, ]
["0"]	3
["10"]	1
["100"]	2
["103"]	2
["104"]	2
["105"]	1
["11"]	1
["111"]	1
["113"]	2
["114"]	1
PREHOOK: query: SELECT DEST2.* FROM DEST2 ORDER BY 1 ASC, value ASC
PREHOOK: type: QUERY
PREHOOK: Input: default@dest2
#### A masked pattern was here ####
POSTHOOK: query: SELECT DEST2.* FROM DEST2 ORDER BY 1 ASC, value ASC
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest2
#### A masked pattern was here ####
POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: dest1.value EXPRESSION [(src)src.null, ]
POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: dest2.value EXPRESSION [(src)src.null, ]
{"10":"val_10"}	1
{"105":"val_105"}	1
{"11":"val_11"}	1
{"111":"val_111"}	1
{"114":"val_114"}	1
{"100":"val_100"}	2
{"103":"val_103"}	2
{"104":"val_104"}	2
{"113":"val_113"}	2
{"0":"val_0"}	3