Saving all output to "!!{outputDirectory}!!/auto_join15.q.raw". Enter "record" with no arguments to stop it. >>> !run !!{qFileDirectory}!!/auto_join15.q >>> >>> set hive.auto.convert.join = true; No rows affected >>> >>> explain select sum(hash(a.k1,a.v1,a.k2, a.v2)) from ( SELECT src1.key as k1, src1.value as v1, src2.key as k2, src2.value as v2 FROM src src1 JOIN src src2 ON (src1.key = src2.key) SORT BY k1, v1, k2, v2 ) a; 'Explain' 'ABSTRACT SYNTAX TREE:' ' (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME src) src1) (TOK_TABREF (TOK_TABNAME src) src2) (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src2) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key) k1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) value) v1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) key) k2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value) v2)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL k1)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL v1)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL k2)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL v2))))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION hash (. (TOK_TABLE_OR_COL a) k1) (. (TOK_TABLE_OR_COL a) v1) (. (TOK_TABLE_OR_COL a) k2) (. (TOK_TABLE_OR_COL a) v2)))))))' '' 'STAGE DEPENDENCIES:' ' Stage-7 is a root stage , consists of Stage-8, Stage-9, Stage-1' ' Stage-8 has a backup stage: Stage-1' ' Stage-5 depends on stages: Stage-8' ' Stage-2 depends on stages: Stage-1, Stage-5, Stage-6' ' Stage-3 depends on stages: Stage-2' ' Stage-9 has a backup stage: Stage-1' ' Stage-6 depends on stages: Stage-9' ' Stage-1' ' Stage-0 is a root stage' '' 'STAGE PLANS:' ' Stage: Stage-7' ' Conditional Operator' '' ' Stage: Stage-8' ' Map Reduce Local Work' ' Alias -> Map Local Tables:' ' a:src2 ' ' Fetch Operator' ' limit: -1' ' Alias -> Map Local Operator Tree:' ' a:src2 ' ' TableScan' ' alias: src2' ' HashTable Sink Operator' ' condition expressions:' ' 0 {key} {value}' ' 1 {key} {value}' ' handleSkewJoin: false' ' keys:' ' 0 [Column[key]]' ' 1 [Column[key]]' ' Position of Big Table: 0' '' ' Stage: Stage-5' ' Map Reduce' ' Alias -> Map Operator Tree:' ' a:src1 ' ' TableScan' ' alias: src1' ' Map Join Operator' ' condition map:' ' Inner Join 0 to 1' ' condition expressions:' ' 0 {key} {value}' ' 1 {key} {value}' ' handleSkewJoin: false' ' keys:' ' 0 [Column[key]]' ' 1 [Column[key]]' ' outputColumnNames: _col0, _col1, _col4, _col5' ' Position of Big Table: 0' ' Select Operator' ' expressions:' ' expr: _col0' ' type: string' ' expr: _col1' ' type: string' ' expr: _col4' ' type: string' ' expr: _col5' ' type: string' ' outputColumnNames: _col0, _col1, _col2, _col3' ' File Output Operator' ' compressed: false' ' GlobalTableId: 0' ' table:' ' input format: org.apache.hadoop.mapred.SequenceFileInputFormat' ' output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat' ' Local Work:' ' Map Reduce Local Work' '' ' Stage: Stage-2' ' Map Reduce' ' Alias -> Map Operator Tree:' ' file:!!{hive.exec.scratchdir}!! 
' ' Reduce Output Operator' ' key expressions:' ' expr: _col0' ' type: string' ' expr: _col1' ' type: string' ' expr: _col2' ' type: string' ' expr: _col3' ' type: string' ' sort order: ++++' ' tag: -1' ' value expressions:' ' expr: _col0' ' type: string' ' expr: _col1' ' type: string' ' expr: _col2' ' type: string' ' expr: _col3' ' type: string' ' Reduce Operator Tree:' ' Extract' ' Select Operator' ' expressions:' ' expr: _col0' ' type: string' ' expr: _col1' ' type: string' ' expr: _col2' ' type: string' ' expr: _col3' ' type: string' ' outputColumnNames: _col0, _col1, _col2, _col3' ' Group By Operator' ' aggregations:' ' expr: sum(hash(_col0,_col1,_col2,_col3))' ' bucketGroup: false' ' mode: hash' ' outputColumnNames: _col0' ' File Output Operator' ' compressed: false' ' GlobalTableId: 0' ' table:' ' input format: org.apache.hadoop.mapred.SequenceFileInputFormat' ' output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat' '' ' Stage: Stage-3' ' Map Reduce' ' Alias -> Map Operator Tree:' ' file:!!{hive.exec.scratchdir}!! ' ' Reduce Output Operator' ' sort order: ' ' tag: -1' ' value expressions:' ' expr: _col0' ' type: bigint' ' Reduce Operator Tree:' ' Group By Operator' ' aggregations:' ' expr: sum(VALUE._col0)' ' bucketGroup: false' ' mode: mergepartial' ' outputColumnNames: _col0' ' Select Operator' ' expressions:' ' expr: _col0' ' type: bigint' ' outputColumnNames: _col0' ' File Output Operator' ' compressed: false' ' GlobalTableId: 0' ' table:' ' input format: org.apache.hadoop.mapred.TextInputFormat' ' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' '' ' Stage: Stage-9' ' Map Reduce Local Work' ' Alias -> Map Local Tables:' ' a:src1 ' ' Fetch Operator' ' limit: -1' ' Alias -> Map Local Operator Tree:' ' a:src1 ' ' TableScan' ' alias: src1' ' HashTable Sink Operator' ' condition expressions:' ' 0 {key} {value}' ' 1 {key} {value}' ' handleSkewJoin: false' ' keys:' ' 0 [Column[key]]' ' 1 [Column[key]]' ' Position of Big Table: 1' '' ' Stage: Stage-6' ' Map Reduce' ' Alias -> Map Operator Tree:' ' a:src2 ' ' TableScan' ' alias: src2' ' Map Join Operator' ' condition map:' ' Inner Join 0 to 1' ' condition expressions:' ' 0 {key} {value}' ' 1 {key} {value}' ' handleSkewJoin: false' ' keys:' ' 0 [Column[key]]' ' 1 [Column[key]]' ' outputColumnNames: _col0, _col1, _col4, _col5' ' Position of Big Table: 1' ' Select Operator' ' expressions:' ' expr: _col0' ' type: string' ' expr: _col1' ' type: string' ' expr: _col4' ' type: string' ' expr: _col5' ' type: string' ' outputColumnNames: _col0, _col1, _col2, _col3' ' File Output Operator' ' compressed: false' ' GlobalTableId: 0' ' table:' ' input format: org.apache.hadoop.mapred.SequenceFileInputFormat' ' output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat' ' Local Work:' ' Map Reduce Local Work' '' ' Stage: Stage-1' ' Map Reduce' ' Alias -> Map Operator Tree:' ' a:src1 ' ' TableScan' ' alias: src1' ' Reduce Output Operator' ' key expressions:' ' expr: key' ' type: string' ' sort order: +' ' Map-reduce partition columns:' ' expr: key' ' type: string' ' tag: 0' ' value expressions:' ' expr: key' ' type: string' ' expr: value' ' type: string' ' a:src2 ' ' TableScan' ' alias: src2' ' Reduce Output Operator' ' key expressions:' ' expr: key' ' type: string' ' sort order: +' ' Map-reduce partition columns:' ' expr: key' ' type: string' ' tag: 1' ' value expressions:' ' expr: key' ' type: string' ' expr: value' ' type: string' ' Reduce Operator Tree:' ' Join Operator' ' condition map:' ' 
Inner Join 0 to 1' ' condition expressions:' ' 0 {VALUE._col0} {VALUE._col1}' ' 1 {VALUE._col0} {VALUE._col1}' ' handleSkewJoin: false' ' outputColumnNames: _col0, _col1, _col4, _col5' ' Select Operator' ' expressions:' ' expr: _col0' ' type: string' ' expr: _col1' ' type: string' ' expr: _col4' ' type: string' ' expr: _col5' ' type: string' ' outputColumnNames: _col0, _col1, _col2, _col3' ' File Output Operator' ' compressed: false' ' GlobalTableId: 0' ' table:' ' input format: org.apache.hadoop.mapred.SequenceFileInputFormat' ' output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat' '' ' Stage: Stage-0' ' Fetch Operator' ' limit: -1' '' '' 283 rows selected >>> >>> >>> select sum(hash(a.k1,a.v1,a.k2, a.v2)) from ( SELECT src1.key as k1, src1.value as v1, src2.key as k2, src2.value as v2 FROM src src1 JOIN src src2 ON (src1.key = src2.key) SORT BY k1, v1, k2, v2 ) a; '_c0' '524272996896' 1 row selected >>> >>> !record
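
Note on the plan above: with hive.auto.convert.join = true, Hive wraps the join in a Conditional Operator (Stage-7) that picks among three physical plans at run time. Stage-8 builds an in-memory hash table from src2 locally and map-joins it against src1 in Stage-5; Stage-9 is the mirror image with src1 as the small side; and both list Stage-1, the ordinary reduce-side join, as their backup stage in case the small side does not fit in memory. A map join can also be requested explicitly with the standard MAPJOIN hint; the query below is an illustrative sketch, not part of this test:

-- Hedged sketch: force src2 onto the in-memory (hash table) side of the join.
SELECT /*+ MAPJOIN(src2) */ src1.key, src1.value, src2.key, src2.value
FROM src src1 JOIN src src2 ON (src1.key = src2.key);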
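
The final SELECT is the test's verification step: rather than comparing every joined row against a golden file, it collapses the join output into a single order-independent checksum, sum(hash(k1, v1, k2, v2)), whose expected value here is 524272996896. Because sum is commutative, the SORT BY inside the subquery cannot change the checksum; it only exercises the extra shuffle stage (Stage-2). The same pattern can be used to compare any two result sets for equality; a minimal sketch, with mytable, c1, and c2 as assumed names:

-- Hedged sketch: order-independent checksum of a result set.
SELECT sum(hash(c1, c2)) FROM mytable;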