Saving all output to "!!{outputDirectory}!!/mergejoins.q.raw". Enter "record" with no arguments to stop it.
>>> !run !!{qFileDirectory}!!/mergejoins.q
>>> create table a (val1 int, val2 int);
No rows affected
>>> create table b (val1 int, val2 int);
No rows affected
>>> create table c (val1 int, val2 int);
No rows affected
>>> create table d (val1 int, val2 int);
No rows affected
>>> create table e (val1 int, val2 int);
No rows affected
>>> 
>>> explain select * from a join b on a.val1=b.val1 join c on a.val1=c.val1 join d on a.val1=d.val1 join e on a.val2=e.val2;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_JOIN (TOK_JOIN (TOK_TABREF (TOK_TABNAME a)) (TOK_TABREF (TOK_TABNAME b)) (= (. (TOK_TABLE_OR_COL a) val1) (. (TOK_TABLE_OR_COL b) val1))) (TOK_TABREF (TOK_TABNAME c)) (= (. (TOK_TABLE_OR_COL a) val1) (. (TOK_TABLE_OR_COL c) val1))) (TOK_TABREF (TOK_TABNAME d)) (= (. (TOK_TABLE_OR_COL a) val1) (. (TOK_TABLE_OR_COL d) val1))) (TOK_TABREF (TOK_TABNAME e)) (= (. (TOK_TABLE_OR_COL a) val2) (. (TOK_TABLE_OR_COL e) val2)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-2 depends on stages: Stage-1'
'  Stage-0 is a root stage'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        a '
'          TableScan'
'            alias: a'
'            Reduce Output Operator'
'              key expressions:'
'                    expr: val1'
'                    type: int'
'              sort order: +'
'              Map-reduce partition columns:'
'                    expr: val1'
'                    type: int'
'              tag: 0'
'              value expressions:'
'                    expr: val1'
'                    type: int'
'                    expr: val2'
'                    type: int'
'        b '
'          TableScan'
'            alias: b'
'            Reduce Output Operator'
'              key expressions:'
'                    expr: val1'
'                    type: int'
'              sort order: +'
'              Map-reduce partition columns:'
'                    expr: val1'
'                    type: int'
'              tag: 1'
'              value expressions:'
'                    expr: val1'
'                    type: int'
'                    expr: val2'
'                    type: int'
'        c '
'          TableScan'
'            alias: c'
'            Reduce Output Operator'
'              key expressions:'
'                    expr: val1'
'                    type: int'
'              sort order: +'
'              Map-reduce partition columns:'
'                    expr: val1'
'                    type: int'
'              tag: 2'
'              value expressions:'
'                    expr: val1'
'                    type: int'
'                    expr: val2'
'                    type: int'
'        d '
'          TableScan'
'            alias: d'
'            Reduce Output Operator'
'              key expressions:'
'                    expr: val1'
'                    type: int'
'              sort order: +'
'              Map-reduce partition columns:'
'                    expr: val1'
'                    type: int'
'              tag: 3'
'              value expressions:'
'                    expr: val1'
'                    type: int'
'                    expr: val2'
'                    type: int'
'      Reduce Operator Tree:'
'        Join Operator'
'          condition map:'
'               Inner Join 0 to 1'
'               Inner Join 0 to 2'
'               Inner Join 0 to 3'
'          condition expressions:'
'            0 {VALUE._col0} {VALUE._col1}'
'            1 {VALUE._col0} {VALUE._col1}'
'            2 {VALUE._col0} {VALUE._col1}'
'            3 {VALUE._col0} {VALUE._col1}'
'          handleSkewJoin: false'
'          outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9, _col12, _col13'
'          File Output Operator'
'            compressed: false'
'            GlobalTableId: 0'
'            table:'
'                input format: org.apache.hadoop.mapred.SequenceFileInputFormat'
'                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'
''
'  Stage: Stage-2'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        $INTNAME '
'            Reduce Output Operator'
'              key expressions:'
'                    expr: _col1'
'                    type: int'
'              sort order: +'
'              Map-reduce partition columns:'
'                    expr: _col1'
'                    type: int'
'              tag: 0'
'              value expressions:'
'                    expr: _col12'
'                    type: int'
'                    expr: _col13'
'                    type: int'
'                    expr: _col4'
'                    type: int'
'                    expr: _col5'
'                    type: int'
'                    expr: _col8'
'                    type: int'
'                    expr: _col9'
'                    type: int'
'                    expr: _col0'
'                    type: int'
'                    expr: _col1'
'                    type: int'
'        e '
'          TableScan'
'            alias: e'
'            Reduce Output Operator'
'              key expressions:'
'                    expr: val2'
'                    type: int'
'              sort order: +'
'              Map-reduce partition columns:'
'                    expr: val2'
'                    type: int'
'              tag: 1'
'              value expressions:'
'                    expr: val1'
'                    type: int'
'                    expr: val2'
'                    type: int'
'      Reduce Operator Tree:'
'        Join Operator'
'          condition map:'
'               Inner Join 0 to 1'
'          condition expressions:'
'            0 {VALUE._col0} {VALUE._col1} {VALUE._col4} {VALUE._col5} {VALUE._col8} {VALUE._col9} {VALUE._col12} {VALUE._col13}'
'            1 {VALUE._col0} {VALUE._col1}'
'          handleSkewJoin: false'
'          outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9, _col12, _col13, _col16, _col17'
'          Select Operator'
'            expressions:'
'                  expr: _col12'
'                  type: int'
'                  expr: _col13'
'                  type: int'
'                  expr: _col4'
'                  type: int'
'                  expr: _col5'
'                  type: int'
'                  expr: _col8'
'                  type: int'
'                  expr: _col9'
'                  type: int'
'                  expr: _col0'
'                  type: int'
'                  expr: _col1'
'                  type: int'
'                  expr: _col16'
'                  type: int'
'                  expr: _col17'
'                  type: int'
'            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9'
'            File Output Operator'
'              compressed: false'
'              GlobalTableId: 0'
'              table:'
'                  input format: org.apache.hadoop.mapred.TextInputFormat'
'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
'  Stage: Stage-0'
'    Fetch Operator'
'      limit: -1'
''
''
191 rows selected
>>> 
>>> --HIVE-3070 filter on outer join condition removed while merging join tree
>>> explain select * from src a join src b on a.key=b.key left outer join src c on b.key=c.key and b.key<10;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_JOIN (TOK_TABREF (TOK_TABNAME src) a) (TOK_TABREF (TOK_TABNAME src) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF (TOK_TABNAME src) c) (and (= (. (TOK_TABLE_OR_COL b) key) (. (TOK_TABLE_OR_COL c) key)) (< (. (TOK_TABLE_OR_COL b) key) 10)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-0 is a root stage'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        a '
'          TableScan'
'            alias: a'
'            Reduce Output Operator'
'              key expressions:'
'                    expr: key'
'                    type: string'
'              sort order: +'
'              Map-reduce partition columns:'
'                    expr: key'
'                    type: string'
'              tag: 0'
'              value expressions:'
'                    expr: key'
'                    type: string'
'                    expr: value'
'                    type: string'
'        b '
'          TableScan'
'            alias: b'
'            Reduce Output Operator'
'              key expressions:'
'                    expr: key'
'                    type: string'
'              sort order: +'
'              Map-reduce partition columns:'
'                    expr: key'
'                    type: string'
'              tag: 1'
'              value expressions:'
'                    expr: key'
'                    type: string'
'                    expr: value'
'                    type: string'
'        c '
'          TableScan'
'            alias: c'
'            Reduce Output Operator'
'              key expressions:'
'                    expr: key'
'                    type: string'
'              sort order: +'
'              Map-reduce partition columns:'
'                    expr: key'
'                    type: string'
'              tag: 2'
'              value expressions:'
'                    expr: key'
'                    type: string'
'                    expr: value'
'                    type: string'
'      Reduce Operator Tree:'
'        Join Operator'
'          condition map:'
'               Inner Join 0 to 1'
'               Left Outer Join1 to 2'
'          condition expressions:'
'            0 {VALUE._col0} {VALUE._col1}'
'            1 {VALUE._col0} {VALUE._col1}'
'            2 {VALUE._col0} {VALUE._col1}'
'          filter predicates:'
'            0 '
'            1 {(VALUE._col0 < 10.0)}'
'            2 '
'          handleSkewJoin: false'
'          outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
'          Select Operator'
'            expressions:'
'                  expr: _col0'
'                  type: string'
'                  expr: _col1'
'                  type: string'
'                  expr: _col4'
'                  type: string'
'                  expr: _col5'
'                  type: string'
'                  expr: _col8'
'                  type: string'
'                  expr: _col9'
'                  type: string'
'            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5'
'            File Output Operator'
'              compressed: false'
'              GlobalTableId: 0'
'              table:'
'                  input format: org.apache.hadoop.mapred.TextInputFormat'
'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
'  Stage: Stage-0'
'    Fetch Operator'
'      limit: -1'
''
''
104 rows selected
>>> !record